config.name,config.backend.name,config.backend.version,config.backend._target_,config.backend.task,config.backend.library,config.backend.model_type,config.backend.model,config.backend.processor,config.backend.device,config.backend.device_ids,config.backend.seed,config.backend.inter_op_num_threads,config.backend.intra_op_num_threads,config.backend.model_kwargs.trust_remote_code,config.backend.no_weights,config.backend.device_map,config.backend.torch_dtype,config.backend.eval_mode,config.backend.to_bettertransformer,config.backend.low_cpu_mem_usage,config.backend.attn_implementation,config.backend.cache_implementation,config.backend.autocast_enabled,config.backend.autocast_dtype,config.backend.torch_compile,config.backend.torch_compile_target,config.backend.quantization_scheme,config.backend.deepspeed_inference,config.backend.peft_type,config.scenario.name,config.scenario._target_,config.scenario.iterations,config.scenario.duration,config.scenario.warmup_runs,config.scenario.input_shapes.batch_size,config.scenario.input_shapes.num_choices,config.scenario.input_shapes.sequence_length,config.scenario.new_tokens,config.scenario.memory,config.scenario.latency,config.scenario.energy,config.scenario.generate_kwargs.max_new_tokens,config.scenario.generate_kwargs.min_new_tokens,config.launcher.name,config.launcher._target_,config.launcher.device_isolation,config.launcher.device_isolation_action,config.launcher.numactl,config.launcher.start_method,config.environment.cpu,config.environment.cpu_count,config.environment.cpu_ram_mb,config.environment.system,config.environment.machine,config.environment.platform,config.environment.processor,config.environment.python_version,config.environment.gpu,config.environment.gpu_count,config.environment.gpu_vram_mb,config.environment.optimum_benchmark_version,config.environment.optimum_benchmark_commit,config.environment.transformers_version,config.environment.transformers_commit,config.environment.accelerate_version,config.environment.accelerate_commit,config.environment.diffusers_version,config.environment.diffusers_commit,config.environment.optimum_version,config.environment.optimum_commit,config.environment.timm_version,config.environment.timm_commit,config.environment.peft_version,config.environment.peft_commit,report.traceback,report.load.memory.unit,report.load.memory.max_ram,report.load.memory.max_global_vram,report.load.memory.max_process_vram,report.load.memory.max_reserved,report.load.memory.max_allocated,report.load.latency.unit,report.load.latency.count,report.load.latency.total,report.load.latency.mean,report.load.latency.stdev,report.load.latency.p50,report.load.latency.p90,report.load.latency.p95,report.load.latency.p99,report.load.latency.values,report.load.throughput,report.load.energy.unit,report.load.energy.cpu,report.load.energy.ram,report.load.energy.gpu,report.load.energy.total,report.load.efficiency,report.prefill.memory.unit,report.prefill.memory.max_ram,report.prefill.memory.max_global_vram,report.prefill.memory.max_process_vram,report.prefill.memory.max_reserved,report.prefill.memory.max_allocated,report.prefill.latency.unit,report.prefill.latency.count,report.prefill.latency.total,report.prefill.latency.mean,report.prefill.latency.stdev,report.prefill.latency.p50,report.prefill.latency.p90,report.prefill.latency.p95,report.prefill.latency.p99,report.prefill.latency.values,report.prefill.throughput.unit,report.prefill.throughput.value,report.prefill.energy.unit,report.prefill.energy.cpu,report.prefill.energy.ram,report.prefill.energy.gpu,r
eport.prefill.energy.total,report.prefill.efficiency.unit,report.prefill.efficiency.value,report.decode.memory.unit,report.decode.memory.max_ram,report.decode.memory.max_global_vram,report.decode.memory.max_process_vram,report.decode.memory.max_reserved,report.decode.memory.max_allocated,report.decode.latency.unit,report.decode.latency.count,report.decode.latency.total,report.decode.latency.mean,report.decode.latency.stdev,report.decode.latency.p50,report.decode.latency.p90,report.decode.latency.p95,report.decode.latency.p99,report.decode.latency.values,report.decode.throughput.unit,report.decode.throughput.value,report.decode.energy.unit,report.decode.energy.cpu,report.decode.energy.ram,report.decode.energy.gpu,report.decode.energy.total,report.decode.efficiency.unit,report.decode.efficiency.value,report.per_token.memory,report.per_token.latency.unit,report.per_token.latency.count,report.per_token.latency.total,report.per_token.latency.mean,report.per_token.latency.stdev,report.per_token.latency.p50,report.per_token.latency.p90,report.per_token.latency.p95,report.per_token.latency.p99,report.per_token.latency.values,report.per_token.throughput.unit,report.per_token.throughput.value,report.per_token.energy,report.per_token.efficiency float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmppuwiscq_/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,914.362368,14696.710144,0.0,14294.188032,14284.158464,s,1,7.6857958984375,7.6857958984375,0.0,7.6857958984375,7.6857958984375,7.6857958984375,7.6857958984375,[7.6857958984375],,kWh,9.674804329142717e-06,1.0596090928116913e-06,4.705281542005668e-06,1.5439694963960076e-05,,MB,1341.100032,15004.991488,0.0,14587.789312,14512.892416,s,10,1.943247497558594,0.1943247497558594,0.0040496253980824345,0.19518470001220703,0.19693244018554687,0.19831324310302734,0.19941788543701172,"[0.18336431884765625, 0.19569296264648436, 0.19288636779785157, 0.194625244140625, 0.19532643127441407, 0.19371180725097656, 0.19504296875, 0.19662559509277344, 0.1962777557373047, 0.1996940460205078]",tokens/s,1317.382373174938,kWh,5.74055250302384e-06,6.327974430442646e-07,3.829300449058734e-06,1.020265039512684e-05,tokens/kWh,25091519.3685628,MB,1366.540288,15109.849088,0.0,14692.646912,14646.153216,s,10,43.29312548828125,4.329312548828125,0.006701243158026347,4.330959228515625,4.336129541015625,4.336483959960938,4.336767495117187,"[4.33457568359375, 4.33683837890625, 4.33605078125, 4.331033203125, 4.33085986328125, 4.33356201171875, 4.33088525390625, 4.32168603515625, 4.31608154296875, 4.321552734375]",tokens/s,14.551963917932671,kWh,0.00012631779599655147,1.3933503379328584e-05,8.374943628054008e-05,0.00022400073565642016,tokens/kWh,281249.07632728276,,s,630,43.2896548843384,0.06871373791164821,0.00045860960144882094,0.06867476654052734,0.06910189971923829,0.06925225257873535,0.07107424880981446,"[0.07154102325439453, 0.06851158142089844, 0.06815760040283203, 0.06808550262451171, 0.06815974426269532, 0.06847078704833984, 0.06844393920898438, 0.068301025390625, 0.06816508483886718, 0.06824809265136719, 0.06820992279052734, 0.06820531463623047, 0.06866697692871093, 0.06865885162353516, 0.06897740936279297, 0.06894163513183593, 0.068521728515625, 0.0686289291381836, 0.06858342742919922, 0.06841133117675781, 0.06834719848632813, 0.06833449554443359, 0.06868975830078125, 0.0685321273803711, 0.06840943908691406, 0.06843990325927735, 0.06862659454345703, 0.06863667297363281, 0.06872271728515625, 0.06897660827636719, 0.06872268676757813, 0.06887833404541016, 
0.06864470672607421, 0.06846070098876954, 0.06893555450439454, 0.0687146224975586, 0.06845439910888672, 0.0687636489868164, 0.06874527740478516, 0.06860348510742187, 0.06877423858642578, 0.06866534423828125, 0.06887833404541016, 0.06912409973144532, 0.06909747314453125, 0.06901119995117187, 0.06982991790771484, 0.06863785552978516, 0.0688977279663086, 0.06876659393310547, 0.0691174087524414, 0.06970217895507813, 0.06881394958496094, 0.06862691497802734, 0.06932316589355468, 0.0691488037109375, 0.06907266998291016, 0.06918479919433594, 0.0690552978515625, 0.06939673614501952, 0.06931372833251953, 0.07016505432128907, 0.06907401275634766, 0.07112290954589844, 0.06864447784423829, 0.06821900939941407, 0.0686328353881836, 0.06842758178710938, 0.06841958618164062, 0.06823955535888672, 0.0682166748046875, 0.06820819091796874, 0.06851961517333985, 0.06842050933837891, 0.0683575668334961, 0.06843244934082031, 0.06886131286621094, 0.0691655044555664, 0.06908675384521484, 0.06867011260986328, 0.06828873443603516, 0.06837635040283203, 0.06861209869384766, 0.06860594940185546, 0.06850121307373047, 0.06875164794921874, 0.06858252716064453, 0.0684820785522461, 0.06850563049316406, 0.06872249603271484, 0.06888569641113282, 0.069283935546875, 0.06903266906738281, 0.06904013061523437, 0.06876992034912109, 0.06890451049804687, 0.0686943359375, 0.06848025512695312, 0.06879923248291016, 0.06867558288574219, 0.06854656219482422, 0.06883328247070312, 0.06868991851806641, 0.06862006378173828, 0.06908131408691406, 0.06925126647949219, 0.06910963439941406, 0.06914246368408203, 0.0691773452758789, 0.06905241394042969, 0.06877593231201172, 0.06904822540283204, 0.0690299835205078, 0.06900521850585938, 0.068834716796875, 0.06895206451416015, 0.06888294219970703, 0.06869625854492188, 0.06965821075439453, 0.06943580627441406, 0.0692056655883789, 0.06926326751708985, 0.06941331481933594, 0.06898687744140625, 0.06923197174072265, 0.06890147399902344, 0.0710902099609375, 0.06884764862060547, 0.06829631805419922, 0.06866358184814453, 0.0684147186279297, 0.06838566589355469, 0.06823091125488281, 0.06823276519775391, 0.06830745697021484, 0.06857936096191407, 0.06844838714599609, 0.06837152099609375, 0.0682930908203125, 0.06864268493652344, 0.06894147491455078, 0.0685225601196289, 0.06866777801513672, 0.06867584228515625, 0.06843891143798828, 0.06828851318359375, 0.06872563171386718, 0.06849126434326172, 0.06841542053222656, 0.0693057632446289, 0.0688031005859375, 0.06833964538574219, 0.06888288116455078, 0.06868278503417968, 0.06895014190673829, 0.06890147399902344, 0.06864486694335938, 0.06886009979248046, 0.0686221466064453, 0.06850969696044922, 0.06884146881103516, 0.06874752044677734, 0.06872649383544922, 0.06879440307617188, 0.06862374114990234, 0.06879910278320313, 0.06889472198486328, 0.0688189468383789, 0.06903193664550782, 0.06906674957275391, 0.06913033294677734, 0.0690871353149414, 0.06909747314453125, 0.06886335754394532, 0.06897113800048828, 0.06911516571044922, 0.0690511016845703, 0.06907247924804688, 0.06887983703613282, 0.06880140686035156, 0.06902175903320312, 0.06900940704345702, 0.06914252471923828, 0.06927680206298828, 0.06905331420898438, 0.06969542694091797, 0.06950879669189453, 0.06899510192871093, 0.0690630111694336, 0.07103517150878906, 0.06859776306152343, 0.06861209869384766, 0.06836428833007813, 0.06825548553466797, 0.06825174713134766, 0.068281982421875, 0.06823990631103516, 0.06885164642333984, 0.06865106964111328, 0.06824960327148437, 0.06821273803710938, 0.06814291381835938, 0.0688656005859375, 
0.06893238067626953, 0.06919459533691406, 0.06852198028564453, 0.06840831756591798, 0.06858751678466797, 0.06851583862304687, 0.06841958618164062, 0.06842691040039063, 0.06862115478515625, 0.06859744262695312, 0.06838838195800781, 0.06830127716064453, 0.0682660140991211, 0.06885199737548828, 0.06874089813232422, 0.06893772888183594, 0.06888880157470703, 0.06854774475097657, 0.06871689605712891, 0.0685634536743164, 0.06840512084960937, 0.06843714904785156, 0.0685782699584961, 0.0686611557006836, 0.06852124786376954, 0.06884668731689453, 0.0686607666015625, 0.06872492980957032, 0.06910486602783203, 0.0687419204711914, 0.0692674560546875, 0.068890625, 0.06909951782226563, 0.06875545501708985, 0.06885734558105469, 0.06888089752197266, 0.06889881896972656, 0.06888857269287109, 0.06870425415039062, 0.0689846420288086, 0.06876953887939453, 0.06894601440429687, 0.06887586975097656, 0.06925305938720704, 0.069185791015625, 0.06911958312988281, 0.06917375946044922, 0.06932937622070312, 0.06905055999755859, 0.071364990234375, 0.06899136352539062, 0.0682803192138672, 0.06920089721679687, 0.068389404296875, 0.0683967056274414, 0.06845523071289063, 0.06825574493408203, 0.0685137939453125, 0.06859967803955078, 0.06875968170166015, 0.06829776000976563, 0.06834684753417969, 0.06869219207763672, 0.06875215911865235, 0.0690513916015625, 0.06866124725341796, 0.0684411849975586, 0.06848966217041015, 0.06843548583984375, 0.06847583770751953, 0.06827177429199219, 0.06822128295898437, 0.06852505493164063, 0.06862735748291016, 0.06863228607177735, 0.06840512084960937, 0.06855455780029297, 0.06875513458251953, 0.06873356628417969, 0.06881523132324219, 0.06865718078613281, 0.06843331146240235, 0.06856147003173828, 0.06854041290283203, 0.06853033447265625, 0.06844809722900391, 0.06857933044433594, 0.06853807830810547, 0.06883766174316407, 0.0690130844116211, 0.06864883422851563, 0.06865535736083984, 0.06892697906494141, 0.0686432647705078, 0.06887193298339844, 0.06889055633544922, 0.06912457275390625, 0.06864505767822265, 0.06873702239990234, 0.06895001220703124, 0.06872016143798829, 0.06898941040039062, 0.06882495880126953, 0.06893376159667969, 0.0689637451171875, 0.06898518371582031, 0.06901983642578124, 0.06899222564697266, 0.0690225601196289, 0.06924396514892578, 0.06902598571777344, 0.06911872100830078, 0.07141785430908203, 0.06874521636962891, 0.06820630645751953, 0.06810959625244141, 0.06813097381591797, 0.06825814056396484, 0.06845081329345704, 0.06887158203125, 0.06848758697509766, 0.06857542419433593, 0.06840684509277344, 0.0683949737548828, 0.06887062072753906, 0.068780029296875, 0.06911138916015624, 0.06899884796142577, 0.06873776245117187, 0.06846192169189454, 0.06841606140136719, 0.06863670349121094, 0.06842988586425781, 0.06839500427246094, 0.06904332733154298, 0.06889087677001954, 0.06877452850341798, 0.06865305328369141, 0.06854617309570313, 0.06899136352539062, 0.06902476501464844, 0.06921641540527344, 0.06872764587402344, 0.06895718383789062, 0.06866432189941406, 0.06864800262451172, 0.06878099060058594, 0.06871244812011719, 0.0688024673461914, 0.06871663665771484, 0.06850969696044922, 0.06852774047851562, 0.0688803482055664, 0.06872720336914062, 0.06902783966064453, 0.06887964630126953, 0.06885654449462891, 0.06893507385253907, 0.06893817901611328, 0.06886953735351563, 0.06863539123535156, 0.06881852722167969, 0.06861856079101562, 0.0684188461303711, 0.06862118530273438, 0.06883712005615235, 0.06860403442382812, 0.06891462707519531, 0.06889945220947266, 0.06912818908691407, 0.06899040222167968, 
0.06908953857421875, 0.06912969970703126, 0.06927446746826171, 0.06901094055175781, 0.07121453094482422, 0.06872940826416016, 0.0680910415649414, 0.06807433319091796, 0.06854780578613281, 0.06844041442871093, 0.06851014709472657, 0.06828441619873046, 0.06829631805419922, 0.06861436462402344, 0.06849897766113282, 0.06838665771484374, 0.06815196990966797, 0.068476318359375, 0.06908799743652344, 0.06873049926757813, 0.068751708984375, 0.06907433319091796, 0.06823382568359375, 0.06845439910888672, 0.06861414337158203, 0.06851305389404297, 0.06836502075195312, 0.06878412628173829, 0.06870220947265625, 0.06848467254638672, 0.06905081939697266, 0.06878208160400391, 0.06907289886474609, 0.06910157012939454, 0.06896640014648438, 0.06870537567138672, 0.0685327377319336, 0.06877839660644532, 0.06860800170898437, 0.06861382293701172, 0.06893395233154297, 0.06868377685546875, 0.06883737945556641, 0.06869007873535156, 0.0686009292602539, 0.06908595275878907, 0.0689459228515625, 0.06918064117431641, 0.06881120300292969, 0.06903196716308593, 0.06880902099609375, 0.06874317169189453, 0.06897459411621094, 0.06863414764404296, 0.06895814514160156, 0.06875785827636718, 0.06861837005615234, 0.06887430572509766, 0.06869596862792969, 0.06886156463623047, 0.06877641296386719, 0.06880207824707031, 0.06908156585693359, 0.06868787384033204, 0.068853759765625, 0.06875135803222657, 0.0685998077392578, 0.07115980529785157, 0.0686648941040039, 0.06811052703857422, 0.06809420776367188, 0.0681546859741211, 0.0680777587890625, 0.06791423797607422, 0.06795673370361328, 0.06815948486328124, 0.06802022552490235, 0.06813286590576172, 0.06815129852294922, 0.06801987457275391, 0.06856678771972656, 0.06886051177978515, 0.06863667297363281, 0.06854041290283203, 0.06842982482910157, 0.06827008056640625, 0.0680478744506836, 0.0681707534790039, 0.06844620513916015, 0.06823731231689453, 0.06838460540771485, 0.0683617935180664, 0.06837920379638672, 0.06880873870849609, 0.06857318115234375, 0.06869606781005859, 0.06874521636962891, 0.06891110229492188, 0.07000032043457032, 0.06845471954345703, 0.06828851318359375, 0.06839705657958985, 0.0686111068725586, 0.06932144165039063, 0.06875571441650391, 0.06861619567871094, 0.06833971405029297, 0.06856018829345703, 0.06864761352539063, 0.0686192626953125, 0.068532958984375, 0.06918892669677734, 0.06871491241455079, 0.06896697235107421, 0.06862188720703125, 0.06847052764892578, 0.06882784271240235, 0.06866284942626953, 0.06857759857177734, 0.06875296020507812, 0.06865126037597656, 0.06865952301025391, 0.06875545501708985, 0.06869401550292968, 0.06864076995849609, 0.06889881896972656, 0.06887606048583984, 0.06893926239013672, 0.06884432220458984, 0.06884041595458984, 0.07097411346435546, 0.06877184295654297, 0.06811603546142578, 0.06809439849853516, 0.06815087890625, 0.06811011505126953, 0.06813529968261718, 0.06808396911621094, 0.06806118774414062, 0.06811212921142579, 0.06802867126464844, 0.06798486328125, 0.06796752166748046, 0.06810009765625, 0.06855872344970704, 0.0686217269897461, 0.06841417694091798, 0.06817587280273438, 0.06810620880126952, 0.06796905517578125, 0.0680687026977539, 0.06809075164794921, 0.06812239837646485, 0.06852114868164062, 0.06837741088867187, 0.06857727813720703, 0.06831513977050781, 0.06831001281738282, 0.068468994140625, 0.06878457641601562, 0.06839737701416015, 0.06863021087646484, 0.06874678039550781, 0.06841629028320312, 0.06831718444824218, 0.0688353271484375, 0.0684421157836914, 0.06838662719726563, 0.06842694091796875, 0.06872351837158203, 0.06858975982666016, 
0.06856294250488282, 0.06856294250488282, 0.06868364715576172, 0.06853363037109375, 0.06848384094238281, 0.0687615966796875, 0.06869132995605469, 0.06850348663330078, 0.06877664184570312, 0.06859728240966798, 0.06864736175537109, 0.06885379028320313, 0.06860390472412109, 0.06857843017578125, 0.06907987213134766, 0.06872889709472656, 0.06892733001708984, 0.06880284881591797, 0.06885903930664063, 0.06876847839355468, 0.06878208160400391, 0.06892543792724609, 0.07102464294433594, 0.06893772888183594, 0.06818201446533204, 0.06821068572998047, 0.06814892578125, 0.06815567779541015, 0.06803868865966797, 0.06800691223144531, 0.06795942687988281, 0.06799091339111328, 0.06800457763671874, 0.06799183654785156, 0.06798473358154297, 0.06830352020263672, 0.06879027557373046, 0.06863616180419922, 0.06843852996826172, 0.06819983673095703, 0.06848982238769531, 0.06831718444824218, 0.06833356475830078, 0.06825984191894531, 0.06819805145263672, 0.06812025451660156, 0.06810076904296875, 0.06814883422851563, 0.06847222137451171, 0.0686739501953125, 0.0686080322265625, 0.06878265380859375, 0.06895616149902344, 0.06848921966552735, 0.06856886291503907, 0.06852630615234374, 0.06839910125732422, 0.0691773452758789, 0.06861376190185547, 0.06841792297363282, 0.06838066864013671, 0.06849890899658204, 0.06908573150634766, 0.06848614501953125, 0.06883132934570313, 0.06878300476074219, 0.06900428771972657, 0.06908211517333984, 0.06874889373779297, 0.06869852447509765, 0.06859318542480469, 0.06865740966796875, 0.06890493011474609, 0.0686919708251953, 0.06895027160644532, 0.0685854721069336, 0.06870953369140625, 0.06884848022460938, 0.06873292541503906, 0.06897869110107421, 0.06895110321044921, 0.06933190155029297, 0.06903135681152343, 0.06911443328857422, 0.0688617935180664]",tokens/s,14.553130573187492,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 582, in __init__ self.transformer = CodeGenModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in __init__ self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in <listcomp> self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 258, in __init__ self.mlp = CodeGenMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 236, in __init__ self.fc_in = nn.Linear(embed_dim, intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 172.12 MiB is free. Process 363711 has 14.57 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 14.15 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpjlbao5qr/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 582, in __init__ self.transformer = CodeGenModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in __init__ self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in <listcomp> self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 258, in __init__ self.mlp = CodeGenMLP(inner_dim, config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 237, in __init__ self.fc_out = nn.Linear(intermediate_size, embed_dim) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 480.12 MiB is free. Process 362979 has 14.27 GiB memory in use. Of the allocated memory 14.15 GiB is allocated by PyTorch, and 10.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 582, in __init__ self.transformer = CodeGenModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in __init__ self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 258, in __init__ self.mlp = CodeGenMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 236, in __init__ self.fc_in = nn.Linear(embed_dim, intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 355440 has 14.74 GiB memory in use. Of the allocated memory 14.62 GiB is allocated by PyTorch, and 6.49 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpq0nrgp3k/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpgah30h5a/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,915.296256,14696.710144,0.0,14294.188032,14284.158464,s,1,7.6810771484375,7.6810771484375,0.0,7.6810771484375,7.6810771484375,7.6810771484375,7.6810771484375,[7.6810771484375],,kWh,9.905135304082553e-06,1.0853233250363174e-06,4.480559140013218e-06,1.5471017769132087e-05,,MB,1223.168,15000.797184,0.0,14587.789312,14512.892416,s,10,12.829853881835938,1.2829853881835935,0.00421568002899947,1.2850444335937499,1.2872422363281248,1.287562158203125,1.2878180957031249,"[1.2742020263671876, 1.2775047607421874, 1.280531982421875, 1.281549560546875, 1.2849490966796875, 1.2857730712890625, 1.285150390625, 1.2851397705078125, 1.287171142578125, 1.287882080078125]",tokens/s,199.5346185215998,kWh,3.753143016875735e-05,4.1392510353495144e-06,2.481549207459799e-05,6.648617327870486e-05,tokens/kWh,3850424.6428331486,MB,1240.682496,15105.654784,0.0,14692.646912,14646.153216,s,10,43.4332490234375,4.34332490234375,0.008011025973107256,4.343001220703125,4.35440263671875,4.354962060546875,4.355409599609375,"[4.33402490234375, 4.33474169921875, 4.333837890625, 4.336439453125, 4.341248046875, 4.34907177734375, 4.355521484375, 4.3542783203125, 4.3493310546875, 4.34475439453125]",tokens/s,14.50501664427726,kWh,0.00012685707515249306,1.3991307168688082e-05,8.431120633780043e-05,0.00022515958865898154,tokens/kWh,279801.5415431296,,s,630,43.429869972228985,0.06893630154322064,0.00036554511106973685,0.06890913772583007,0.06933305282592774,0.06943657341003417,0.07019725364685059,"[0.07021414184570313, 0.06850969696044922, 0.068261474609375, 0.06832988739013672, 0.0683331527709961, 0.06828482818603515, 0.06812876892089843, 0.06813625335693359, 0.0681827163696289, 0.06841139221191406, 0.0686185302734375, 0.0684349136352539, 0.06830182647705078, 0.06844758605957031, 0.06883090972900391, 0.06873538970947266, 0.06854029083251953, 0.06863868713378907, 0.068559326171875, 0.06859327697753906, 0.06852851104736328, 0.0685978240966797, 0.06853011322021485, 0.06856649780273437, 0.06872322845458985, 0.06906470489501954, 0.06900889587402344, 0.06869430541992187, 0.06886627197265625, 0.06933414459228515, 0.07004768371582032, 0.06895507049560547, 0.06884342193603515, 0.0685113296508789, 0.0687088623046875, 0.06862185668945313, 0.0685687026977539, 0.0690225601196289, 0.06868172454833985, 0.06879373168945313, 0.0687374725341797, 0.06878636932373047, 0.06899094390869141, 0.06898281860351563, 0.06917529296875, 0.06891315460205077, 0.06893068695068359, 0.068766845703125, 0.06884486389160156, 
0.06889027404785156, 0.06859855651855469, 0.06896435546875, 0.06903817749023437, 0.0689521255493164, 0.06880445098876953, 0.06918681335449219, 0.06894656372070312, 0.06941410827636718, 0.0691187515258789, 0.06924095916748046, 0.06939148712158204, 0.06887513732910157, 0.06901347351074219, 0.06980684661865234, 0.06857679748535156, 0.06820531463623047, 0.06812665557861328, 0.068185791015625, 0.06823897552490234, 0.06850016021728515, 0.06856294250488282, 0.06843958282470704, 0.06851148986816406, 0.06877996826171875, 0.06852291107177734, 0.06859967803955078, 0.06891929626464843, 0.06873292541503906, 0.06859548950195313, 0.06884294128417968, 0.0688504638671875, 0.06864649963378906, 0.06854441833496094, 0.0684691162109375, 0.0684791030883789, 0.06883324432373047, 0.06870588684082031, 0.06880108642578125, 0.0688331527709961, 0.068480224609375, 0.06922525024414063, 0.06910361480712891, 0.06913638305664062, 0.06876326751708985, 0.06912038421630859, 0.06862806701660157, 0.06856352233886719, 0.06872662353515625, 0.06873702239990234, 0.06870374298095704, 0.06878278350830078, 0.06878915405273438, 0.06890383911132812, 0.06885282897949219, 0.06891817474365235, 0.06890259552001952, 0.06908783721923828, 0.06888521575927735, 0.06886061096191406, 0.06897286224365234, 0.06870630645751953, 0.06891516876220703, 0.06876367950439453, 0.06879948425292969, 0.06903910064697266, 0.06889871978759765, 0.06903817749023437, 0.06921820831298828, 0.06902384185791016, 0.06912105560302734, 0.06904847717285156, 0.06919660949707031, 0.06899900817871094, 0.0692041244506836, 0.06885171508789062, 0.06916041564941407, 0.07013394927978515, 0.06894310760498047, 0.06835257720947266, 0.06822275543212891, 0.06816995239257813, 0.06869760131835938, 0.06845283508300781, 0.06834957122802734, 0.06829452514648438, 0.06829724884033203, 0.06845648193359374, 0.06846812438964844, 0.06856674957275391, 0.0694156494140625, 0.06899520111083984, 0.06888652801513671, 0.06861209869384766, 0.06839910125732422, 0.06823750305175781, 0.0685811538696289, 0.06851168060302734, 0.06828221130371094, 0.06838646697998046, 0.06864733123779297, 0.0687567367553711, 0.06858601379394531, 0.06879888153076172, 0.06880358123779297, 0.06897293090820313, 0.06897516632080078, 0.0688741455078125, 0.06882867431640625, 0.06852706909179687, 0.06847974395751953, 0.06888665771484374, 0.06864364624023438, 0.06882466888427734, 0.06873929595947266, 0.06862393951416015, 0.06888307189941406, 0.06871817779541016, 0.06891999816894531, 0.06911504364013672, 0.06890735626220704, 0.06910896301269531, 0.06923097229003906, 0.06891919708251953, 0.06888726043701172, 0.06891725158691406, 0.0687390365600586, 0.06894425964355469, 0.06873238372802734, 0.06926905822753907, 0.06880716705322265, 0.06899520111083984, 0.06895001220703124, 0.06919782257080079, 0.06898278045654296, 0.06927283477783203, 0.06909990692138672, 0.0691337890625, 0.0690533447265625, 0.06904598236083985, 0.07014739227294922, 0.06870604705810547, 0.06882582092285157, 0.06836064147949218, 0.06828742218017578, 0.06844915008544922, 0.06836041259765625, 0.06851561737060546, 0.0683473892211914, 0.06881740570068359, 0.06885171508789062, 0.0686263656616211, 0.06855276489257812, 0.06886521911621094, 0.06863136291503906, 0.06886969757080078, 0.06866169738769531, 0.06857523345947265, 0.06873001861572266, 0.06867782592773437, 0.06866515350341797, 0.06865167999267578, 0.06862662506103516, 0.06843778991699219, 0.0685293731689453, 0.06890598297119141, 0.06865305328369141, 0.06895206451416015, 0.06891110229492188, 0.06911385345458984, 
0.0690208969116211, 0.06890921783447265, 0.06871718597412109, 0.0686956787109375, 0.06885350036621093, 0.06870304107666016, 0.06886585235595703, 0.06882099151611328, 0.068569091796875, 0.0688721923828125, 0.06883087921142578, 0.06904630279541016, 0.06905197143554688, 0.0690798110961914, 0.06897254180908204, 0.06900326538085938, 0.06883123016357422, 0.06890083312988281, 0.06881215667724609, 0.0688114242553711, 0.06891241455078125, 0.06881298828125, 0.06929872131347656, 0.06867967987060547, 0.0690945587158203, 0.06892777252197266, 0.06926802825927734, 0.06898489379882812, 0.06911788940429688, 0.06897869110107421, 0.06901145935058593, 0.06925302124023437, 0.06908102416992187, 0.06983055877685547, 0.06877110290527344, 0.06813369750976563, 0.0681778564453125, 0.06850361633300782, 0.0684031982421875, 0.06835590362548828, 0.0683082275390625, 0.06824646759033202, 0.06832332611083984, 0.06875135803222657, 0.06870015716552734, 0.06846419525146484, 0.06903398132324219, 0.06873878479003906, 0.06884220886230469, 0.0688433609008789, 0.0684873275756836, 0.06877808380126953, 0.06872191619873047, 0.06848579406738281, 0.06883123016357422, 0.06890496063232422, 0.06860364532470703, 0.0689520034790039, 0.06865689849853515, 0.06882784271240235, 0.06904000091552734, 0.06908927917480469, 0.06902934265136719, 0.06904476928710937, 0.0688005142211914, 0.06873455810546875, 0.06884566497802734, 0.06875782775878907, 0.06897638702392578, 0.06881712341308593, 0.06897666931152344, 0.06884556579589844, 0.0689090576171875, 0.06911382293701172, 0.06917327880859375, 0.06913228607177735, 0.06935868835449219, 0.06914546966552734, 0.06928988647460937, 0.06920175933837891, 0.06919951629638672, 0.06931721496582031, 0.06913436889648437, 0.06905241394042969, 0.06904627227783203, 0.06895565032958985, 0.06910617828369141, 0.06899839782714844, 0.06918598175048828, 0.0691193618774414, 0.06940534210205078, 0.06961795043945312, 0.06906265258789063, 0.06919522857666016, 0.06920861053466797, 0.06936370849609375, 0.0703400650024414, 0.06894809722900391, 0.06860636901855469, 0.0686056671142578, 0.06858985900878906, 0.0685998077392578, 0.06879801940917969, 0.06865350341796875, 0.06852915191650391, 0.06842832183837891, 0.06877967834472656, 0.06876860809326171, 0.06910562896728516, 0.06890422058105469, 0.06917596435546874, 0.06887430572509766, 0.06888985443115235, 0.06908153533935547, 0.06879388427734374, 0.06872553253173828, 0.06852403259277344, 0.0689205780029297, 0.06871347045898438, 0.06860988616943359, 0.06881385803222656, 0.06861913299560547, 0.06855286407470704, 0.0690420150756836, 0.06890451049804687, 0.06914911651611329, 0.06886176300048828, 0.06893382263183594, 0.06878617858886718, 0.06910924530029297, 0.06898944091796876, 0.06911795043945312, 0.06909107208251954, 0.06914864349365234, 0.068847900390625, 0.06913638305664062, 0.06895206451416015, 0.06918505859375, 0.06916143798828125, 0.06942082977294922, 0.0692750701904297, 0.06903036499023438, 0.06920838165283204, 0.06899507141113281, 0.0690742416381836, 0.06909203338623048, 0.06922144317626953, 0.06922700500488281, 0.0690440673828125, 0.06944419097900391, 0.06933913421630859, 0.06919987487792968, 0.06942451477050782, 0.06951136016845703, 0.0698700180053711, 0.07027916717529296, 0.06907318115234375, 0.06926921844482421, 0.06933046722412109, 0.07090611267089844, 0.06893148803710937, 0.06883577728271484, 0.06851728057861328, 0.06845913696289062, 0.06887420654296875, 0.06886592102050781, 0.0688006362915039, 0.0687350082397461, 0.06849942779541016, 0.06885171508789062, 0.06884111785888672, 
0.06866095733642578, 0.06928447723388671, 0.06912793731689452, 0.06914835357666016, 0.06907904052734375, 0.06888726043701172, 0.06863839721679688, 0.0687208023071289, 0.06876576232910156, 0.06890080261230469, 0.06878339385986328, 0.06879519653320312, 0.06893353271484375, 0.06883455657958984, 0.06914246368408203, 0.06911878204345703, 0.06943334197998047, 0.06943539428710938, 0.06912115478515625, 0.06927654266357422, 0.06908873748779297, 0.06902019500732422, 0.06898252868652344, 0.06922262573242187, 0.06925929260253906, 0.06908892822265625, 0.0693905258178711, 0.06933293151855469, 0.06900265502929688, 0.06961849975585938, 0.06940080261230469, 0.06934220886230469, 0.0696115493774414, 0.069417724609375, 0.06927961730957032, 0.06914470672607421, 0.06925107574462891, 0.06927974700927735, 0.06925033569335938, 0.06914736175537109, 0.06910361480712891, 0.06914575958251953, 0.06960137939453125, 0.06952422332763672, 0.06949068450927734, 0.06941426849365234, 0.06935526275634765, 0.06935187530517578, 0.0694501724243164, 0.06908723449707031, 0.0692831039428711, 0.07077613067626953, 0.06908544158935546, 0.06869635009765625, 0.06867276763916015, 0.06871100616455078, 0.06858783721923828, 0.06867874908447266, 0.06913526153564453, 0.06877161407470703, 0.06881097412109374, 0.0686346206665039, 0.06864009857177734, 0.06889321899414062, 0.06896857452392578, 0.06914591979980468, 0.06896880340576172, 0.06901385498046875, 0.06871449279785156, 0.06893561553955078, 0.06876166534423828, 0.06876774597167969, 0.06891878509521485, 0.068837890625, 0.06926131439208984, 0.06874317169189453, 0.06891718292236328, 0.06894556427001954, 0.06935939025878907, 0.06927613067626953, 0.0692297592163086, 0.069297119140625, 0.06900736236572266, 0.06927769470214844, 0.06882713317871093, 0.06894710540771484, 0.06888944244384766, 0.06913433837890624, 0.0691978530883789, 0.06904009246826172, 0.06923843383789062, 0.06911199951171874, 0.06961100769042969, 0.06930937957763672, 0.0694977569580078, 0.06937644958496093, 0.06928627014160156, 0.06928160095214844, 0.06924691009521484, 0.06915264129638672, 0.06917494201660156, 0.06918985748291015, 0.06922291564941406, 0.06923411560058594, 0.06905680084228516, 0.06926774597167969, 0.06936780548095703, 0.06939238739013671, 0.0694169921875, 0.06964425659179688, 0.06934272003173828, 0.06919948577880859, 0.06940557098388672, 0.06939347076416015, 0.07077712249755859, 0.06892339324951172, 0.06868582153320313, 0.0686223373413086, 0.06862438201904297, 0.06900089263916015, 0.06879456329345703, 0.06903939056396484, 0.06882147216796874, 0.06875993347167969, 0.06890496063232422, 0.06870611572265625, 0.06888451385498047, 0.06890306854248048, 0.06942217254638672, 0.06902642822265626, 0.06897283172607421, 0.06865817260742188, 0.06888297271728516, 0.06865545654296876, 0.06906681823730469, 0.0689889907836914, 0.06854656219482422, 0.06878617858886718, 0.06876966094970703, 0.06903437042236328, 0.06915660858154297, 0.06921625518798828, 0.06915481567382813, 0.06953778839111328, 0.06888979339599609, 0.06894009399414062, 0.06867782592773437, 0.06887264251708984, 0.06869331359863282, 0.06908972930908203, 0.06911808013916015, 0.06921727752685547, 0.06866226959228515, 0.06887423706054688, 0.0689090576171875, 0.06901318359375, 0.0692001953125, 0.06923878479003906, 0.06888243103027344, 0.06931251525878906, 0.0690494384765625, 0.06907695770263672, 0.06904723358154297, 0.06909747314453125, 0.06912204742431641, 0.06905673980712891, 0.06913011169433594, 0.06898883056640626, 0.06918463897705078, 0.0691240005493164, 
0.06943228912353516, 0.06950099182128906, 0.06908131408691406, 0.06942486572265626, 0.06937190246582031, 0.06914867401123047, 0.0692305908203125, 0.0701559066772461, 0.06883312225341796, 0.06837506866455079, 0.06849282836914063, 0.06825007629394532, 0.0682449951171875, 0.06863712310791016, 0.06862035369873047, 0.06851366424560547, 0.06900064086914062, 0.06865376281738281, 0.06853836822509765, 0.06884761810302735, 0.06888674926757812, 0.06966249847412109, 0.06877593231201172, 0.06889266967773437, 0.06872406768798828, 0.06847760009765624, 0.06874931335449219, 0.06869811248779296, 0.06854041290283203, 0.06880665588378906, 0.06917529296875, 0.06890496063232422, 0.06874540710449219, 0.06884726715087891, 0.06913196563720703, 0.06915891265869141, 0.06963043212890625, 0.06897049713134766, 0.06862147521972656, 0.06875238037109375, 0.06872438049316407, 0.0685032958984375, 0.06888432312011719, 0.0687515869140625, 0.06882931518554687, 0.0688431396484375, 0.06906889343261718, 0.06902019500732422, 0.06889852905273437, 0.06913823699951172, 0.06901398468017578, 0.06971772766113281, 0.06917967987060547, 0.06898070526123047, 0.06899436950683593, 0.06876009368896484, 0.06907027435302734, 0.06882166290283204, 0.06943753814697265, 0.06900326538085938, 0.0691630096435547, 0.06931862640380859, 0.0691978530883789, 0.06931455993652344, 0.06919782257080079, 0.06921580505371094, 0.07040045166015625, 0.06931574249267577, 0.06931452941894531, 0.06904630279541016]",tokens/s,14.506145203815947,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 582, in __init__ self.transformer = CodeGenModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in __init__ self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 400, in <listcomp> self.h = nn.ModuleList([CodeGenBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 258, in __init__ self.mlp = CodeGenMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 236, in __init__ self.fc_in = nn.Linear(embed_dim, intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 172.12 MiB is free. Process 363355 has 14.57 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 14.15 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,808.456192,673.05472,0.0,270.532608,250.474496,s,1,9.2063984375,9.2063984375,0.0,9.2063984375,9.2063984375,9.2063984375,9.2063984375,[9.2063984375],,kWh,2.5851942083306766e-06,2.779341599281842e-07,0.0,2.8631283682588606e-06,,MB,1173.651456,685.637632,0.0,272.62976,241.723904,s,27,0.4128954229354858,0.01529242307168466,0.0003832539162876997,0.015231871604919433,0.015349395179748536,0.015400012493133546,0.016732002429962155,"[0.017195135116577148, 0.015337183952331543, 0.01527785587310791, 0.015151552200317383, 0.015189184188842773, 0.015273632049560547, 0.01521340847015381, 0.015265631675720214, 0.015238688468933106, 0.015089887619018555, 0.015217791557312012, 0.015167584419250488, 0.015257887840270996, 0.015167296409606934, 0.015134943962097167, 0.015333855628967285, 0.01531811237335205, 0.015231871604919433, 0.015058815956115723, 0.015111295700073243, 0.015105600357055665, 0.015134719848632813, 0.015238944053649903, 0.015367712020874023, 0.015255295753479003, 0.015147680282592774, 0.01541385555267334]",tokens/s,16740.31635143601,kWh,5.294841688254921e-07,5.836773728422978e-08,3.50584519597826e-07,9.38436425707548e-07,tokens/kWh,272794185.0797032,MB,1212.997632,698.220544,0.0,285.212672,241.726464,s,27,9.773663330078126,0.36198753074363427,0.005444068189310141,0.3612581787109375,0.3677008544921875,0.3688610595703125,0.37651509216308593,"[0.37907257080078127, 0.3650636596679688, 0.36676181030273436, 0.3591767578125, 0.36351739501953123, 0.36504238891601565, 0.3585615539550781, 0.36923611450195315, 0.3675108032226562, 0.3618763427734375, 0.3612581787109375, 0.35576620483398436, 0.36294467163085936, 0.35851171875, 0.3595638427734375, 0.36077655029296873, 0.35653558349609377, 0.35413751220703127, 0.3528895263671875, 0.35802728271484374, 0.3604525451660156, 0.35514068603515625, 0.360106689453125, 0.3634088134765625, 0.36436312866210935, 0.3659750671386719, 0.3679859313964844]",tokens/s,174.03914402957065,kWh,1.0020358395757905e-05,1.1050991299331516e-06,4.222613583402228e-06,1.534807110909329e-05,tokens/kWh,4104750.333263332,,s,1701,9.757591330528282,0.005736385261921376,0.0001942857408269884,0.005708288192749023,0.005884064197540283,0.005975232124328613,0.006587264060974121,"[0.005910719871520996, 0.005873663902282715, 0.007460864067077637, 0.007280096054077149, 0.00753868818283081, 0.006658592224121094, 0.00586137580871582, 0.005924863815307617, 0.006307360172271729, 0.005833087921142578, 0.005818624019622803, 0.005938655853271484, 0.006181248188018799, 0.0061337599754333495, 0.00601094388961792, 0.005955520153045654, 0.00611737585067749, 0.005822720050811768, 0.0060536317825317385, 0.005855231761932373, 0.0058757119178771975, 
0.005787648200988769, 0.0057916479110717775, 0.005851456165313721, 0.005869056224822998, 0.005723872184753418, 0.0062911357879638675, 0.0058512001037597655, 0.006685791969299316, 0.006082272052764893, 0.006594880104064942, 0.006694591999053955, 0.007360032081604004, 0.005882336139678955, 0.006152416229248047, 0.005880799770355225, 0.005859392166137695, 0.005818943977355957, 0.005867392063140869, 0.005779776096343994, 0.005732351779937744, 0.0057712321281433105, 0.0056929922103881835, 0.0057758398056030275, 0.005726367950439453, 0.005972959995269775, 0.0057487678527832035, 0.005826911926269531, 0.0058158721923828125, 0.00585209608078003, 0.00582860803604126, 0.005774720191955567, 0.005718656063079834, 0.00569484806060791, 0.005658751964569092, 0.0057472000122070314, 0.005703680038452149, 0.00579369592666626, 0.00567952013015747, 0.005793407917022705, 0.005599296092987061, 0.005670911788940429, 0.005682271957397461, 0.0057140798568725585, 0.0058351998329162595, 0.005763040065765381, 0.00576803207397461, 0.005691487789154053, 0.005872288227081299, 0.005795743942260742, 0.005981599807739258, 0.00576358413696289, 0.0059539518356323245, 0.005771423816680908, 0.005641280174255371, 0.005718463897705078, 0.00571020793914795, 0.005818016052246093, 0.0057136001586914065, 0.0057535037994384764, 0.0057341761589050294, 0.005792031764984131, 0.005754816055297852, 0.005742784023284912, 0.005683199882507324, 0.00571782398223877, 0.005681151866912842, 0.005699295997619629, 0.005628191947937012, 0.005685503959655762, 0.005629248142242431, 0.005755136013031006, 0.0055809922218322755, 0.005726016044616699, 0.005695680141448975, 0.005760799884796142, 0.005747039794921875, 0.005739391803741455, 0.005690368175506591, 0.005718016147613526, 0.005965248107910156, 0.00580188798904419, 0.0059062399864196775, 0.006444096088409424, 0.005721888065338135, 0.005709983825683594, 0.005795680046081543, 0.005797311782836914, 0.005728672027587891, 0.005740608215332031, 0.005746016025543213, 0.005664576053619385, 0.005737215995788574, 0.005820608139038086, 0.005769440174102783, 0.005658495903015137, 0.0058056640625, 0.005720384120941162, 0.005807583808898926, 0.0068056640625, 0.0058023681640625, 0.005677055835723877, 0.005894400119781494, 0.00598195219039917, 0.005802303791046143, 0.005726016044616699, 0.00582092809677124, 0.005791744232177734, 0.005599135875701904, 0.00576313591003418, 0.0057136001586914065, 0.005828959941864014, 0.005750783920288086, 0.00587721586227417, 0.005732480049133301, 0.005794496059417725, 0.005856256008148194, 0.006124415874481201, 0.005861216068267823, 0.0057300481796264645, 0.005978591918945312, 0.005797760009765625, 0.0058501439094543455, 0.005708288192749023, 0.005857984066009522, 0.005723999977111816, 0.005848320007324219, 0.005750368118286133, 0.005789792060852051, 0.005706655979156494, 0.005816319942474365, 0.005741631984710694, 0.005716544151306152, 0.00579417610168457, 0.005873023986816406, 0.006058623790740967, 0.0058715839385986325, 0.005922848224639892, 0.005703680038452149, 0.005844607830047608, 0.005836319923400879, 0.0059359679222106936, 0.005830527782440186, 0.0058488001823425294, 0.005841311931610108, 0.005791744232177734, 0.005803167819976807, 0.005814271926879883, 0.005796703815460205, 0.005880000114440918, 0.0058386559486389164, 0.005750783920288086, 0.005635359764099121, 0.005755616188049316, 0.005646336078643799, 0.005808256149291992, 0.00574451208114624, 0.00589577579498291, 0.005871488094329834, 0.005896671772003174, 0.005823584079742432, 0.005929952144622803, 
0.005869599819183349, 0.0058306241035461425, 0.005871071815490723, 0.005683263778686523, 0.0058065600395202635, 0.00566864013671875, 0.005859519958496094, 0.00595958423614502, 0.005689439773559571, 0.005810175895690918, 0.005662720203399658, 0.005857279777526855, 0.0056397438049316405, 0.005824384212493897, 0.005694015979766846, 0.005720064163208007, 0.005705215930938721, 0.0057103362083435055, 0.005785568237304688, 0.005746719837188721, 0.005816095829010009, 0.005731872081756592, 0.005775968074798584, 0.005735648155212402, 0.005672927856445313, 0.006056575775146484, 0.005970111846923828, 0.005916800022125244, 0.005748703956604004, 0.005789696216583252, 0.005754623889923096, 0.005842207908630371, 0.005790143966674805, 0.005696032047271728, 0.005856704235076904, 0.005816895961761475, 0.005660672187805176, 0.005664224147796631, 0.00575267219543457, 0.005591743946075439, 0.005681151866912842, 0.005603328227996827, 0.005597184181213379, 0.00558460807800293, 0.005656864166259766, 0.005642240047454834, 0.005613471984863281, 0.0055207362174987794, 0.005612256050109864, 0.005678815841674805, 0.005658944129943847, 0.005570240020751953, 0.005648320198059082, 0.00567251205444336, 0.005616447925567627, 0.0055890240669250486, 0.005580192089080811, 0.005581056118011475, 0.005599552154541015, 0.005440576076507568, 0.0055668802261352535, 0.005570496082305908, 0.005641056060791016, 0.005582592010498047, 0.005586944103240967, 0.005604767799377441, 0.005710464000701904, 0.0055725760459899906, 0.00565180778503418, 0.0055446081161499025, 0.00555731201171875, 0.005652575969696045, 0.005550816059112549, 0.005736767768859863, 0.005642144203186035, 0.005750688076019287, 0.005588863849639893, 0.005616928100585937, 0.0056631040573120114, 0.005795328140258789, 0.005956575870513916, 0.005750304222106934, 0.005925343990325928, 0.0059269118309021, 0.005938464164733886, 0.005812960147857666, 0.005888127803802491, 0.0064728641510009765, 0.005853184223175049, 0.005824416160583496, 0.005805215835571289, 0.0057422399520874025, 0.005831903934478759, 0.005712672233581543, 0.005753087997436524, 0.006139872074127198, 0.005780767917633056, 0.005746496200561523, 0.005788288116455078, 0.005891136169433594, 0.0058726401329040525, 0.005795072078704834, 0.005835519790649414, 0.005750559806823731, 0.005660960197448731, 0.005654463768005371, 0.005977824211120606, 0.005680895805358886, 0.0057136001586914065, 0.00583513593673706, 0.005752575874328613, 0.0057060160636901855, 0.005611968040466308, 0.005674464225769043, 0.0055240001678466795, 0.00565996789932251, 0.005564832210540772, 0.005657023906707763, 0.0056236801147460935, 0.005690976142883301, 0.005626399993896484, 0.005649792194366455, 0.005627456188201904, 0.005700511932373047, 0.005629951953887939, 0.005840896129608154, 0.005687295913696289, 0.005857279777526855, 0.005713312149047851, 0.0058271679878234865, 0.005835775852203369, 0.005823552131652832, 0.005771200180053711, 0.005745440006256103, 0.005818367958068848, 0.0056929922103881835, 0.005805600166320801, 0.005630879878997803, 0.005717184066772461, 0.005659455776214599, 0.005721216201782227, 0.005729152202606201, 0.005709824085235596, 0.005688864231109619, 0.005757567882537842, 0.005783040046691895, 0.005763423919677734, 0.005666944026947021, 0.005827807903289795, 0.005730239868164062, 0.005812960147857666, 0.005723968029022217, 0.0058609600067138674, 0.005720672130584717, 0.005741631984710694, 0.005741504192352295, 0.005627007961273194, 0.005786496162414551, 0.005758463859558105, 0.005837664127349853, 0.00574019193649292, 
0.0057712640762329105, 0.005697375774383545, 0.005754144191741944, 0.005751423835754395, 0.0060414400100708004, 0.005727935791015625, 0.005715904235839844, 0.005783679962158203, 0.005804704189300537, 0.0058716158866882326, 0.0058240962028503415, 0.0057461438179016115, 0.005985087871551513, 0.0058635520935058595, 0.0058419198989868165, 0.00577788782119751, 0.005937695980072022, 0.0058371200561523435, 0.005797664165496826, 0.005746592044830323, 0.005783552169799804, 0.005873663902282715, 0.005888192176818847, 0.00586240005493164, 0.005771168231964111, 0.005904640197753906, 0.0059071040153503415, 0.005858719825744629, 0.005680863857269287, 0.005798783779144287, 0.005641791820526123, 0.005755328178405762, 0.005924863815307617, 0.005892096042633056, 0.005803743839263916, 0.00572873592376709, 0.005803904056549072, 0.00559935998916626, 0.005689343929290771, 0.005527584075927734, 0.0056722559928894046, 0.0056756801605224606, 0.005623807907104492, 0.005701632022857666, 0.005602367877960205, 0.005626560211181641, 0.005568736076354981, 0.0056360321044921875, 0.0056908798217773435, 0.0056425600051879885, 0.005654335975646973, 0.005599711894989014, 0.005615615844726562, 0.005631999969482422, 0.005595136165618897, 0.005678175926208496, 0.00559830379486084, 0.005606239795684815, 0.005798880100250244, 0.005619711875915527, 0.0055316481590271, 0.005556447982788086, 0.005574111938476563, 0.005685567855834961, 0.005569536209106446, 0.005637119770050049, 0.005550079822540284, 0.005636096000671387, 0.005719999790191651, 0.005677120208740234, 0.005661791801452637, 0.0056351680755615235, 0.005713727951049805, 0.005723135948181152, 0.005655648231506348, 0.005703423976898193, 0.005586080074310303, 0.005753856182098388, 0.005763072013854981, 0.005840000152587891, 0.005685120105743408, 0.005661695957183838, 0.005710048198699951, 0.0056640000343322755, 0.0057831997871398925, 0.005748672008514404, 0.00569871997833252, 0.005685184001922607, 0.005598207950592041, 0.0057721281051635745, 0.005764287948608398, 0.0058089599609375, 0.005639935970306396, 0.005654016017913818, 0.005645055770874024, 0.006430016040802002, 0.005777664184570312, 0.005787392139434814, 0.005937151908874512, 0.00587775993347168, 0.005802239894866943, 0.00583244800567627, 0.0058527359962463376, 0.005986752033233643, 0.005871071815490723, 0.005883872032165527, 0.0057523841857910155, 0.005743328094482422, 0.005757215976715088, 0.005842688083648681, 0.0057729921340942385, 0.0059192957878112795, 0.007460224151611328, 0.0057350401878356935, 0.005697535991668701, 0.005740128040313721, 0.005671328067779541, 0.005715904235839844, 0.0057610878944396975, 0.005865471839904785, 0.005824512004852295, 0.0059658241271972655, 0.007284543991088867, 0.007194623947143554, 0.0062048320770263675, 0.005696288108825684, 0.005818399906158447, 0.005809120178222656, 0.005774335861206055, 0.005824543952941894, 0.005773248195648194, 0.0056583681106567385, 0.005646111965179443, 0.005811744213104248, 0.005630943775177002, 0.005715968132019043, 0.005735648155212402, 0.005722271919250488, 0.005757567882537842, 0.0058122239112854005, 0.005689055919647217, 0.005633376121520996, 0.005747647762298584, 0.005725887775421143, 0.005689568042755127, 0.005625951766967774, 0.005668863773345947, 0.005765279769897461, 0.005689439773559571, 0.005839647769927979, 0.0056780481338500974, 0.005826240062713623, 0.005799424171447754, 0.005794655799865722, 0.005773280143737793, 0.005887455940246582, 0.005878623962402344, 0.005702752113342285, 0.005700064182281494, 0.005668992042541504, 
0.005689343929290771, 0.005705952167510986, 0.005771552085876465, 0.005730400085449219, 0.005645919799804688, 0.005773248195648194, 0.005704224109649658, 0.005757279872894287, 0.005572256088256836, 0.005640192031860352, 0.005695487976074219, 0.005748032093048095, 0.005722815990447998, 0.005778656005859375, 0.005755680084228516, 0.005729440212249756, 0.0056490559577941895, 0.005865600109100342, 0.005644351959228516, 0.006458879947662354, 0.0060659837722778324, 0.006381919860839844, 0.006400352001190186, 0.006125792026519776, 0.006587264060974121, 0.00582751989364624, 0.0058982081413269044, 0.005677055835723877, 0.0057875199317932126, 0.005697216033935547, 0.005755328178405762, 0.005730495929718017, 0.00566864013671875, 0.005779776096343994, 0.005970655918121338, 0.005805088043212891, 0.005928927898406982, 0.005811872005462647, 0.00583897590637207, 0.005742815971374512, 0.005706943988800049, 0.005884064197540283, 0.005943967819213867, 0.006344255924224854, 0.00582041597366333, 0.005812320232391357, 0.005736288070678711, 0.005726655960083008, 0.005678880214691162, 0.00575926399230957, 0.005826144218444824, 0.005808576107025147, 0.0057439360618591305, 0.005745183944702148, 0.005740992069244385, 0.005846720218658447, 0.005668447971343994, 0.005697535991668701, 0.005586880207061768, 0.005744287967681885, 0.005871840000152588, 0.0057857918739318845, 0.005775231838226319, 0.005810719966888428, 0.00569324779510498, 0.0057391681671142575, 0.00569052791595459, 0.00575984001159668, 0.005686944007873535, 0.005716512203216553, 0.005598464012145996, 0.0056592321395874026, 0.005599199771881103, 0.005685088157653809, 0.005678400039672852, 0.005679423809051514, 0.005650400161743164, 0.005603231906890869, 0.005663392066955567, 0.005740543842315674, 0.005578752040863037, 0.0057424001693725584, 0.005692927837371826, 0.005925568103790283, 0.0057487359046936035, 0.005764607906341553, 0.005689856052398682, 0.005715583801269531, 0.00579417610168457, 0.005918560028076172, 0.005779615879058838, 0.005818208217620849, 0.005793759822845459, 0.005858943939208984, 0.005730591773986817, 0.005715456008911133, 0.005583199977874756, 0.00572054386138916, 0.005645279884338379, 0.006104063987731933, 0.00596556806564331, 0.005872191905975342, 0.00578115177154541, 0.0057281599044799805, 0.005750976085662842, 0.00583676815032959, 0.005692416191101074, 0.005777823925018311, 0.005771423816680908, 0.005709568023681641, 0.005681727886199951, 0.005627999782562256, 0.005811520099639892, 0.005700287818908692, 0.005666816234588623, 0.0057274560928344724, 0.005710624217987061, 0.005740799903869629, 0.005696832180023193, 0.005822432041168213, 0.005716447830200195, 0.006154240131378174, 0.005649856090545654, 0.005617280006408692, 0.005655263900756836, 0.005588960170745849, 0.005646592140197754, 0.005789696216583252, 0.005945119857788086, 0.005860032081604004, 0.005746655941009522, 0.005804063796997071, 0.005810175895690918, 0.005834752082824707, 0.005662720203399658, 0.005701504230499268, 0.005734528064727783, 0.005752768039703369, 0.005836863994598388, 0.005726143836975097, 0.005773375988006592, 0.005712031841278076, 0.005754015922546387, 0.005884064197540283, 0.005683807849884033, 0.0056728959083557125, 0.005643680095672608, 0.005660736083984375, 0.005620480060577393, 0.005653471946716309, 0.00565670394897461, 0.0056657280921936035, 0.005796639919281006, 0.005632351875305175, 0.005679359912872314, 0.00571020793914795, 0.005746208190917969, 0.005685152053833008, 0.005617568016052246, 0.007074495792388916, 0.005772831916809082, 
0.005750879764556884, 0.0056323838233947755, 0.005674975872039795, 0.0056935038566589356, 0.005704927921295166, 0.005659359931945801, 0.005736447811126709, 0.0056112961769104, 0.005593311786651611, 0.005971168041229248, 0.005687776088714599, 0.005764863967895508, 0.005671487808227539, 0.005646336078643799, 0.005641632080078125, 0.005644864082336426, 0.005704768180847168, 0.005663712024688721, 0.00567471981048584, 0.00561081600189209, 0.005596127986907959, 0.005744256019592285, 0.005617504119873047, 0.005662879943847656, 0.005668960094451904, 0.00562614393234253, 0.005623807907104492, 0.005536096096038818, 0.005569536209106446, 0.005855167865753174, 0.00576204776763916, 0.005709824085235596, 0.005830560207366943, 0.005688640117645263, 0.005654496192932129, 0.005751616001129151, 0.0056258559226989744, 0.0056358718872070316, 0.005562304019927978, 0.005467648029327392, 0.005528351783752441, 0.0055946559906005855, 0.005503488063812256, 0.0055500478744506835, 0.0055582718849182125, 0.005591040134429932, 0.005567903995513916, 0.005623616218566894, 0.005688096046447754, 0.005537504196166992, 0.005565919876098633, 0.005585984230041504, 0.005578080177307129, 0.005539296150207519, 0.005539008140563965, 0.005671008110046387, 0.005639008045196533, 0.005627744197845459, 0.005672128200531006, 0.005637919902801514, 0.005727327823638916, 0.00566758394241333, 0.005590816020965576, 0.0056193599700927735, 0.005814239978790283, 0.005706463813781738, 0.005637695789337158, 0.005660575866699219, 0.005681407928466797, 0.005609983921051025, 0.005690847873687744, 0.005696063995361328, 0.005760159969329834, 0.005573247909545899, 0.005568511962890625, 0.0057285442352294925, 0.005650144100189209, 0.005678400039672852, 0.005636320114135742, 0.005908063888549805, 0.005710720062255859, 0.005627359867095947, 0.005665184020996094, 0.0056423678398132325, 0.005619711875915527, 0.005573855876922607, 0.005554080009460449, 0.005624320030212402, 0.005581183910369873, 0.005611648082733154, 0.005526495933532715, 0.005485663890838623, 0.005819392204284668, 0.00567523193359375, 0.005688831806182861, 0.005728864192962647, 0.005801663875579834, 0.005812479972839356, 0.0057300481796264645, 0.005770495891571045, 0.005882368087768554, 0.00569484806060791, 0.005675615787506104, 0.00571011209487915, 0.005741856098175049, 0.005696159839630127, 0.0063175358772277835, 0.0057424001693725584, 0.005731200218200684, 0.005689248085021973, 0.00582860803604126, 0.005796895980834961, 0.005788640022277832, 0.005675263881683349, 0.005674111843109131, 0.005622399806976318, 0.005975232124328613, 0.00563481616973877, 0.006016543865203857, 0.005954016208648682, 0.0056928319931030275, 0.005787456035614013, 0.006142911911010743, 0.006055840015411377, 0.005664383888244629, 0.006060480117797851, 0.005666751861572266, 0.005710048198699951, 0.005690815925598144, 0.005872191905975342, 0.005797247886657714, 0.005632224082946777, 0.00571779203414917, 0.0056999998092651365, 0.005692736148834228, 0.005659296035766602, 0.0056863360404968265, 0.005703775882720947, 0.0056943359375, 0.005703423976898193, 0.0056711678504943845, 0.005908480167388916, 0.005691455841064453, 0.005678271770477295, 0.005880832195281982, 0.005586304187774658, 0.005582464218139648, 0.005574495792388916, 0.005626783847808838, 0.005691135883331299, 0.00560972785949707, 0.005609216213226319, 0.005683455944061279, 0.005692863941192627, 0.0056656961441040035, 0.0057158079147338866, 0.005714303970336914, 0.005605375766754151, 0.005658432006835938, 0.00578115177154541, 0.005670559883117676, 
0.005695903778076172, 0.005655007839202881, 0.005793888092041016, 0.0056854720115661625, 0.005665952205657959, 0.005607711791992188, 0.005661151885986328, 0.005741663932800293, 0.005655231952667236, 0.005752831935882568, 0.005769216060638428, 0.005756671905517578, 0.0056440639495849605, 0.00561840009689331, 0.0058221759796142575, 0.0057218561172485355, 0.00575926399230957, 0.005633952140808106, 0.005666912078857422, 0.005631999969482422, 0.005670591831207276, 0.005724480152130127, 0.005646240234375, 0.005709919929504394, 0.005609280109405518, 0.005689536094665527, 0.0056433920860290525, 0.005581920146942139, 0.005704832077026367, 0.005646207809448242, 0.005585696220397949, 0.005619071960449219, 0.005724160194396972, 0.0058353919982910155, 0.005713535785675049, 0.005800415992736816, 0.005639935970306396, 0.005728415966033936, 0.0057346878051757815, 0.0057177281379699706, 0.005634047985076904, 0.0055625920295715335, 0.005617440223693848, 0.00567683219909668, 0.006109151840209961, 0.005718400001525879, 0.005635968208312988, 0.005701824188232422, 0.005625088214874268, 0.005675807952880859, 0.005645088195800781, 0.005587967872619629, 0.005594687938690186, 0.0055812478065490724, 0.005576896190643311, 0.0055987839698791505, 0.005556479930877685, 0.005746047973632812, 0.005715231895446777, 0.005631872177124023, 0.005606688022613525, 0.005619616031646729, 0.005653247833251953, 0.005576608180999756, 0.005703680038452149, 0.005640063762664795, 0.0056200318336486815, 0.00566048002243042, 0.005656032085418701, 0.005647200107574463, 0.005607103824615479, 0.005635072231292724, 0.005667840003967285, 0.005600927829742431, 0.005677440166473389, 0.005540832042694092, 0.005739520072937012, 0.005678880214691162, 0.005706079959869385, 0.005662591934204102, 0.005587999820709229, 0.005653471946716309, 0.005666079998016357, 0.005781280040740967, 0.00571068811416626, 0.0056741762161254886, 0.0056984639167785645, 0.005688672065734863, 0.005591072082519531, 0.005597599983215332, 0.005646560192108154, 0.005711328029632568, 0.005610015869140625, 0.005853184223175049, 0.005734399795532226, 0.005754848003387451, 0.005685023784637451, 0.0056711678504943845, 0.005668863773345947, 0.005719359874725342, 0.005694143772125244, 0.005647391796112061, 0.00561407995223999, 0.005662240028381347, 0.005654943943023682, 0.005761055946350097, 0.005772863864898682, 0.005684256076812744, 0.005666719913482666, 0.005783552169799804, 0.005697055816650391, 0.00578607988357544, 0.006203392028808594, 0.005683199882507324, 0.005644608020782471, 0.006286111831665039, 0.006204127788543701, 0.0056219520568847655, 0.005631999969482422, 0.005590047836303711, 0.0059304962158203125, 0.005749248027801514, 0.005719808101654053, 0.005808224201202392, 0.005753151893615723, 0.005813568115234375, 0.005625504016876221, 0.005857312202453614, 0.005807072162628174, 0.005865183830261231, 0.005678976058959961, 0.005727583885192871, 0.006049856185913086, 0.005711743831634522, 0.005790048122406006, 0.005781888008117676, 0.005753119945526123, 0.005679103851318359, 0.005639904022216797, 0.005580319881439209, 0.005624127864837646, 0.005796288013458252, 0.0056501121520996095, 0.005687615871429443, 0.005715360164642334, 0.0056179518699646, 0.005624127864837646, 0.005601280212402344, 0.005890048027038574, 0.005814527988433838, 0.005656288146972656, 0.0055912318229675295, 0.0057199039459228515, 0.005949120044708252, 0.005708096027374268, 0.005658624172210694, 0.005570591926574707, 0.005780511856079101, 0.005718976020812988, 0.00562169599533081, 0.005593023777008057, 
0.005662847995758056, 0.005761119842529297, 0.0056278080940246585, 0.005668863773345947, 0.005640192031860352, 0.005698751926422119, 0.005751776218414307, 0.005626912117004394, 0.0055528321266174316, 0.005662847995758056, 0.005664095878601074, 0.005782176017761231, 0.005850719928741455, 0.00581708812713623, 0.005570208072662354, 0.00579804801940918, 0.005651487827301026, 0.005741087913513183, 0.0057387838363647465, 0.005639935970306396, 0.005644544124603272, 0.005718207836151123, 0.005756768226623535, 0.0056897602081298826, 0.005645631790161133, 0.005655231952667236, 0.00575267219543457, 0.005703839778900146, 0.005601280212402344, 0.005578688144683838, 0.00571123218536377, 0.005603903770446777, 0.005676511764526367, 0.00556713581085205, 0.0056315197944641114, 0.005593567848205566, 0.005623807907104492, 0.005636223793029785, 0.005654655933380127, 0.005633791923522949, 0.005595136165618897, 0.005642240047454834, 0.005738495826721191, 0.0056217598915100095, 0.00561356782913208, 0.00555353593826294, 0.005530111789703369, 0.00559219217300415, 0.005587615966796875, 0.005667168140411377, 0.005647840023040772, 0.0056746239662170414, 0.005757343769073486, 0.005595104217529297, 0.0056234879493713376, 0.005483359813690186, 0.005482560157775879, 0.005533728122711181, 0.005468063831329346, 0.0055848960876464845, 0.005668863773345947, 0.0056455998420715334, 0.005663455963134766, 0.00561571216583252, 0.005633952140808106, 0.005572864055633545, 0.0055723519325256345, 0.0057794561386108395, 0.0056583361625671385, 0.005672287940979004, 0.005954080104827881, 0.005648799896240234, 0.005780896186828613, 0.005732319831848145, 0.005802624225616455, 0.005648032188415527, 0.005724512100219727, 0.005675168037414551, 0.005886911869049072, 0.005643263816833496, 0.005682943820953369, 0.005708064079284668, 0.005591040134429932, 0.005631872177124023, 0.005651679992675782, 0.0057381119728088376, 0.0056854720115661625, 0.005669536113739014, 0.005760191917419433, 0.005675039768218994, 0.0056509442329406735, 0.005595424175262451, 0.005675007820129394, 0.005678592205047607, 0.005632415771484375, 0.005662816047668457, 0.00558678388595581, 0.005597152233123779, 0.005669087886810303, 0.005681119918823242, 0.005719488143920899, 0.005525375843048096, 0.0055343999862670894, 0.005507232189178467, 0.0054735360145568845, 0.005507679939270019, 0.005519360065460205, 0.0055764799118041995, 0.005572800159454346, 0.005535679817199707, 0.005608767986297608, 0.005574495792388916, 0.005628064155578614, 0.005587744235992432, 0.0055316481590271, 0.005561600208282471, 0.0055095357894897464, 0.005429887771606445, 0.005610527992248535, 0.005625919818878174, 0.005650847911834717, 0.005495264053344727, 0.005518591880798339, 0.006619616031646729, 0.005715839862823487, 0.005684447765350342, 0.005535007953643799, 0.005629600048065186, 0.0056495680809021, 0.005546463966369629, 0.005934624195098877, 0.005540703773498535, 0.005586944103240967, 0.005517312049865723, 0.005482495784759522, 0.005610752105712891, 0.005638368129730224, 0.005591455936431885, 0.005509183883666992, 0.005438720226287842, 0.005536479949951172, 0.005471776008605957, 0.005907008171081543, 0.005520480155944824, 0.005557151794433593, 0.005569888114929199, 0.005507487773895264, 0.005478432178497314, 0.005529056072235108, 0.005523871898651123, 0.005578752040863037, 0.0056639680862426755, 0.005544095993041992, 0.0055773439407348635, 0.005514272212982178, 0.005762015819549561, 0.005548031806945801, 0.005578752040863037, 0.005597184181213379, 0.005527167797088623, 0.00553001594543457, 
0.0055766720771789555, 0.005575839996337891, 0.005558591842651367, 0.005425024032592774, 0.005421440124511719, 0.005458432197570801, 0.0054288959503173825, 0.005437568187713623, 0.005494688034057617, 0.0055032958984375, 0.005545887947082519, 0.005906303882598877, 0.0060067839622497555, 0.005545983791351319, 0.0054897918701171874, 0.005563295841217041, 0.005502336025238037, 0.005773920059204102, 0.005527552127838135, 0.0054908480644226074, 0.00562169599533081, 0.005598720073699951, 0.00558460807800293, 0.005524159908294678, 0.005503232002258301, 0.005541632175445557, 0.0055808000564575196, 0.005593215942382813, 0.005598176002502442, 0.005548768043518067, 0.00566918420791626, 0.005668799877166748, 0.005601280212402344, 0.005612736225128174, 0.005544064044952393, 0.005636352062225342, 0.005603712081909179, 0.005564479827880859, 0.005668799877166748, 0.005576704025268555, 0.0056564159393310545, 0.005701568126678467, 0.00574076795578003, 0.005659840106964111, 0.0058837437629699705, 0.005711071968078614, 0.005531744003295898, 0.005568160057067871, 0.005633503913879395, 0.0056236801147460935, 0.00567577600479126, 0.005619328022003173, 0.005599584102630615, 0.005525504112243652, 0.005708799839019775, 0.005729119777679443, 0.005572768211364746, 0.0056852478981018065, 0.005578976154327392, 0.005652128219604492, 0.007937280178070069, 0.005711904048919678, 0.005651360034942627, 0.005718976020812988, 0.005745664119720459, 0.00572438383102417, 0.005707231998443604, 0.005763391971588135, 0.005681151866912842, 0.005571936130523682, 0.005623456001281738, 0.005505119800567627, 0.005493663787841797, 0.005615200042724609, 0.005575071811676025, 0.005596767902374267, 0.005502848148345947, 0.005459551811218262, 0.005522367954254151, 0.005536992073059082, 0.005472928047180176, 0.005479743957519531, 0.005571392059326172, 0.005601280212402344, 0.005517312049865723, 0.005527552127838135, 0.005584415912628174, 0.0055220799446105955, 0.005631936073303223, 0.0057870402336120605, 0.005799456119537354, 0.005651391983032226, 0.0058280320167541505, 0.005769792079925537, 0.005900288105010986, 0.005722400188446045, 0.005760064125061035, 0.006036128044128418, 0.005672959804534912, 0.005636127948760986, 0.0055912318229675295, 0.005561247825622558, 0.0057264318466186525, 0.005623519897460938, 0.005614528179168701, 0.005521759986877441, 0.005674655914306641, 0.005750368118286133, 0.005639776229858399, 0.0055550079345703125, 0.005571616172790527, 0.0055528321266174316, 0.005615551948547363, 0.005868224143981934, 0.0061931519508361815, 0.005758975982666016, 0.005726463794708252, 0.005715936183929443, 0.00564358377456665, 0.005744448184967041, 0.005759647846221924, 0.005676735877990722, 0.005642176151275635, 0.005712255954742431, 0.0057077760696411135, 0.005650239944458008, 0.005625152111053467, 0.005600128173828125, 0.005628992080688477, 0.005583903789520264, 0.005525407791137696, 0.0061972479820251464, 0.005744639873504639, 0.005703807830810547, 0.005615839958190918, 0.005623616218566894, 0.006690656185150147, 0.0063201279640197755, 0.005638144016265869, 0.005682240009307862, 0.0056431999206542965, 0.005640416145324707, 0.0055848960876464845, 0.005573728084564209, 0.005675712108612061, 0.005687104225158691, 0.005693215847015381, 0.00559555196762085, 0.005818367958068848, 0.005670911788940429, 0.0057487359046936035, 0.005616640090942383, 0.005540768146514893, 0.005918335914611816, 0.006000351905822754, 0.005570879936218262, 0.00557696008682251, 0.005504223823547363, 0.005525599956512451, 0.005532544136047363, 
0.0059136638641357426, 0.005614528179168701, 0.005609663963317871, 0.005754432201385498, 0.0056579518318176265, 0.005679103851318359, 0.005671840190887451, 0.005709568023681641, 0.005652063846588134, 0.005624288082122802, 0.005677279949188233, 0.005586495876312256, 0.005678847789764404, 0.005651103973388672, 0.005627071857452393, 0.005670911788940429, 0.0058252477645874025, 0.005834303855895996, 0.005791296005249023, 0.005822912216186523, 0.005709375858306885, 0.005700160026550293, 0.005744959831237793, 0.0056564478874206545, 0.005613791942596436, 0.005586847782135009, 0.0055808000564575196, 0.005660960197448731, 0.00556774377822876, 0.005546463966369629, 0.0056258559226989744, 0.005581120014190674, 0.005660352230072022, 0.005639776229858399, 0.005665184020996094, 0.005593088150024414, 0.005545983791351319, 0.005752639770507812, 0.005754623889923096, 0.00573689603805542, 0.005708960056304932, 0.005611968040466308, 0.0056286721229553225, 0.0055517759323120116, 0.0057285442352294925, 0.005678815841674805, 0.005638144016265869, 0.005598624229431152, 0.005583456039428711, 0.005902016162872314, 0.005681471824645996, 0.00555622386932373, 0.005601280212402344, 0.005598464012145996, 0.005567232131958008, 0.005492735862731934, 0.005658080101013183, 0.005630496025085449, 0.005585216045379639, 0.005572288036346435, 0.00551043176651001, 0.005609407901763916, 0.0056936640739440915, 0.0055710082054138185, 0.0055669121742248535, 0.0055016961097717285, 0.005570911884307862, 0.005493535995483399, 0.005482304096221924, 0.0055584959983825686, 0.005528448104858398, 0.005548384189605713, 0.005589791774749756, 0.005562143802642822, 0.005588223934173584, 0.005535871982574463, 0.005549920082092285, 0.005577792167663574, 0.005748447895050049, 0.005677504062652588, 0.00572054386138916, 0.005702847957611084, 0.005665599822998047, 0.005568511962890625, 0.005612927913665772, 0.00556060791015625, 0.005634560108184815, 0.005922560214996338, 0.00571398401260376, 0.005818399906158447, 0.005887968063354492, 0.0058196158409118655, 0.005755167961120605, 0.005667359828948975, 0.005736447811126709, 0.005895520210266113, 0.005777152061462403, 0.005622687816619873, 0.005660511970520019, 0.005685408115386963, 0.005712031841278076, 0.005584640026092529, 0.005602975845336914, 0.005755328178405762, 0.005775199890136719, 0.00588592004776001, 0.005797152042388916, 0.00575596809387207, 0.005703519821166992, 0.005697247982025147, 0.005614943981170654, 0.0056431999206542965, 0.005660128116607666, 0.005675039768218994, 0.005708191871643066, 0.005576863765716553, 0.005556447982788086, 0.005586656093597412, 0.005597343921661377, 0.00561081600189209, 0.00559558391571045, 0.005652575969696045, 0.005646175861358642, 0.005640351772308349, 0.005675007820129394, 0.005724415779113769, 0.0057259521484375, 0.005804031848907471, 0.00572211217880249, 0.005730463981628418, 0.0063448319435119626, 0.005775199890136719, 0.005620607852935791, 0.00566537618637085, 0.0056897602081298826, 0.005753983974456787, 0.005651328086853027, 0.0056787519454956055, 0.005702176094055176, 0.005702591896057129, 0.0057227201461791995, 0.005687583923339843, 0.005830848217010498, 0.005758336067199707, 0.005705408096313477, 0.005896768093109131, 0.00583513593673706, 0.005758272171020508, 0.005808832168579102, 0.005780543804168701, 0.005784512042999268, 0.00588595199584961, 0.005814271926879883, 0.0057654080390930175, 0.0057955517768859864, 0.0058278398513793945, 0.005778175830841064, 0.0057833919525146485, 0.0060620479583740235, 0.0057379198074340824, 0.005765279769897461, 
0.005677055835723877, 0.005753439903259277, 0.005752831935882568, 0.005638144016265869, 0.005717855930328369, 0.00560752010345459, 0.005680543899536133, 0.005581471920013428, 0.005683199882507324, 0.0056828479766845704, 0.005622111797332764, 0.006095935821533203, 0.005772192001342774, 0.005709856033325195, 0.005646336078643799, 0.005636000156402588, 0.005659840106964111, 0.005659552097320557, 0.005779104232788086, 0.005722784042358399, 0.0057136001586914065, 0.005795008182525635, 0.005833536148071289, 0.005724160194396972, 0.0058388481140136715, 0.00576416015625, 0.005747231960296631, 0.005802400112152099, 0.005745920181274414, 0.005791776180267334, 0.005722847938537598, 0.005655903816223145, 0.005783296108245849, 0.005665855884552002, 0.0058224000930786135, 0.005751711845397949, 0.0058921918869018555, 0.005890975952148438, 0.005788991928100586, 0.005731008052825928, 0.005818367958068848, 0.005785759925842285, 0.005662559986114502, 0.005654623985290527, 0.005750815868377685, 0.005972928047180176, 0.006342271804809571, 0.005990655899047852, 0.005860352039337159, 0.006075456142425537, 0.00619923210144043, 0.005904384136199951, 0.0058122239112854005, 0.005863423824310303, 0.005844128131866455, 0.005802847862243652, 0.005836319923400879, 0.005833119869232178, 0.005991968154907226, 0.00580457592010498, 0.005822463989257813, 0.0056969280242919924, 0.0057227201461791995, 0.00569158411026001, 0.005775392055511475, 0.0057420477867126465, 0.0057610878944396975, 0.005787903785705566, 0.005797887802124023, 0.005945343971252442, 0.00576688003540039, 0.005896480083465576, 0.0058245759010314944, 0.0056863360404968265, 0.005634943962097168, 0.005703584194183349, 0.005750592231750488, 0.005744927883148193, 0.005732351779937744, 0.005871551990509033, 0.00571398401260376, 0.005701824188232422, 0.005708672046661377, 0.005680352210998535, 0.0055682239532470704, 0.005642240047454834, 0.005756447792053223, 0.005710303783416748, 0.005726208209991455, 0.005703392028808593, 0.00572649621963501, 0.005734399795532226, 0.0056869759559631345, 0.005607744216918945, 0.005628128051757813, 0.0056358718872070316, 0.005597184181213379, 0.005745728015899659, 0.0056943678855896, 0.0056891517639160154, 0.005608831882476807, 0.005642271995544433, 0.005700416088104248, 0.005733376026153565, 0.005839647769927979, 0.005650176048278809, 0.005707327842712402, 0.005702367782592773, 0.005777408123016357, 0.005720064163208007, 0.005668863773345947, 0.005696800231933594, 0.0058245759010314944, 0.005778175830841064, 0.0057160000801086425, 0.005684512138366699, 0.0056735677719116214, 0.005703680038452149, 0.005870848178863525, 0.005674943923950195, 0.005749567985534668, 0.0057058558464050295, 0.005717631816864014, 0.005728864192962647, 0.005686944007873535, 0.005709824085235596, 0.005593279838562012, 0.005685056209564209, 0.00562604808807373, 0.005759039878845215, 0.0059552001953125, 0.005717440128326416, 0.0057842559814453125, 0.005834688186645507, 0.005648447990417481, 0.006016479969024658, 0.0060581440925598145, 0.005889920234680176, 0.00601366376876831, 0.005861152172088623, 0.006049791812896729, 0.005840288162231445, 0.005810783863067627, 0.005762847900390625, 0.006117728233337403, 0.005756800174713135, 0.0057077760696411135, 0.006618815898895264, 0.0058464322090148926, 0.005872543811798096, 0.005754879951477051, 0.00606822395324707, 0.005683199882507324, 0.005872992038726807, 0.00574121618270874, 0.005752831935882568, 0.005636096000671387, 0.005708992004394531, 0.005715775966644287, 0.005893439769744873, 0.005837855815887451, 
0.005720416069030762, 0.005783872127532959, 0.005695775985717773, 0.005803423881530762, 0.006029759883880615, 0.005784639835357666, 0.005681151866912842, 0.005748672008514404, 0.005866432189941406, 0.005734399795532226, 0.005843167781829834, 0.005695583820343017, 0.005899680137634277, 0.005759488105773926, 0.005865568161010742, 0.005846303939819336, 0.005745344161987305, 0.005791296005249023, 0.005789152145385742, 0.005816864013671875, 0.005744927883148193, 0.005768896102905274, 0.005714431762695312, 0.005774432182312011, 0.005802080154418945, 0.0057608962059021, 0.005710783958435058, 0.005720064163208007, 0.005750239849090576, 0.0058189120292663574, 0.0058121919631958004, 0.005861536026000976, 0.005859200000762939, 0.005797887802124023, 0.005879807949066162, 0.005863423824310303, 0.006130847930908203, 0.005933152198791504, 0.005773280143737793, 0.0059827837944030765, 0.005888224124908447, 0.005967360019683838, 0.0057851519584655765, 0.006011231899261475, 0.005838592052459717, 0.005925727844238281, 0.00570739221572876, 0.005943456172943115, 0.005828351974487305, 0.005982687950134277, 0.005847040176391601, 0.005904223918914795, 0.005791264057159424, 0.005915264129638672, 0.005824351787567138, 0.005896416187286377, 0.00582585620880127, 0.005827072143554687, 0.0059222722053527835, 0.005780128002166748, 0.00591974401473999, 0.005761312007904053, 0.005785344123840332, 0.005868000030517578, 0.005896704196929932, 0.005739967823028564, 0.005823391914367676, 0.0058607678413391115, 0.005830912113189697, 0.005728032112121582, 0.005730688095092773, 0.005762752056121826]",tokens/s,174.3258087350037,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,809.111552,673.05472,0.0,270.532608,250.474496,s,1,9.99446484375,9.99446484375,0.0,9.99446484375,9.99446484375,9.99446484375,9.99446484375,[9.99446484375],,kWh,2.5515368666655525e-06,2.7421810148109097e-07,9.641674380000964e-07,3.78992240614674e-06,,MB,1287.139328,687.734784,0.0,272.62976,241.723904,s,34,0.22012713670730594,0.00647432755021488,0.0001310529125736693,0.006466223955154419,0.006632579231262207,0.0066929537534713745,0.006716196904182434,"[0.006717728137969971, 0.006713088035583496, 0.006566751956939698, 0.006505663871765137, 0.006559711933135986, 0.0066072001457214355, 0.00645692777633667, 0.006570047855377197, 0.006581888198852539, 0.0066054720878601075, 0.006643455982208252, 0.006475520133972168, 0.006341631889343262, 0.006415103912353516, 0.00625164794921875, 0.006573728084564209, 0.006285056114196778, 0.006439136028289795, 0.006372223854064942, 0.006439231872558594, 0.006486847877502442, 0.006682112216949463, 0.006486464023590088, 0.006589280128479004, 0.00654310417175293, 0.006181312084197998, 0.006427519798278809, 0.006307807922363282, 0.00640227222442627, 0.006365920066833496, 0.006448256015777588, 0.006361184120178223, 0.006316160202026367, 
0.0064076800346374516]",tokens/s,39540.78597575797,kWh,1.820260741994162e-07,2.007414735206369e-08,1.1671995890580389e-07,3.188201804572837e-07,tokens/kWh,802960463.8979229,MB,1333.501952,700.317696,0.0,285.212672,241.726464,s,34,10.0343466796875,0.29512784352022053,0.006782802929479167,0.2937698669433594,0.3041244964599609,0.3055077163696289,0.3115900772094727,"[0.3039126892089844, 0.30786886596679686, 0.31342291259765626, 0.304236328125, 0.2973733215332031, 0.293662841796875, 0.296540771484375, 0.30184933471679687, 0.30243072509765623, 0.29950091552734376, 0.30421527099609375, 0.2935286865234375, 0.28969631958007813, 0.290993896484375, 0.29361053466796877, 0.28981536865234375, 0.29609921264648437, 0.29586236572265623, 0.29026763916015624, 0.28865032958984377, 0.29865036010742185, 0.3000317993164062, 0.29358224487304685, 0.2959901123046875, 0.29032742309570314, 0.28548785400390625, 0.2829889831542969, 0.28865939331054685, 0.2936240234375, 0.28783807373046877, 0.29387689208984374, 0.29001669311523437, 0.28404888916015625, 0.29568560791015625]",tokens/s,213.46681237713713,kWh,7.99103398170337e-06,8.812656443552803e-07,3.543020623205257e-06,1.241532024926391e-05,tokens/kWh,5074375.749891365,,s,2142,10.01479509544373,0.0046754412210288174,0.00017943440298060002,0.0046612641811370845,0.004857171106338501,0.004912644839286805,0.005178657145500183,"[0.004298592090606689, 0.004630815982818603, 0.004578815937042236, 0.004839935779571533, 0.004794367790222168, 0.004696127891540528, 0.004769728183746338, 0.0048230400085449215, 0.004681727886199951, 0.004911136150360107, 0.004701183795928955, 0.004625631809234619, 0.004805984020233155, 0.0046291518211364745, 0.004865439891815185, 0.004607808113098144, 0.004678207874298096, 0.004818848133087158, 0.004695583820343017, 0.004704800128936768, 0.004703519821166992, 0.004656864166259766, 0.004719552040100098, 0.0051129918098449705, 0.004870528221130371, 0.004994847774505615, 0.004682496070861817, 0.0047223358154296876, 0.004895071983337402, 0.00491315221786499, 0.0048557758331298825, 0.004874271869659424, 0.00484768009185791, 0.004990592002868653, 0.00483958387374878, 0.0048334717750549315, 0.004990623950958252, 0.004954592227935791, 0.00490070390701294, 0.004989984035491943, 0.005204959869384766, 0.004972896099090576, 0.00501532793045044, 0.004916768074035644, 0.004908736228942871, 0.004848288059234619, 0.004890624046325683, 0.005015488147735596, 0.004909279823303222, 0.0047532482147216795, 0.004725056171417236, 0.004887648105621338, 0.004738815784454346, 0.004780896186828613, 0.004771520137786865, 0.004706079959869385, 0.004812511920928955, 0.0048525438308715824, 0.004826560020446778, 0.004738944053649902, 0.004801216125488281, 0.004894911766052246, 0.00485152006149292, 0.004428768157958984, 0.004757120132446289, 0.004796576023101806, 0.004886464118957519, 0.004835648059844971, 0.00491267204284668, 0.004780640125274658, 0.0047487359046936035, 0.005082719802856446, 0.005046559810638428, 0.004778528213500976, 0.004810751914978028, 0.004818592071533203, 0.004970848083496094, 0.004889887809753418, 0.00487497615814209, 0.004948031902313232, 0.0048782720565795894, 0.004880671977996826, 0.004890143871307373, 0.0049656319618225095, 0.004887104034423828, 0.005108096122741699, 0.0048496642112731934, 0.0049192957878112795, 0.00488969612121582, 0.004976607799530029, 0.004905920028686523, 0.004808832168579102, 0.00487820816040039, 0.004851808071136474, 0.0049622077941894535, 0.0049090561866760255, 0.0048774399757385255, 0.005129183769226074, 0.005061632156372071, 
0.004817952156066894, 0.004861824035644531, 0.004954304218292236, 0.0049714879989624025, 0.0048353919982910155, 0.004858047962188721, 0.004866655826568604, 0.004853759765625, 0.004825088024139404, 0.00485811185836792, 0.005178880214691162, 0.004810592174530029, 0.004886879920959473, 0.005436575889587402, 0.00481606388092041, 0.004769440174102783, 0.004843584060668945, 0.004777632236480713, 0.004792448043823242, 0.004870368003845215, 0.004761760234832764, 0.004797728061676026, 0.004745791912078858, 0.004776159763336182, 0.004712287902832031, 0.0047738242149353025, 0.004813119888305664, 0.004468736171722412, 0.004921343803405762, 0.004869440078735352, 0.004793024063110352, 0.0047833919525146484, 0.004807072162628174, 0.004900288105010986, 0.004856768131256103, 0.004847839832305908, 0.005944863796234131, 0.0049480957984924315, 0.004818304061889649, 0.005890079975128174, 0.00515337610244751, 0.0055155520439147945, 0.004822495937347412, 0.004808191776275635, 0.004741695880889893, 0.004679872035980225, 0.004783904075622559, 0.0047861762046813965, 0.004763936042785645, 0.0048065600395202634, 0.004830880165100098, 0.004759679794311524, 0.004770112037658691, 0.004798463821411133, 0.004785696029663086, 0.004970975875854492, 0.004769792079925537, 0.00470630407333374, 0.0047820801734924315, 0.00480460786819458, 0.004843679904937745, 0.0048269758224487305, 0.004841536045074463, 0.004834847927093506, 0.004702623844146729, 0.004712224006652832, 0.004680191993713379, 0.004670176029205322, 0.004662496089935303, 0.004818496227264405, 0.005644576072692871, 0.006469535827636719, 0.005855264186859131, 0.007012351989746094, 0.0055830078125, 0.004876128196716309, 0.005144576072692871, 0.00501145601272583, 0.004788224220275879, 0.004753407955169678, 0.00491212797164917, 0.005067776203155518, 0.0047636480331420894, 0.004831136226654053, 0.004886623859405518, 0.0047923197746276855, 0.004768095970153809, 0.004778848171234131, 0.004795008182525635, 0.004784319877624512, 0.004401535987854004, 0.004730720043182373, 0.004695295810699463, 0.004799615859985351, 0.004730815887451172, 0.004836671829223633, 0.004792895793914795, 0.004796256065368652, 0.004816895961761475, 0.004812640190124512, 0.004780543804168701, 0.004814752101898193, 0.004820735931396485, 0.004736544132232666, 0.004768223762512207, 0.004902912139892578, 0.0048886399269104, 0.004908991813659668, 0.004911104202270508, 0.004929535865783692, 0.005021312236785889, 0.004807040214538574, 0.004947968006134033, 0.004880000114440918, 0.004882463932037354, 0.00491545581817627, 0.0047719359397888185, 0.0047209601402282714, 0.004756447792053223, 0.004752096176147461, 0.004765696048736572, 0.004730656147003174, 0.004706528186798096, 0.004671487808227539, 0.0048412480354309085, 0.004832608222961426, 0.004882751941680908, 0.004753983974456787, 0.004746367931365967, 0.004719488143920899, 0.004683775901794434, 0.0048577280044555665, 0.004788352012634277, 0.004905248165130615, 0.004746784210205078, 0.004862143993377686, 0.004790272235870361, 0.004785280227661132, 0.004776832103729248, 0.004949567794799804, 0.004849728107452393, 0.005458303928375244, 0.004898752212524414, 0.004772064208984375, 0.004874080181121826, 0.004816127777099609, 0.004698880195617676, 0.004936927795410156, 0.004813600063323975, 0.004780128002166748, 0.0048085122108459475, 0.004853856086730957, 0.0049574079513549805, 0.004437056064605713, 0.004791327953338623, 0.004651103973388672, 0.004740831851959229, 0.004829535961151123, 0.00468284797668457, 0.0047214398384094235, 0.004738944053649902, 
0.004737023830413818, 0.00464896011352539, 0.004673535823822021, 0.004666528224945068, 0.004658271789550781, 0.004680607795715332, 0.004784992218017578, 0.004746592044830323, 0.0047571520805358885, 0.004707136154174805, 0.0048837437629699705, 0.004815775871276856, 0.004721920013427735, 0.004731135845184326, 0.004759903907775879, 0.004617760181427002, 0.004589344024658203, 0.0046295361518859865, 0.004658048152923584, 0.004624767780303955, 0.004729407787322998, 0.004660575866699219, 0.004654816150665283, 0.004651968002319336, 0.004638463973999024, 0.004663551807403564, 0.004632575988769531, 0.004795680046081543, 0.004704800128936768, 0.004695231914520264, 0.004580095767974853, 0.00464134407043457, 0.004709887981414795, 0.00463481616973877, 0.004712063789367676, 0.004639167785644532, 0.004601791858673095, 0.004605984210968018, 0.004710400104522705, 0.0046219840049743655, 0.004642943859100342, 0.004638912200927735, 0.004664480209350586, 0.004641632080078125, 0.004611519813537598, 0.004653600215911866, 0.004675615787506104, 0.0046386241912841795, 0.004687039852142334, 0.005743519783020019, 0.005199935913085938, 0.004726208209991455, 0.004948480129241943, 0.004738175868988037, 0.00462937593460083, 0.004239840030670166, 0.00467574405670166, 0.004695968151092529, 0.0046813440322875976, 0.0046304001808166505, 0.004620672225952149, 0.00455238389968872, 0.004541952133178711, 0.004604928016662598, 0.0046078720092773435, 0.004592832088470459, 0.004618144035339356, 0.004643743991851807, 0.004747263908386231, 0.00471395206451416, 0.004657695770263672, 0.004730016231536865, 0.004647776126861573, 0.004761600017547608, 0.004607999801635742, 0.004630559921264648, 0.00459881591796875, 0.004639039993286133, 0.004655807971954346, 0.004593535900115967, 0.004599711894989014, 0.004575391769409179, 0.004620160102844238, 0.004755008220672608, 0.00464134407043457, 0.004859807968139648, 0.004634719848632812, 0.0045445761680603025, 0.004532159805297852, 0.004718815803527832, 0.004707231998443604, 0.004760575771331787, 0.004650879859924317, 0.00466864013671875, 0.004572959899902343, 0.004671711921691895, 0.004709248065948486, 0.004645887851715088, 0.004657536029815674, 0.004717343807220459, 0.004625247955322266, 0.004597983837127686, 0.004663008213043213, 0.004711391925811767, 0.00464086389541626, 0.004614048004150391, 0.0046460161209106446, 0.004659840106964111, 0.004657408237457275, 0.004673535823822021, 0.0046382398605346676, 0.0045920958518981935, 0.004798783779144287, 0.004761184215545654, 0.004732287883758545, 0.004741407871246338, 0.004747647762298584, 0.004663360118865967, 0.004303487777709961, 0.0047389760017395016, 0.004776127815246582, 0.004769951820373535, 0.004759359836578369, 0.004697472095489502, 0.004664031982421875, 0.004785632133483887, 0.00481932783126831, 0.004656544208526611, 0.00467519998550415, 0.004678656101226806, 0.004685311794281006, 0.004678112030029297, 0.005051487922668457, 0.004688992023468018, 0.004693920135498047, 0.004708576202392578, 0.004670432090759277, 0.004735936164855957, 0.004697216033935547, 0.004604479789733887, 0.004612192153930664, 0.004650559902191162, 0.004901567935943604, 0.004822783946990967, 0.004634624004364014, 0.004703296184539795, 0.004799424171447754, 0.004795711994171142, 0.004906816005706787, 0.004786719799041748, 0.004712800025939942, 0.0047060480117797855, 0.004635968208312988, 0.004626560211181641, 0.004616576194763184, 0.004804319858551025, 0.004860991954803467, 0.004834976196289063, 0.004687679767608643, 0.004717887878417969, 0.004684000015258789, 
0.004565919876098633, 0.004532063961029053, 0.004587423801422119, 0.004769792079925537, 0.004617695808410645, 0.004590112209320069, 0.004579328060150147, 0.00459990406036377, 0.0045322561264038086, 0.004539711952209473, 0.004643551826477051, 0.004829023838043213, 0.004689919948577881, 0.0046408319473266605, 0.0046135358810424805, 0.004722591876983643, 0.00464896011352539, 0.004827775955200195, 0.004722655773162842, 0.004666463851928711, 0.004394495964050293, 0.00469708776473999, 0.004601696014404297, 0.004663487911224365, 0.0047225279808044435, 0.00469532823562622, 0.00466815996170044, 0.004675168037414551, 0.0046984639167785645, 0.004861760139465332, 0.00499235200881958, 0.004743743896484375, 0.004839744091033936, 0.005140096187591553, 0.004941215991973877, 0.004848351955413818, 0.004708960056304932, 0.005097439765930176, 0.005760704040527344, 0.004759552001953125, 0.0048661441802978515, 0.004831136226654053, 0.0047859840393066405, 0.004872576236724853, 0.004779871940612793, 0.0047247037887573244, 0.004739071846008301, 0.004812255859375, 0.004710591793060302, 0.004675936222076416, 0.004703328132629394, 0.004735167980194092, 0.004692512035369873, 0.004786367893218994, 0.004712287902832031, 0.00483465576171875, 0.00469484806060791, 0.004724736213684082, 0.00495411205291748, 0.005070847988128662, 0.004722591876983643, 0.004667488098144532, 0.0046284799575805665, 0.0047513599395751956, 0.004673535823822021, 0.00467574405670166, 0.004724575996398926, 0.004683616161346435, 0.00457916784286499, 0.004649280071258545, 0.004753215789794922, 0.004718976020812988, 0.004686719894409179, 0.0049143037796020505, 0.004875135898590088, 0.0047849597930908205, 0.004726912021636963, 0.004747263908386231, 0.00486195182800293, 0.004929535865783692, 0.004812736034393311, 0.004736512184143066, 0.004738624095916748, 0.00438102388381958, 0.0046954240798950195, 0.004729599952697754, 0.004675039768218994, 0.004640575885772705, 0.004751423835754395, 0.00470201587677002, 0.00461411190032959, 0.004698783874511719, 0.004770080089569092, 0.004671232223510742, 0.004745471954345703, 0.004805984020233155, 0.004694687843322754, 0.00485539197921753, 0.004722464084625244, 0.004833920001983643, 0.00492249584197998, 0.0047235841751098635, 0.004706079959869385, 0.004735199928283691, 0.004680736064910889, 0.00467248010635376, 0.004724544048309326, 0.004831232070922851, 0.0048148479461669925, 0.004804096221923828, 0.004746111869812011, 0.004816192150115967, 0.005028480052947998, 0.004927360057830811, 0.004801983833312988, 0.004889152050018311, 0.004886688232421875, 0.0048640317916870115, 0.0050070080757141115, 0.004886688232421875, 0.004924799919128418, 0.004713088035583496, 0.0048733439445495606, 0.005008096218109131, 0.004819104194641113, 0.0048057279586791995, 0.0047227201461791995, 0.004729407787322998, 0.004841792106628418, 0.004838560104370117, 0.004736095905303955, 0.00467519998550415, 0.004796639919281006, 0.00468995189666748, 0.004773759841918945, 0.0048189439773559575, 0.004836832046508789, 0.004960864067077637, 0.004845503807067871, 0.004795648097991943, 0.0048558077812194825, 0.004976831912994385, 0.004782271862030029, 0.0048644161224365235, 0.0047881917953491214, 0.004829184055328369, 0.004397119998931885, 0.004785024166107178, 0.0047288317680358885, 0.004761600017547608, 0.004679488182067871, 0.004599296092987061, 0.004674367904663086, 0.004718463897705078, 0.004681920051574707, 0.004726592063903808, 0.004767744064331054, 0.004628223896026611, 0.004632832050323487, 0.00471619176864624, 0.0046442561149597165, 
0.004768383979797363, 0.004888895988464355, 0.00467683219909668, 0.004690720081329346, 0.004708255767822266, 0.004697311878204346, 0.0047358717918396, 0.0047226881980895995, 0.004675712108612061, 0.004726655960083008, 0.00465667200088501, 0.004933119773864746, 0.00477894401550293, 0.00469814395904541, 0.004771840095520019, 0.004693952083587647, 0.00463046407699585, 0.00467471981048584, 0.004836383819580078, 0.004652671813964844, 0.004698431968688964, 0.004876287937164306, 0.004743167877197266, 0.004724736213684082, 0.0047578558921813965, 0.004737760066986084, 0.004658112049102783, 0.0047226881980895995, 0.004691487789154053, 0.004731296062469482, 0.004818496227264405, 0.004843776226043701, 0.004800288200378418, 0.004696352005004883, 0.004731071949005127, 0.005261568069458008, 0.004963136196136474, 0.004786431789398193, 0.004794144153594971, 0.004741824150085449, 0.0047250881195068355, 0.00477785587310791, 0.004907008171081543, 0.004703839778900146, 0.0048930878639221196, 0.00491487979888916, 0.004710912227630615, 0.004724575996398926, 0.00511414384841919, 0.004832960128784179, 0.004886943817138672, 0.004857215881347656, 0.004747903823852539, 0.004789567947387695, 0.004864640235900879, 0.004692031860351563, 0.004804480075836182, 0.004649087905883789, 0.00459548807144165, 0.004732160091400146, 0.004688640117645263, 0.004727039813995361, 0.0048100481033325196, 0.004880512237548828, 0.00474780797958374, 0.004677631855010986, 0.0047300481796264645, 0.0047992000579833985, 0.004809120178222656, 0.004751039981842041, 0.0049192957878112795, 0.005150688171386719, 0.004784192085266113, 0.0048537278175354, 0.004871327877044678, 0.004710559844970703, 0.00479638385772705, 0.006458271980285644, 0.004812607765197754, 0.004814047813415528, 0.004825183868408203, 0.0048026881217956546, 0.004833856105804443, 0.0047185921669006346, 0.0048148479461669925, 0.004859903812408447, 0.004775936126708984, 0.004881919860839844, 0.004730847835540772, 0.004752031803131104, 0.004763232231140137, 0.004763360023498535, 0.00474403190612793, 0.004773888111114502, 0.0047244482040405275, 0.00475648021697998, 0.004733952045440673, 0.004749311923980713, 0.004751232147216797, 0.004746496200561523, 0.0047235841751098635, 0.004742335796356201, 0.004801248073577881, 0.0048698558807373045, 0.0048949441909790035, 0.004892447948455811, 0.0048031358718872074, 0.004752639770507812, 0.004790463924407959, 0.0047233920097351076, 0.004773568153381347, 0.0043844799995422365, 0.004679679870605469, 0.0046152639389038085, 0.005120704174041748, 0.004808832168579102, 0.004799776077270508, 0.004661983966827392, 0.00474560022354126, 0.0045749440193176266, 0.004603744029998779, 0.004677792072296142, 0.004892672061920166, 0.004863327980041504, 0.0047066879272460935, 0.004794943809509278, 0.004753119945526123, 0.004820991992950439, 0.004775904178619385, 0.004734176158905029, 0.004795487880706787, 0.004613855838775635, 0.0046284799575805665, 0.004681727886199951, 0.00465715217590332, 0.004550784111022949, 0.004527872085571289, 0.0045305280685424805, 0.0044767999649047855, 0.004637728214263916, 0.004782559871673584, 0.0047702078819274905, 0.004978687763214112, 0.004612287998199463, 0.00471020793914795, 0.004712704181671142, 0.004648704051971436, 0.004695072174072266, 0.004635615825653076, 0.004577184200286865, 0.004588992118835449, 0.0047307519912719725, 0.004637472152709961, 0.004603903770446777, 0.004537919998168946, 0.004505280017852783, 0.00456713581085205, 0.004561567783355713, 0.004585472106933594, 0.004561024188995361, 0.004593632221221924, 
0.004567008018493652, 0.004613408088684082, 0.00461023998260498, 0.004538847923278809, 0.004606272220611572, 0.004673215866088867, 0.004523327827453613, 0.0044997758865356445, 0.004540800094604492, 0.004512800216674805, 0.004467296123504639, 0.004510240077972412, 0.00458735990524292, 0.004255743980407714, 0.00455017614364624, 0.00457366418838501, 0.00452729606628418, 0.004532608032226562, 0.00455459213256836, 0.004596320152282715, 0.0045957121849060055, 0.004532320022583008, 0.004562848091125489, 0.004517727851867675, 0.004512159824371338, 0.0045463042259216305, 0.004742527961730957, 0.004787903785705566, 0.004635583877563476, 0.004636672019958496, 0.004595295906066894, 0.0046035838127136235, 0.004654880046844482, 0.004572095870971679, 0.004598080158233642, 0.004542208194732666, 0.00456822395324707, 0.004533023834228516, 0.004626175880432129, 0.004649024009704589, 0.004780223846435547, 0.004603903770446777, 0.004661248207092285, 0.004603903770446777, 0.004595424175262451, 0.004618239879608154, 0.004577760219573975, 0.004609504222869873, 0.0045090560913085935, 0.004510687828063965, 0.0044943361282348635, 0.004582399845123291, 0.0045342397689819335, 0.00455241584777832, 0.00464518404006958, 0.004550655841827392, 0.004600863933563232, 0.004543168067932129, 0.004571424007415771, 0.004597760200500488, 0.004601856231689453, 0.0046284799575805665, 0.004624735832214355, 0.0046560959815979, 0.004557184219360352, 0.00474345588684082, 0.004659071922302246, 0.004614304065704346, 0.004554944038391113, 0.004516672134399414, 0.004617343902587891, 0.004587456226348877, 0.004561024188995361, 0.004572832107543946, 0.004634367942810059, 0.004654880046844482, 0.004306784152984619, 0.004632736206054687, 0.004657440185546875, 0.0046341438293457035, 0.004685344219207764, 0.00466806411743164, 0.00469977617263794, 0.004681119918823242, 0.004607999801635742, 0.004635456085205078, 0.004579487800598144, 0.00461568021774292, 0.004583168029785156, 0.004610432147979736, 0.00455731201171875, 0.004552576065063476, 0.00465283203125, 0.004647136211395264, 0.004679679870605469, 0.00469379186630249, 0.004646495819091797, 0.004634655952453613, 0.004728960037231445, 0.004633056163787842, 0.004564544200897217, 0.004512191772460938, 0.004585472106933594, 0.004511360168457031, 0.004538943767547607, 0.004572991847991943, 0.00475705623626709, 0.00456496000289917, 0.004893311977386475, 0.00473203182220459, 0.00456387186050415, 0.004498464107513428, 0.0044224319458007815, 0.004421631813049317, 0.0045015039443969725, 0.004585472106933594, 0.004554751873016357, 0.004658656120300293, 0.004461088180541992, 0.004448256015777588, 0.004611264228820801, 0.004723519802093506, 0.0046278080940246585, 0.004639391899108887, 0.004632607936859131, 0.004634175777435303, 0.004704224109649658, 0.00461033582687378, 0.004638879776000976, 0.004593664169311523, 0.004632575988769531, 0.004601856231689453, 0.004601503849029541, 0.004697535991668701, 0.00452288007736206, 0.004785280227661132, 0.004658080101013183, 0.004661503791809082, 0.004502719879150391, 0.004333568096160889, 0.004611167907714844, 0.004535200119018555, 0.00454041576385498, 0.004605120182037354, 0.00454534387588501, 0.004476895809173584, 0.004489247798919678, 0.004521984100341797, 0.004582399845123291, 0.004581600189208984, 0.004551519870758056, 0.004554111957550049, 0.00466707181930542, 0.004717440128326416, 0.0047513599395751956, 0.00464896011352539, 0.004644864082336426, 0.004676703929901123, 0.004623263835906982, 0.004562464237213135, 0.004637152194976807, 0.004571263790130615, 
0.00458739185333252, 0.004622111797332764, 0.004684000015258789, 0.00471449613571167, 0.004663455963134766, 0.004695903778076172, 0.004785247802734375, 0.0046945281028747555, 0.0046495680809021, 0.00466323184967041, 0.004619135856628418, 0.004664447784423828, 0.0047635197639465336, 0.004743167877197266, 0.004829023838043213, 0.004747424125671387, 0.004771455764770508, 0.004886623859405518, 0.004677760124206543, 0.004769951820373535, 0.004663296222686767, 0.004619743824005127, 0.00466812801361084, 0.004669151782989502, 0.004718688011169434, 0.004680831909179687, 0.00465119981765747, 0.004667295932769776, 0.004644735813140869, 0.004608191967010498, 0.004533055782318115, 0.0048414402008056644, 0.004736480236053467, 0.00471676778793335, 0.004688127994537354, 0.004693535804748536, 0.004642816066741944, 0.004613887786865234, 0.004612095832824707, 0.004680255889892578, 0.00432534408569336, 0.0045940160751342775, 0.004533760070800781, 0.004521599769592285, 0.00454915189743042, 0.004559040069580078, 0.004596992015838623, 0.004627007961273194, 0.004648096084594726, 0.004642879962921143, 0.0046377601623535155, 0.0049304962158203125, 0.00467955207824707, 0.00476643180847168, 0.004673920154571533, 0.004616223812103272, 0.00461187219619751, 0.00470739221572876, 0.004627295970916748, 0.00463267183303833, 0.004607840061187744, 0.004548768043518067, 0.004966400146484375, 0.004692255973815918, 0.004607552051544189, 0.004570464134216309, 0.0045855998992919925, 0.004528831958770752, 0.004751584053039551, 0.004697408199310303, 0.004637152194976807, 0.00464089584350586, 0.00458739185333252, 0.0045766720771789554, 0.004594560146331787, 0.004593376159667969, 0.004564383983612061, 0.004659103870391846, 0.004718431949615479, 0.004631648063659668, 0.004533472061157226, 0.004561439990997314, 0.00459881591796875, 0.0046212801933288574, 0.0045792322158813474, 0.004615295886993409, 0.004658080101013183, 0.004521664142608643, 0.004462975978851319, 0.0045231680870056155, 0.004485983848571778, 0.004437119960784912, 0.004424575805664062, 0.004441567897796631, 0.00459007978439331, 0.004501279830932617, 0.004512063980102539, 0.004505536079406738, 0.004558015823364258, 0.00446563196182251, 0.004511583805084229, 0.004481023788452149, 0.004455935955047607, 0.0041641597747802735, 0.00450377607345581, 0.004515071868896484, 0.004552576065063476, 0.004585408210754395, 0.004565951824188233, 0.004587584018707276, 0.004560704231262207, 0.004538176059722901, 0.004491775989532471, 0.00450486421585083, 0.004639232158660888, 0.004550687789916992, 0.004468959808349609, 0.004552480220794678, 0.0045957121849060055, 0.0046035838127136235, 0.00457916784286499, 0.004637407779693604, 0.004719456195831299, 0.004700511932373047, 0.00465996789932251, 0.004879903793334961, 0.004737535953521729, 0.0048596482276916505, 0.0047729601860046385, 0.00472979211807251, 0.0047738561630249025, 0.0049683518409729, 0.004822656154632569, 0.005163519859313965, 0.005464064121246338, 0.004868095874786377, 0.004711904048919678, 0.004731423854827881, 0.00471065616607666, 0.004703999996185303, 0.004704415798187256, 0.004724575996398926, 0.004735231876373291, 0.0046077117919921876, 0.004708384037017822, 0.0047636480331420894, 0.00468393611907959, 0.004611936092376709, 0.00456112003326416, 0.004738080024719238, 0.004675936222076416, 0.004671711921691895, 0.00473526382446289, 0.004732831954956055, 0.004697760105133056, 0.004724544048309326, 0.004720255851745605, 0.004617119789123535, 0.004624192237854004, 0.004691872119903564, 0.00471888017654419, 0.004689119815826416, 
0.005017983913421631, 0.004684224128723145, 0.00479747200012207, 0.004703167915344238, 0.00482809591293335, 0.004673535823822021, 0.004747263908386231, 0.004536320209503173, 0.004597760200500488, 0.004634624004364014, 0.0046080961227416995, 0.004530079841613769, 0.004627840042114258, 0.0045710082054138184, 0.004548799991607666, 0.004592383861541748, 0.004708159923553467, 0.004656735897064209, 0.004584928035736084, 0.004580543994903564, 0.0046425600051879885, 0.004611616134643555, 0.00471721601486206, 0.004639679908752441, 0.004576128005981445, 0.004582784175872803, 0.004759456157684326, 0.004877024173736573, 0.00483244800567627, 0.004779071807861328, 0.00481663990020752, 0.004787487983703613, 0.0047842879295349125, 0.004892447948455811, 0.005058495998382568, 0.004748127937316895, 0.004612319946289063, 0.004579103946685791, 0.004772928237915039, 0.004926400184631348, 0.0046284799575805665, 0.004687871932983399, 0.0045979199409484865, 0.004767712116241455, 0.004902592182159424, 0.004726240158081055, 0.0048217282295227055, 0.004798463821411133, 0.0046341118812561035, 0.0047539200782775876, 0.004847616195678711, 0.004698112010955811, 0.004678815841674805, 0.004705023765563965, 0.004569183826446533, 0.004577375888824463, 0.004556191921234131, 0.004692031860351563, 0.004637119770050049, 0.004962495803833008, 0.0046590080261230465, 0.004556287765502929, 0.00476796817779541, 0.004573472023010254, 0.00457747220993042, 0.0044951682090759275, 0.004442240238189697, 0.00420249605178833, 0.0045808000564575195, 0.004595871925354004, 0.0045879678726196286, 0.004517983913421631, 0.004500480175018311, 0.004576223850250244, 0.004474815845489502, 0.004546559810638427, 0.004515327930450439, 0.004497504234313965, 0.004632991790771484, 0.004859903812408447, 0.00453872013092041, 0.004572832107543946, 0.004622335910797119, 0.0047185921669006346, 0.004617311954498291, 0.0046048321723937986, 0.004482880115509034, 0.0044810881614685055, 0.004456575870513916, 0.004513792037963867, 0.004509696006774902, 0.00454860782623291, 0.004460544109344483, 0.004517183780670166, 0.0045593280792236325, 0.004556320190429688, 0.004719295978546143, 0.004572959899902343, 0.004497568130493164, 0.00446998405456543, 0.004547423839569091, 0.004636672019958496, 0.0046077117919921876, 0.004548480033874511, 0.004585023880004883, 0.0047190399169921874, 0.004600160121917725, 0.004607359886169434, 0.004631455898284912, 0.004631487846374511, 0.004545663833618164, 0.004553631782531738, 0.004643360137939453, 0.004821280002593994, 0.004782400131225586, 0.004763328075408936, 0.004788224220275879, 0.004741119861602783, 0.004661248207092285, 0.0046592001914978025, 0.004562911987304688, 0.00465123176574707, 0.004603712081909179, 0.004863423824310303, 0.0047530560493469235, 0.004631328105926514, 0.004643968105316162, 0.00456825590133667, 0.004611936092376709, 0.0046096000671386715, 0.004371615886688232, 0.0047400321960449215, 0.004613376140594482, 0.004729504108428955, 0.0047524161338806156, 0.004604576110839843, 0.004668992042541504, 0.00477177619934082, 0.004766111850738526, 0.004807072162628174, 0.0047226881980895995, 0.004642496109008789, 0.004616352081298828, 0.00468393611907959, 0.004668992042541504, 0.004706560134887695, 0.004658624172210694, 0.004582079887390136, 0.0045797438621520995, 0.004551455974578857, 0.004553599834442139, 0.0045426878929138185, 0.004518015861511231, 0.004714431762695312, 0.004599520206451416, 0.004534272193908692, 0.004550015926361084, 0.004546944141387939, 0.004524288177490235, 0.004531263828277588, 0.004532576084136963, 
0.004495967864990234, 0.004456448078155518, 0.0044068160057067875, 0.004456319808959961, 0.004467167854309082, 0.004485087871551514, 0.004520095825195312, 0.004435200214385987, 0.004472959995269776, 0.004442751884460449, 0.0045015039443969725, 0.004532415866851807, 0.004490431785583496, 0.0044980478286743165, 0.0046096320152282716, 0.004560480117797852, 0.004526495933532715, 0.004523647785186768, 0.004569215774536133, 0.0045380802154541015, 0.004508607864379883, 0.004627488136291504, 0.004629792213439942, 0.004608704090118408, 0.004546944141387939, 0.0045146880149841305, 0.004579296112060547, 0.0045313920974731445, 0.004473087787628174, 0.004501247882843017, 0.004673344135284424, 0.004541215896606446, 0.004311615943908691, 0.004726975917816162, 0.00536352014541626, 0.004648255825042725, 0.004678463935852051, 0.004831424236297607, 0.004777664184570312, 0.004898816108703613, 0.004711872100830078, 0.004638336181640625, 0.004666336059570312, 0.004644832134246826, 0.004818655967712402, 0.0048475837707519535, 0.004766111850738526, 0.00469708776473999, 0.00488102388381958, 0.0048683519363403325, 0.005039680004119873, 0.004847616195678711, 0.004702239990234375, 0.004689439773559571, 0.004823872089385986, 0.004711808204650879, 0.004596447944641114, 0.0046406397819519045, 0.004668928146362304, 0.00458784008026123, 0.0046267518997192385, 0.004667488098144532, 0.004599167823791504, 0.0047008638381958005, 0.004648799896240234, 0.004853888034820557, 0.004788095951080322, 0.004708352088928222, 0.004679679870605469, 0.00467577600479126, 0.0046815361976623535, 0.004800576210021973, 0.004787168025970459, 0.004616352081298828, 0.004629600048065186, 0.004691071987152099, 0.004589759826660157, 0.004567455768585205, 0.004663296222686767, 0.004675487995147705, 0.00473302412033081, 0.004799647808074951, 0.004658016204833984, 0.004562943935394287, 0.004673535823822021, 0.004744607925415039, 0.004737023830413818, 0.004776544094085693, 0.004988704204559326, 0.005089248180389404, 0.00472489595413208, 0.004685855865478515, 0.004690176010131836, 0.004673344135284424, 0.004758560180664063, 0.004640416145324707, 0.005003327846527099, 0.004905568122863769, 0.004933631896972656, 0.004919456005096435, 0.00483516788482666, 0.004857215881347656, 0.004845952033996582, 0.004798751831054687, 0.004845727920532226, 0.004800320148468018, 0.004739071846008301, 0.004797696113586426, 0.0048158721923828125, 0.004779263973236084, 0.0047539200782775876, 0.0046854400634765625, 0.004732831954956055, 0.00462275218963623, 0.004729919910430909, 0.004657408237457275, 0.0047348799705505375, 0.004737887859344482, 0.004708000183105469, 0.004686399936676026, 0.0048989119529724125, 0.004819647789001465, 0.004823296070098877, 0.0047338237762451175, 0.004652927875518799, 0.004670623779296875, 0.004989791870117188, 0.0047339839935302734, 0.005178336143493652, 0.0047364158630371096, 0.004791200160980225, 0.004814527988433838, 0.004683328151702881, 0.004721183776855469, 0.004700064182281494, 0.004697855949401856, 0.00469382381439209, 0.004661280155181885, 0.004632991790771484, 0.00455679988861084, 0.004562943935394287, 0.004570335865020752, 0.004629280090332031, 0.004677536010742188, 0.00464086389541626, 0.004649216175079345, 0.004632319927215576, 0.0046410241127014164, 0.004852960109710694, 0.004655519962310791, 0.004644703865051269, 0.004638463973999024, 0.004849440097808838, 0.004665472030639648, 0.005181312084197998, 0.004678016185760498, 0.004862656116485595, 0.004664159774780273, 0.004305215835571289, 0.004566271781921386, 0.0045382399559021, 
0.0046638398170471196, 0.0046505918502807615, 0.0046145920753479005, 0.004612095832824707, 0.004634079933166504, 0.004530208110809326, 0.004563168048858642, 0.004603616237640381, 0.004692543983459472, 0.004646912097930909, 0.004593567848205566, 0.004639999866485596, 0.004690783977508545, 0.004739071846008301, 0.004734975814819336, 0.004693439960479736, 0.004790847778320313, 0.004753407955169678, 0.0047325758934021, 0.004694272041320801, 0.004657343864440918, 0.004629663944244385, 0.004786943912506104, 0.0045640959739685055, 0.004627071857452393, 0.004673791885375977, 0.0047923197746276855, 0.004868000030517578, 0.004684000015258789, 0.004651072025299072, 0.0047450242042541505, 0.004664447784423828, 0.0046943678855895996, 0.004637216091156006, 0.004676608085632325, 0.0046929922103881834, 0.004704256057739258, 0.004679520130157471, 0.004634655952453613, 0.004677760124206543, 0.00459980821609497, 0.004578559875488281, 0.00459222412109375, 0.004521567821502686, 0.004481599807739258, 0.004769951820373535, 0.004594944000244141, 0.004684447765350342, 0.0046976637840271, 0.0045756158828735355, 0.004546559810638427, 0.004636640071868896, 0.00470633602142334, 0.004694015979766846, 0.004721983909606934, 0.004653759956359863, 0.004630527973175049, 0.004632575988769531, 0.004630720138549805, 0.0046323838233947754, 0.00433519983291626, 0.004729375839233398, 0.004686016082763672, 0.004683775901794434, 0.0047628159523010255, 0.004741919994354248, 0.004741280078887939, 0.004728767871856689, 0.004744448184967041, 0.004793248176574707, 0.0047931838035583495, 0.00476255989074707, 0.004886496067047119, 0.004783999919891357, 0.004697375774383545, 0.004665952205657959, 0.004608255863189697, 0.004727071762084961, 0.004632319927215576, 0.004959968090057373, 0.004716479778289795, 0.004714015960693359, 0.004617023944854737, 0.0045359997749328616, 0.004681888103485107, 0.004585184097290039, 0.00471020793914795, 0.00465718412399292, 0.004719200134277343, 0.004614143848419189, 0.004534272193908692, 0.004618207931518555, 0.004614175796508789, 0.004677631855010986, 0.004739071846008301, 0.00461407995223999, 0.004681087970733642, 0.004799168109893798, 0.004759552001953125, 0.00469920015335083, 0.004715456008911133, 0.004693535804748536, 0.004651391983032226, 0.004769887924194336, 0.004671487808227539, 0.004728896141052246, 0.004648896217346191, 0.004668896198272705, 0.004864543914794922, 0.004743167877197266, 0.004737023830413818, 0.004597760200500488, 0.004554751873016357, 0.004687871932983399, 0.004656896114349365, 0.004599391937255859, 0.004708767890930176, 0.004695807933807373, 0.004610655784606933, 0.004597343921661377, 0.004673855781555176, 0.004618239879608154, 0.004747263908386231, 0.004240640163421631, 0.004580095767974853, 0.004685535907745362, 0.004718656063079834, 0.004710624217987061, 0.004845151901245117, 0.004674304008483887, 0.004787903785705566, 0.004728991985321045, 0.004651904106140137, 0.004661600112915039, 0.004678368091583252, 0.0046275839805603024, 0.004654079914093018, 0.004634272098541259, 0.004613408088684082, 0.004692319869995117, 0.004628960132598877, 0.004622079849243164, 0.004662975788116455, 0.004585279941558838, 0.004544640064239502, 0.004582304000854492, 0.0046384320259094235, 0.004617695808410645, 0.004624800205230713, 0.0045668802261352535, 0.004477344036102295, 0.004522016048431396, 0.004494688034057617, 0.004497920036315918, 0.0045194878578186035, 0.004662047863006591, 0.004585216045379639, 0.004622240066528321, 0.00473302412033081, 0.004758495807647705, 0.0046089601516723635, 
0.004784319877624512, 0.004674655914306641, 0.00471884822845459, 0.004602079868316651, 0.00461030387878418, 0.0045281281471252445, 0.004516863822937011, 0.004562272071838379, 0.004515520095825195, 0.004506976127624512, 0.004469183921813965, 0.00446073579788208, 0.004538368225097656, 0.004560704231262207, 0.0045032958984375, 0.004479423999786377, 0.0045116481781005855, 0.004581088066101074, 0.004591775894165039, 0.004716415882110595, 0.004614496231079101, 0.00444323205947876, 0.004507936000823974, 0.004414080142974853, 0.004616159915924073, 0.004098847866058349, 0.0043983678817749025, 0.004446112155914307, 0.004418591976165772, 0.004386591911315918, 0.004479008197784424, 0.004378592014312744, 0.00447049617767334, 0.00448089599609375, 0.004473023891448975, 0.004425951957702637, 0.004387872219085693, 0.004431104183197022, 0.0044902081489562986, 0.004451136112213135, 0.00455014419555664, 0.004499167919158936, 0.0044898238182067875, 0.004620416164398193, 0.0045298562049865725, 0.004884191989898682, 0.004884575843811035, 0.00469862413406372, 0.004915200233459473, 0.005371200084686279, 0.005114560127258301, 0.004890848159790039, 0.004646687984466553, 0.0046694397926330565, 0.004617760181427002, 0.00471292781829834, 0.004749279975891113, 0.0045937919616699215, 0.004554656028747559, 0.004493631839752197, 0.004429215908050537, 0.00446892786026001, 0.00451423978805542, 0.004586143970489502, 0.004495520114898682, 0.004576096057891846, 0.004447519779205322, 0.004448991775512695, 0.004384064197540284, 0.0043424639701843265, 0.00435148811340332, 0.0043639039993286135, 0.004399328231811523, 0.004376832008361816, 0.004389632225036621, 0.004357791900634765, 0.004673439979553223, 0.004407104015350342, 0.004469024181365967, 0.004390431880950928, 0.004391200065612793, 0.004364480018615722, 0.004393087863922119, 0.004435967922210693, 0.004361728191375733, 0.0044150400161743165, 0.004440576076507568, 0.004569407939910889, 0.004153855800628662, 0.004488448143005371, 0.004610976219177246, 0.004497280120849609, 0.004498720169067383, 0.004697824001312256, 0.004518879890441895, 0.004519936084747315, 0.004512095928192138, 0.004553567886352539, 0.004453184127807618, 0.004460544109344483, 0.004441984176635742, 0.004480288028717041, 0.004471648216247559, 0.004388864040374756, 0.00438643217086792, 0.0043541440963745116, 0.004321568012237549, 0.004325376033782959, 0.004325439929962158, 0.004462175846099854, 0.004335968017578125, 0.004422976016998291, 0.004377056121826172, 0.004419104099273681, 0.004418272018432617, 0.004413407802581787, 0.004407296180725098, 0.004354047775268555, 0.004341695785522461, 0.004410880088806152, 0.00438483190536499, 0.004442592144012451, 0.004400352001190186, 0.004434976100921631, 0.004417056083679199, 0.004430079936981201, 0.0044746241569519046, 0.0044548802375793456, 0.004454463958740234, 0.004633791923522949, 0.00458400011062622, 0.004570784091949463, 0.004592991828918457, 0.004619232177734375, 0.004536416053771973, 0.004555935859680176, 0.004577663898468018, 0.004571104049682617, 0.004616608142852783, 0.004634335994720459, 0.004557119846343994, 0.004580639839172364, 0.004519968032836914, 0.004492159843444824, 0.004525248050689697, 0.004534880161285401, 0.004511744022369385, 0.004646912097930909, 0.004583360195159912, 0.004622719764709473, 0.004606880187988281, 0.004274144172668457, 0.004711135864257813, 0.004607967853546143, 0.00455683183670044, 0.004583775997161865, 0.004613791942596436, 0.0045725760459899905, 0.004589344024658203, 0.0045576319694519045, 0.004585696220397949, 
0.0046077117919921876, 0.0045443840026855465, 0.004497600078582764, 0.0045006399154663085, 0.004504000186920166, 0.004554336071014404, 0.0047849597930908205, 0.004511616230010986, 0.004483200073242187, 0.0045424637794494625, 0.0044973759651184084, 0.004517343997955323, 0.0044609918594360354, 0.004540351867675782, 0.004559008121490479, 0.0045138239860534664, 0.004467967987060547, 0.0045164480209350585, 0.0045610561370849605, 0.0045220799446105955, 0.004451839923858642, 0.005085631847381592, 0.004659520149230957, 0.004501152038574219, 0.004454400062561035, 0.004545728206634522, 0.00464569616317749, 0.004616415977478027, 0.004605343818664551, 0.004534656047821045, 0.004468736171722412, 0.004466720104217529, 0.004497663974761963, 0.00452288007736206, 0.004487872123718262, 0.004712607860565186, 0.004611519813537598, 0.0046638398170471196, 0.004673439979553223, 0.004636864185333252, 0.004560512065887451, 0.004612415790557861, 0.004584767818450928, 0.00465334415435791, 0.004594079971313477, 0.004620287895202637, 0.004573503971099854, 0.004680863857269287, 0.004543007850646973, 0.004595808029174805, 0.004650911808013916, 0.004640992164611816, 0.004605728149414063, 0.004383359909057617, 0.0046574721336364745, 0.0046919679641723635, 0.0047021441459655765, 0.004683167934417725, 0.00460259199142456, 0.004625792026519775, 0.0045495681762695315, 0.0045766720771789554, 0.004605823993682861, 0.004587007999420166, 0.004553855895996094, 0.0047320637702941894, 0.004602303981781006, 0.004711999893188477, 0.004659776210784912, 0.0046878399848937985, 0.004685984134674072, 0.00462553596496582, 0.004547103881835938, 0.004590943813323974, 0.004608895778656006, 0.004707520008087158, 0.004754240036010742, 0.004658880233764648, 0.004643136024475097, 0.004630527973175049, 0.004651008129119873, 0.0046449599266052246, 0.004881728172302246, 0.004669951915740967, 0.004701312065124512, 0.004659999847412109, 0.004724160194396972, 0.004735648155212402, 0.00466758394241333, 0.004837408065795899, 0.00472051191329956, 0.0047309441566467285, 0.0046748161315917965, 0.0046384320259094235, 0.0047112002372741695, 0.004670783996582031, 0.0046703357696533205, 0.004670623779296875, 0.00478879976272583, 0.004702688217163086, 0.004638527870178223, 0.004607999801635742, 0.004570240020751953, 0.004674015998840332, 0.0046921601295471195, 0.004644544124603272, 0.004567103862762451, 0.004531744003295898, 0.004578239917755127, 0.004904352188110352, 0.004616640090942383, 0.0046100797653198244, 0.0045623998641967775, 0.004569600105285645, 0.004528512001037597, 0.004552703857421875, 0.0042659201622009275, 0.004565919876098633, 0.004502719879150391, 0.0044716157913208005, 0.0045240321159362796, 0.004531231880187989, 0.004518720149993896, 0.004487360000610351, 0.004485087871551514, 0.004465760231018067, 0.004396063804626465, 0.004439807891845703, 0.004407167911529541, 0.004401408195495606, 0.004468959808349609, 0.0044562239646911625, 0.004523744106292724, 0.004501952171325684, 0.004476863861083985, 0.0044399361610412596, 0.0044234881401062015, 0.00454099178314209, 0.0044991040229797365, 0.004696127891540528, 0.004539743900299072, 0.004680416107177734, 0.004727839946746826, 0.004645855903625489, 0.004586719989776612, 0.004571807861328125, 0.004616543769836425, 0.004658463954925537, 0.004650911808013916, 0.004567232131958008, 0.004593408107757569, 0.00459449577331543, 0.004550367832183838, 0.004779168128967285, 0.004692543983459472, 0.004630176067352295, 0.004743167877197266, 0.004614783763885498, 0.00466534423828125, 0.004671487808227539, 
0.00465715217590332, 0.004921247959136963, 0.004634719848632812, 0.004550655841827392, 0.004550655841827392, 0.0045015039443969725, 0.004468736171722412, 0.004499519824981689, 0.004484064102172851, 0.004520927906036377, 0.004531231880187989, 0.004590240001678467, 0.0045548157691955565, 0.004669695854187012, 0.004593408107757569, 0.00461030387878418, 0.0045847039222717285, 0.004561984062194825, 0.004526976108551025, 0.004310272216796875, 0.004647744178771973, 0.004620319843292237, 0.0045660161972045895, 0.0046020479202270505, 0.004598368167877198, 0.004544864177703858, 0.004484032154083252, 0.004446784019470215, 0.00456928014755249, 0.0046265277862548825, 0.004554240226745605, 0.004524159908294678, 0.0045734400749206545, 0.004546592235565186, 0.004569183826446533, 0.004618144035339356, 0.004587903976440429, 0.00459769582748413, 0.004622240066528321, 0.004575104236602783, 0.004585472106933594, 0.004699808120727539, 0.004745279788970947, 0.004693952083587647, 0.004649600028991699, 0.004658912181854248, 0.004905312061309814, 0.004761248111724853, 0.004744991779327392, 0.004610112190246582, 0.004610208034515381, 0.004759359836578369, 0.004710591793060302, 0.004670623779296875, 0.00462883186340332, 0.004872704029083252, 0.004683775901794434, 0.0047329277992248535, 0.004631648063659668, 0.00463270378112793, 0.004671775817871094, 0.00467519998550415, 0.004661888122558594, 0.004692224025726319, 0.004679679870605469, 0.004699903964996338, 0.004711008071899414, 0.004697792053222656, 0.0047058238983154295, 0.004684224128723145, 0.004642816066741944, 0.004661248207092285, 0.00458681583404541, 0.004616896152496338, 0.004679200172424317, 0.004566559791564942, 0.004630815982818603, 0.005763552188873291, 0.004671679973602295, 0.0045989761352539064, 0.004631648063659668, 0.004604896068572998, 0.004246975898742676, 0.004586048126220703, 0.004586751937866211, 0.0046128640174865725, 0.004577280044555664, 0.004766816139221191, 0.0049036798477172855, 0.0046509761810302735, 0.00463481616973877, 0.0047045121192932126, 0.004611423969268799, 0.004711904048919678, 0.004620800018310547, 0.004646656036376953, 0.004581567764282227, 0.004573952198028564, 0.0046490559577941895, 0.00457260799407959, 0.0046061758995056155, 0.004537375926971435, 0.004551167964935303, 0.004592383861541748, 0.004570335865020752, 0.004579808235168457, 0.004542272090911865, 0.004602335929870606, 0.004636415958404541, 0.0046193599700927734, 0.0045784001350402835, 0.004546559810638427, 0.004637631893157959, 0.004638720035552979, 0.004627200126647949, 0.004585472106933594, 0.004587264060974121, 0.0047066879272460935, 0.004576288223266602, 0.004535264015197754, 0.004668863773345947, 0.004567679882049561, 0.004742144107818603, 0.004758463859558105, 0.004612095832824707, 0.004552703857421875, 0.004694015979766846, 0.004591584205627441, 0.004591648101806641, 0.004688127994537354, 0.004782015800476074, 0.00463647985458374, 0.004535391807556153, 0.004532351970672608, 0.0045872960090637205, 0.00444220781326294, 0.00452291202545166, 0.004431871891021728, 0.004437119960784912, 0.004463488101959228, 0.00443612813949585, 0.004607840061187744, 0.0045119037628173824, 0.004484096050262451, 0.0045023679733276365, 0.0042360639572143555, 0.0045090560913085935, 0.004551487922668457, 0.004488831996917725, 0.004471327781677246, 0.004646592140197754, 0.004500864028930664, 0.004566624164581299, 0.004449440002441406, 0.004462783813476563, 0.004456096172332764, 0.0044421119689941405, 0.004487135887145996, 0.004512063980102539, 0.004515520095825195, 0.004488383769989014, 
0.004440032005310059, 0.00452288007736206, 0.0044297599792480465, 0.004587584018707276, 0.004512063980102539, 0.004419360160827637, 0.004505472183227539, 0.004489247798919678, 0.004539519786834717, 0.004549536228179931, 0.0046767997741699215, 0.0046210880279541015, 0.00448905611038208, 0.004460800170898438, 0.004446239948272705, 0.004496479988098144, 0.0045146880149841305, 0.004465856075286865, 0.004546847820281983, 0.004506048202514649, 0.004550303936004639, 0.00457535982131958, 0.004562880039215088, 0.00460214376449585, 0.004507775783538818, 0.004509151935577393, 0.004501920223236084, 0.004752768039703369, 0.004532544136047363, 0.0044588160514831545, 0.004473184108734131, 0.004434815883636475, 0.004420383930206299, 0.004455647945404053, 0.004477727890014649, 0.004487071990966797, 0.0044538240432739256, 0.004440735816955566, 0.004525760173797608, 0.004449664115905761, 0.004513919830322266, 0.004448160171508789, 0.004448256015777588, 0.004506527900695801, 0.004433919906616211, 0.004483071804046631, 0.00449945592880249, 0.004234240055084228, 0.004623360157012939, 0.004571135997772217, 0.004599040031433106, 0.0046722559928894045, 0.004798463821411133, 0.0047019200325012205, 0.004659359931945801, 0.004614272117614746, 0.004655104160308838, 0.004632575988769531, 0.004777984142303467, 0.004711616039276123, 0.004702527999877929, 0.0047272958755493165, 0.004661248207092285, 0.004712607860565186, 0.0046276159286499025, 0.004663487911224365, 0.004762112140655517, 0.004612095832824707, 0.00473529577255249, 0.004717984199523926, 0.004796639919281006, 0.004776000022888183, 0.004862143993377686, 0.004802591800689697, 0.0047511358261108395, 0.004745215892791748, 0.004605120182037354, 0.004647168159484863, 0.004635327816009521, 0.0047432641983032224, 0.004653056144714355, 0.004584832191467285, 0.0046835517883300785, 0.004594560146331787, 0.004843264102935791, 0.004637951850891113, 0.0046119999885559084, 0.004625247955322266, 0.004554751873016357, 0.004681215763092041, 0.004575744152069092, 0.004632319927215576, 0.004786431789398193, 0.004673024177551269, 0.004776447772979736, 0.0047669439315795896, 0.004737919807434082, 0.004704160213470459, 0.004702208042144776, 0.004712448120117187, 0.0047185921669006346, 0.004745215892791748, 0.004701568126678467, 0.004700799942016601, 0.004679679870605469, 0.004835328102111816, 0.004746528148651123, 0.004586207866668701, 0.004693376064300537, 0.004627071857452393]",tokens/s,213.88355723568543,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,808.513536,933.101568,0.0,530.579456,489.934848,s,1,9.349224609375,9.349224609375,0.0,9.349224609375,9.349224609375,9.349224609375,9.349224609375,[9.349224609375],,kWh,2.494457437503191e-06,2.668472163971674e-07,9.51111872000282e-07,3.7124165259006403e-06,,MB,1141.092352,947.781632,0.0,534.77376,462.069248,s,36,0.380106113433838,0.010558503150939941,0.0002146172809553254,0.01050428819656372,0.010634096145629882,0.01076524806022644,0.011426788997650145,"[0.011698176383972168, 0.01057910442352295, 0.010544447898864747, 0.010646080017089844, 0.010450431823730469, 0.01047225570678711, 0.010439040184020996, 0.010500032424926758, 0.010463711738586425, 0.01049398422241211, 0.01055072021484375, 0.010508543968200683, 0.010481151580810547, 0.010712736129760743, 0.010622112274169922, 0.010512895584106445, 0.010413151741027832, 0.0104900484085083, 0.010548800468444823, 0.010588671684265137, 0.010458399772644043, 0.010557791709899903, 0.01052355194091797, 0.010537856101989746, 0.010477312088012695, 0.010483263969421387, 0.01058073616027832, 0.01048960018157959, 0.01058790397644043, 0.01057091236114502, 0.01040931224822998, 0.010428607940673828, 0.010922783851623536, 0.01043228816986084, 0.010452192306518554, 0.010477503776550294]",tokens/s,24245.861022185734,kWh,3.791884665909003e-07,4.181742963149757e-08,2.506794934727279e-07,6.716853896951257e-07,tokens/kWh,381130815.00283486,MB,1152.925696,970.850304,0.0,557.842432,462.071808,s,36,9.886624755859376,0.2746284654405382,0.0038277238350473424,0.27430192565917966,0.27770510864257814,0.28130641174316406,0.28580888519287106,"[0.28167880249023436, 0.28803277587890624, 0.2688783874511719, 0.27774127197265625, 0.2776689453125, 0.27694873046875, 0.2687189025878906, 0.27164511108398437, 0.27403729248046876, 0.2720401000976562, 0.2711937255859375, 0.27661972045898436, 0.2744055480957031, 0.274278564453125, 0.27269464111328123, 0.2676060791015625, 0.27567697143554687, 0.2719415283203125, 0.27010025024414064, 0.27333425903320313, 0.2703841247558594, 0.27443194580078123, 0.2743252868652344, 0.27335137939453125, 0.2753865661621094, 0.27343466186523435, 0.28118228149414065, 0.27341766357421876, 0.27639495849609375, 0.2758948974609375, 0.2770768432617188, 0.274124755859375, 0.2776158142089844, 0.2731585693359375, 0.27529885864257814, 0.275904541015625]",tokens/s,229.40083759686078,kWh,7.5001677470056054e-06,8.271359726496656e-07,3.807298899106234e-06,1.2134602618761503e-05,tokens/kWh,5191764.574358183,,s,2268,9.870341084957111,0.004352002242044586,0.00014648594511044188,0.004332223892211914,0.004467113685607911,0.004529846286773682,0.004902701034545891,"[0.004356416225433349, 0.004579135894775391, 0.0044297919273376465, 0.004460447788238525, 0.004404448032379151, 0.004441152095794678, 0.004502880096435547, 0.004497791767120361, 0.0045649919509887695, 0.004443424224853516, 0.004433824062347412, 0.004512256145477295, 0.004528448104858398, 0.004483359813690186, 0.0044081277847290035, 0.004361184120178223, 0.00440883207321167, 0.005267903804779052, 0.0051320638656616215, 0.004447807788848877, 0.004364672183990478, 0.004309279918670654, 0.004323264122009277, 0.004425792217254638, 0.00445849609375, 0.004421247959136963, 0.004358528137207031, 0.004419583797454834, 0.0043407359123229985, 0.004424704074859619, 0.004455743789672851, 0.00441209602355957, 0.004406367778778076, 0.004404128074645996, 0.004357151985168457, 0.004398047924041748, 0.00444163179397583, 0.004450784206390381, 0.004487167835235595, 
0.004456384181976318, 0.004505887985229492, 0.004517151832580567, 0.00455731201171875, 0.004505055904388428, 0.004497951984405517, 0.0044540162086486815, 0.004433792114257812, 0.004462175846099854, 0.004387648105621338, 0.00455075216293335, 0.004409632205963135, 0.00437772798538208, 0.004405856132507324, 0.004525152206420899, 0.004483327865600586, 0.00446940803527832, 0.004423679828643799, 0.0043886079788208006, 0.004382976055145264, 0.004382719993591308, 0.004443967819213867, 0.004446400165557861, 0.004411327838897705, 0.004387712001800537, 0.0045463042259216305, 0.004515679836273193, 0.004483488082885742, 0.00443939208984375, 0.004509632110595703, 0.004475584030151367, 0.004479231834411621, 0.00443068790435791, 0.004438975811004639, 0.004472832202911377, 0.004405248165130615, 0.004417471885681153, 0.004509759902954102, 0.0044432001113891605, 0.004535232067108154, 0.005914368152618408, 0.006251808166503906, 0.005972959995269775, 0.006010240077972412, 0.005933695793151855, 0.005476352214813233, 0.004449952125549317, 0.0044445118904113765, 0.0043786239624023435, 0.004310527801513672, 0.004280352115631104, 0.004343776226043701, 0.004366847991943359, 0.004466752052307129, 0.004484447956085205, 0.004460864067077637, 0.0045099520683288576, 0.004708384037017822, 0.004554240226745605, 0.0044568958282470705, 0.00459987211227417, 0.004538368225097656, 0.004421631813049317, 0.004558847904205322, 0.004556511878967285, 0.004473120212554932, 0.004392960071563721, 0.004607327938079834, 0.004370944023132324, 0.004354464054107666, 0.004365312099456787, 0.004360544204711914, 0.004329567909240723, 0.004319551944732666, 0.0043433279991149905, 0.004337503910064698, 0.004268447875976563, 0.004307168006896973, 0.004378367900848388, 0.004294847965240479, 0.004298816204071045, 0.004375711917877197, 0.00429702377319336, 0.004305439949035645, 0.004273632049560547, 0.0042911038398742675, 0.004290815830230713, 0.004268032073974609, 0.004370431900024414, 0.004277503967285156, 0.004281087875366211, 0.004185440063476563, 0.004155200004577636, 0.004139808177947998, 0.004188000202178955, 0.004178143978118896, 0.004220928192138672, 0.00420195198059082, 0.004152959823608399, 0.004189087867736817, 0.0041799678802490236, 0.004156447887420654, 0.004211679935455322, 0.00418998384475708, 0.004174143791198731, 0.004156608104705811, 0.004145887851715088, 0.004141056060791016, 0.004139008045196534, 0.004124159812927246, 0.004132575988769531, 0.00428931188583374, 0.004169248104095459, 0.004166111946105957, 0.004161119937896729, 0.004137375831604004, 0.004208320140838623, 0.0042293438911437985, 0.0042939200401306155, 0.004236000061035156, 0.0042293758392333985, 0.0042453441619873045, 0.004292768001556396, 0.004269919872283936, 0.004512800216674805, 0.0044345598220825195, 0.00427558422088623, 0.004282783985137939, 0.004280447959899902, 0.004290815830230713, 0.00424505615234375, 0.004255680084228516, 0.004452832221984863, 0.004298624038696289, 0.004284704208374023, 0.004238624095916748, 0.00425654411315918, 0.004242496013641357, 0.004276703834533691, 0.004302815914154053, 0.004306464195251465, 0.004493663787841797, 0.004398848056793213, 0.004338560104370118, 0.004409503936767578, 0.004392799854278565, 0.004483071804046631, 0.0043745279312133786, 0.004364287853240967, 0.004386816024780273, 0.004419904232025146, 0.004493311882019043, 0.004429823875427246, 0.00450764799118042, 0.004452352046966553, 0.004379936218261719, 0.004326047897338867, 0.004482304096221924, 0.004364480018615722, 0.004414336204528808, 0.004371744155883789, 
0.0043771200180053715, 0.004361855983734131, 0.004393280029296875, 0.004327424049377441, 0.004298816204071045, 0.004269887924194336, 0.0043536958694458, 0.004283999919891358, 0.004229695796966553, 0.00429702377319336, 0.004313087940216064, 0.00435968017578125, 0.004319744110107422, 0.004283552169799805, 0.0045064640045166015, 0.004286464214324951, 0.00440496015548706, 0.004344096183776855, 0.0043089919090271, 0.004261888027191162, 0.004333792209625244, 0.004502816200256348, 0.004337535858154297, 0.004381311893463135, 0.004292448043823242, 0.004294079780578613, 0.004393695831298828, 0.004356095790863037, 0.004357439994812012, 0.004358751773834229, 0.00440556812286377, 0.004443935871124268, 0.004398943901062012, 0.004486847877502442, 0.004475359916687012, 0.004472064018249512, 0.004493663787841797, 0.0044399361610412596, 0.00473961591720581, 0.004475967884063721, 0.004421599864959717, 0.004375520229339599, 0.004377600193023682, 0.004315584182739258, 0.004352767944335937, 0.00455244779586792, 0.00441161584854126, 0.004416543960571289, 0.004469567775726318, 0.004719840049743652, 0.004600607872009278, 0.0044707517623901365, 0.004380671977996826, 0.004534272193908692, 0.004449664115905761, 0.004464831829071045, 0.004457024097442627, 0.004441984176635742, 0.004419583797454834, 0.004392960071563721, 0.004421631813049317, 0.004413440227508545, 0.0043786239624023435, 0.004372064113616943, 0.004432223796844483, 0.00450544023513794, 0.00444755220413208, 0.004367136001586914, 0.004405375957489014, 0.0046835517883300785, 0.00471673583984375, 0.004364704132080078, 0.004402592182159424, 0.004323552131652832, 0.004352000236511231, 0.004345856189727783, 0.004344960212707519, 0.004418432235717773, 0.004644224166870117, 0.004440256118774414, 0.004596159934997558, 0.0045066561698913574, 0.004369376182556152, 0.0043367681503295894, 0.00432422399520874, 0.0043656320571899415, 0.004301504135131836, 0.004316256046295166, 0.004326303958892822, 0.00436633586883545, 0.004297920227050781, 0.004272960186004639, 0.0043101758956909176, 0.004352863788604736, 0.004271743774414062, 0.0042848000526428225, 0.004347904205322266, 0.004527423858642578, 0.004387519836425781, 0.004435328006744385, 0.004423391819000244, 0.004380864143371582, 0.00460259199142456, 0.0045957121849060055, 0.004462783813476563, 0.004375391960144043, 0.004420576095581055, 0.004382431983947754, 0.004341887950897217, 0.004382400035858154, 0.004259967803955078, 0.004272543907165528, 0.004240479946136475, 0.004234079837799072, 0.004237311840057373, 0.004249599933624268, 0.004388864040374756, 0.004411392211914063, 0.004452352046966553, 0.004369472026824951, 0.0043796801567077635, 0.004302432060241699, 0.004325856208801269, 0.004364128112792969, 0.004593664169311523, 0.004395008087158203, 0.004347904205322266, 0.004376031875610351, 0.004403488159179687, 0.00435430383682251, 0.004323328018188476, 0.004347072124481201, 0.004384960174560547, 0.004536896228790283, 0.004396192073822022, 0.004508575916290283, 0.00441161584854126, 0.004346848011016846, 0.004360159873962402, 0.004267936229705811, 0.004332511901855469, 0.004292128086090088, 0.004295104026794434, 0.004310143947601318, 0.004315872192382813, 0.004345280170440674, 0.0043730239868164066, 0.004378367900848388, 0.004368832111358642, 0.00436633586883545, 0.004441472053527832, 0.004522624015808105, 0.004448256015777588, 0.00438915205001831, 0.0045175042152404785, 0.004435776233673095, 0.004421919822692871, 0.004390912055969238, 0.004406911849975586, 0.004415520191192627, 0.004411744117736816, 
0.004347743988037109, 0.00436352014541626, 0.004393887996673584, 0.004429503917694092, 0.004452064037322998, 0.004411200046539307, 0.004375328063964843, 0.004345856189727783, 0.004381696224212647, 0.004377600193023682, 0.0045090880393981935, 0.004444767951965332, 0.004403200149536133, 0.004392960071563721, 0.004357791900634765, 0.004364192008972168, 0.004342207908630371, 0.004268032073974609, 0.004403200149536133, 0.004364287853240967, 0.004395199775695801, 0.004386943817138672, 0.004325056076049804, 0.004349055767059326, 0.004293504238128662, 0.0042631678581237795, 0.004295040130615235, 0.004324992179870605, 0.004389632225036621, 0.004294655799865723, 0.004246975898742676, 0.004438079833984375, 0.004389344215393066, 0.0043994240760803225, 0.004335328102111817, 0.004259840011596679, 0.004271327972412109, 0.004311647891998291, 0.00431283187866211, 0.004366208076477051, 0.004307519912719726, 0.004268032073974609, 0.004227071762084961, 0.004173823833465576, 0.004208640098571777, 0.004388864040374756, 0.004235263824462891, 0.004206592082977295, 0.004231167793273926, 0.004186111927032471, 0.004191967964172363, 0.004251776218414306, 0.0042763838768005375, 0.004208415985107422, 0.004210207939147949, 0.00417248010635376, 0.004167679786682129, 0.004281599998474121, 0.004186560153961181, 0.004161856174468994, 0.004145376205444336, 0.00414902400970459, 0.004139008045196534, 0.004169824123382568, 0.004226304054260254, 0.004223840236663818, 0.004171584129333496, 0.004153664112091064, 0.0042462401390075686, 0.004217696189880371, 0.004206719875335693, 0.004177087783813476, 0.004176415920257568, 0.00417139196395874, 0.004179647922515869, 0.004279263973236084, 0.004267039775848388, 0.004223968029022217, 0.004395008087158203, 0.004214079856872559, 0.00415174388885498, 0.004304512023925781, 0.004307231903076172, 0.004298751831054688, 0.004291744232177735, 0.0042624959945678715, 0.004208032131195068, 0.004371391773223877, 0.004645887851715088, 0.004287839889526367, 0.004339360237121582, 0.004239359855651856, 0.004249407768249512, 0.004250847816467286, 0.004301727771759033, 0.004597856044769287, 0.004319200038909912, 0.0042741761207580565, 0.004247583866119385, 0.0042362561225891115, 0.0042772159576416014, 0.004200479984283447, 0.0042022719383239746, 0.0042211518287658695, 0.004203551769256591, 0.004212800025939941, 0.004211616039276123, 0.004482528209686279, 0.004399519920349121, 0.004362207889556885, 0.004348063945770263, 0.004335616111755371, 0.004325439929962158, 0.00430790376663208, 0.004347008228302002, 0.004351871967315674, 0.004298975944519043, 0.004265696048736572, 0.0042575359344482425, 0.004256192207336426, 0.004286431789398194, 0.004271488189697265, 0.0042659521102905275, 0.004231103897094727, 0.004194591999053955, 0.0042408638000488285, 0.004248288154602051, 0.004244768142700196, 0.004286655902862549, 0.004284512042999267, 0.004267839908599853, 0.004317984104156494, 0.004630623817443847, 0.004404223918914795, 0.004361120223999024, 0.004472608089447021, 0.004300127983093262, 0.004264832019805908, 0.004271615982055664, 0.004297215938568116, 0.004337183952331543, 0.004319712162017823, 0.004353119850158692, 0.004372831821441651, 0.0045294718742370605, 0.0043732161521911625, 0.00434335994720459, 0.004327936172485352, 0.0043580479621887205, 0.0042854399681091304, 0.004440383911132812, 0.0042648959159851075, 0.004585216045379639, 0.004305952072143555, 0.004355040073394776, 0.004298751831054688, 0.00429472017288208, 0.0043146882057189945, 0.004327040195465088, 0.0043216958045959476, 0.004300191879272461, 
0.0042607679367065426, 0.004236767768859864, 0.0042293438911437985, 0.004473184108734131, 0.004370431900024414, 0.0043450241088867185, 0.004277056217193604, 0.004288512229919434, 0.004241407871246338, 0.004235104084014892, 0.004231328010559082, 0.004234464168548584, 0.00424835205078125, 0.00434768009185791, 0.004351967811584472, 0.004319551944732666, 0.0042945919036865236, 0.004280320167541504, 0.0042741761207580565, 0.004388864040374756, 0.0043311681747436525, 0.004401504039764404, 0.0044421119689941405, 0.004456448078155518, 0.004354047775268555, 0.00437663984298706, 0.004398079872131347, 0.00440169620513916, 0.004299168109893799, 0.0043211841583251955, 0.00428272008895874, 0.004294400215148926, 0.004355967998504639, 0.004325503826141357, 0.004363552093505859, 0.004340640068054199, 0.004304224014282226, 0.0043433279991149905, 0.004328735828399658, 0.004341023921966553, 0.0044076800346374515, 0.0044067840576171875, 0.004483583927154541, 0.0044646401405334475, 0.0044028801918029785, 0.004411168098449707, 0.004458847999572754, 0.004395008087158203, 0.004333568096160889, 0.0043151359558105465, 0.004355999946594238, 0.004350048065185547, 0.004386784076690674, 0.004322368144989014, 0.004329823970794678, 0.004409984111785888, 0.004445631980895996, 0.004319808006286621, 0.004352000236511231, 0.004302847862243653, 0.00432476806640625, 0.004313695907592774, 0.004300159931182861, 0.004303071975708008, 0.004243872165679932, 0.004253183841705322, 0.0042806720733642575, 0.004387263774871826, 0.004338751792907715, 0.004295328140258789, 0.004282368183135986, 0.004235231876373291, 0.004272192001342773, 0.004399072170257568, 0.004405280113220215, 0.004357600212097168, 0.004321792125701904, 0.004294655799865723, 0.004241407871246338, 0.00445030403137207, 0.004321280002593994, 0.004341663837432862, 0.004339360237121582, 0.004358496189117432, 0.004343200206756592, 0.004272831916809082, 0.0042285118103027345, 0.0042091522216796875, 0.004238719940185547, 0.004260640144348144, 0.0043888001441955565, 0.004257408142089844, 0.004281856060028076, 0.004280320167541504, 0.00428326416015625, 0.004268032073974609, 0.004253920078277588, 0.004263584136962891, 0.004308544158935547, 0.0042739839553833005, 0.004241151809692383, 0.004256703853607178, 0.004206655979156494, 0.004317183971405029, 0.004261888027191162, 0.004255680084228516, 0.004250847816467286, 0.004250080108642578, 0.0048146238327026365, 0.004441696166992188, 0.0044345598220825195, 0.0043736639022827145, 0.004330175876617432, 0.0043292160034179685, 0.004351391792297364, 0.004365312099456787, 0.004376095771789551, 0.004321536064147949, 0.004396351814270019, 0.004246111869812012, 0.004253471851348877, 0.004272031784057617, 0.004254335880279541, 0.004287615776062011, 0.0043160319328308105, 0.004289599895477295, 0.004235328197479248, 0.004242335796356201, 0.00423635196685791, 0.004225759983062744, 0.004190176010131836, 0.004208576202392578, 0.00420688009262085, 0.004200191974639893, 0.0042293758392333985, 0.004204127788543701, 0.004356863975524902, 0.004304543972015381, 0.004286623954772949, 0.00433135986328125, 0.004283840179443359, 0.004255424022674561, 0.004264832019805908, 0.004290559768676758, 0.004244639873504639, 0.004254559993743897, 0.00424889612197876, 0.004225728034973145, 0.00430079984664917, 0.004232287883758545, 0.004273056030273437, 0.004218751907348633, 0.004276351928710937, 0.004227263927459717, 0.004206528186798095, 0.004222527980804443, 0.004294879913330078, 0.004306528091430664, 0.004278048038482666, 0.004270336151123047, 0.004274720191955566, 
0.004272064208984375, 0.004456448078155518, 0.00432092809677124, 0.004327775955200196, 0.004263872146606445, 0.004317215919494629, 0.004335455894470215, 0.004362304210662842, 0.004365920066833496, 0.004368927955627442, 0.004366655826568603, 0.004546495914459228, 0.0044728960990905765, 0.004404255867004395, 0.004371424198150634, 0.0043721599578857425, 0.004378943920135498, 0.004321280002593994, 0.004372479915618896, 0.004357120037078857, 0.004301536083221436, 0.004352287769317627, 0.004306687831878662, 0.004311359882354736, 0.004292416095733643, 0.004300640106201172, 0.004290592193603515, 0.004284927845001221, 0.00430463981628418, 0.004292672157287598, 0.004335552215576172, 0.004640704154968262, 0.004677279949188233, 0.004436192035675049, 0.004421472072601318, 0.004442463874816895, 0.004361760139465332, 0.004364480018615722, 0.004311327934265137, 0.00432480001449585, 0.004309567928314209, 0.004322624206542969, 0.004387519836425781, 0.004752639770507812, 0.004453120231628418, 0.004454400062561035, 0.004352000236511231, 0.004362400054931641, 0.004401343822479248, 0.0044150080680847165, 0.0043623681068420414, 0.0043946561813354495, 0.004365888118743897, 0.00449564790725708, 0.004370528221130371, 0.004358560085296631, 0.004349952220916748, 0.004329472064971924, 0.0043489599227905275, 0.004350783824920654, 0.0043805761337280275, 0.004323040008544922, 0.004303391933441162, 0.004388095855712891, 0.004346399784088135, 0.004348127841949463, 0.004366176128387451, 0.004337855815887451, 0.0044011201858520504, 0.00445030403137207, 0.0044011201858520504, 0.004380415916442871, 0.004487455844879151, 0.004238719940185547, 0.004418560028076172, 0.004325376033782959, 0.004327424049377441, 0.004407072067260742, 0.0044230718612670895, 0.004391456127166748, 0.0043526082038879395, 0.004356863975524902, 0.004320159912109375, 0.004358176231384277, 0.00435920000076294, 0.004303487777709961, 0.0042991042137146, 0.004339168071746826, 0.004407999992370605, 0.004357791900634765, 0.004317376136779785, 0.004330527782440185, 0.004455391883850098, 0.004349952220916748, 0.004325056076049804, 0.004370751857757568, 0.004298751831054688, 0.00430079984664917, 0.004349952220916748, 0.004353536128997803, 0.0043422718048095705, 0.004333568096160889, 0.004335807800292969, 0.00428659200668335, 0.004267583847045899, 0.004245312213897705, 0.004271999835968017, 0.004227136135101318, 0.004216896057128906, 0.004544159889221191, 0.004321951866149903, 0.004353919982910156, 0.004349855899810791, 0.004393184185028076, 0.0042897920608520506, 0.004253856182098389, 0.004356704235076904, 0.004285791873931885, 0.004251359939575195, 0.004260287761688232, 0.0044109120368957516, 0.004550687789916992, 0.004494272232055664, 0.0044646401405334475, 0.004382719993591308, 0.004354047775268555, 0.004370431900024414, 0.0044011521339416505, 0.004376575946807861, 0.004394783973693848, 0.004464896202087403, 0.004327167987823486, 0.004347328186035156, 0.004322015762329102, 0.004325439929962158, 0.004349120140075683, 0.0043290557861328125, 0.004512159824371338, 0.004407072067260742, 0.00436246395111084, 0.004396704196929932, 0.00444432020187378, 0.004368576049804687, 0.004356416225433349, 0.0043311362266540525, 0.0044132800102233884, 0.004372704029083252, 0.004480607986450195, 0.004398623943328858, 0.004340767860412598, 0.004271103858947754, 0.00425161600112915, 0.0042566399574279785, 0.004275199890136719, 0.004301407814025879, 0.004332223892211914, 0.004314559936523438, 0.00432371187210083, 0.004268159866333008, 0.004277184009552002, 0.00424022388458252, 
0.004263936042785644, 0.004280320167541504, 0.004273471832275391, 0.004272831916809082, 0.004230976104736328, 0.004464831829071045, 0.004299839973449707, 0.004405375957489014, 0.0042709121704101564, 0.004311295986175537, 0.0042657279968261715, 0.00424348783493042, 0.004257760047912598, 0.004300992012023926, 0.004289375782012939, 0.004339776039123535, 0.004423808097839355, 0.0043364157676696775, 0.004288512229919434, 0.004264992237091064, 0.004231647968292236, 0.004288095951080323, 0.00430131196975708, 0.0043441600799560545, 0.004353216171264648, 0.004291456222534179, 0.004278463840484619, 0.004491104125976563, 0.004753376007080078, 0.004593664169311523, 0.004481023788452149, 0.004380159854888916, 0.004393343925476074, 0.004376543998718262, 0.004365920066833496, 0.004360767841339111, 0.004487167835235595, 0.004316991806030274, 0.004317183971405029, 0.004471968173980713, 0.004403840065002442, 0.004335840225219726, 0.0043721280097961425, 0.004411744117736816, 0.004339072227478027, 0.004317279815673828, 0.004298624038696289, 0.004391583919525146, 0.00434991979598999, 0.004284448146820068, 0.004319039821624756, 0.004334815979003906, 0.004307936191558838, 0.004368383884429932, 0.004294015884399414, 0.004475520133972168, 0.004447487831115723, 0.004319583892822266, 0.004443967819213867, 0.0043935680389404295, 0.004339360237121582, 0.004527872085571289, 0.00443612813949585, 0.0044522881507873535, 0.004316736221313477, 0.004283584117889404, 0.004322624206542969, 0.004277760028839111, 0.004288735866546631, 0.004334400177001953, 0.004284095764160156, 0.004306975841522217, 0.004317247867584228, 0.0042575359344482425, 0.004249983787536621, 0.004291808128356934, 0.0043896641731262205, 0.004390912055969238, 0.004411456108093262, 0.004374464035034179, 0.004333568096160889, 0.0042782721519470214, 0.004263936042785644, 0.00442080020904541, 0.0042739839553833005, 0.0042557759284973145, 0.004234208106994629, 0.004237023830413819, 0.00421507215499878, 0.004227071762084961, 0.004246975898742676, 0.004223552227020264, 0.0042106881141662595, 0.00428223991394043, 0.004266111850738525, 0.004257791996002197, 0.004231423854827881, 0.00424729585647583, 0.004263936042785644, 0.004265183925628662, 0.004221439838409424, 0.004139488220214844, 0.004515615940093995, 0.004437727928161621, 0.004384960174560547, 0.004342144012451172, 0.004321568012237549, 0.004257023811340332, 0.004229599952697754, 0.004217951774597168, 0.004237855911254883, 0.004178304195404053, 0.004173823833465576, 0.00416153621673584, 0.004130655765533447, 0.004133024215698242, 0.004146592140197754, 0.0041027522087097165, 0.004261888027191162, 0.004154975891113281, 0.004190624237060547, 0.004238944053649902, 0.004235680103302002, 0.004268032073974609, 0.004257728099822998, 0.004255807876586914, 0.00427129602432251, 0.00419868803024292, 0.004172031879425049, 0.004132160186767578, 0.004186944007873535, 0.004164927959442138, 0.004165535926818848, 0.004167679786682129, 0.00428332805633545, 0.004233215808868408, 0.004206592082977295, 0.004192255973815918, 0.004190207958221436, 0.004222976207733154, 0.004268095970153808, 0.004292543888092041, 0.004242879867553711, 0.004217535972595215, 0.004206399917602539, 0.0042230401039123535, 0.004229119777679443, 0.0042509760856628415, 0.004212831974029541, 0.004342591762542725, 0.0043639039993286135, 0.004288127899169922, 0.004265600204467773, 0.004205440044403076, 0.004225024223327637, 0.004212736129760742, 0.004232319831848144, 0.00422979211807251, 0.004526303768157959, 0.004304895877838135, 0.0042559680938720704, 
0.004203680038452148, 0.00436902379989624, 0.004277728080749512, 0.004329631805419922, 0.00459449577331543, 0.005093535900115967, 0.004638368129730224, 0.00443609619140625, 0.004466752052307129, 0.004747263908386231, 0.005167359828948974, 0.004644608020782471, 0.004369887828826904, 0.0043771200180053715, 0.0043089919090271, 0.004314847946166992, 0.004347263813018799, 0.004295360088348388, 0.004265344142913819, 0.004254559993743897, 0.004270080089569092, 0.004243103981018066, 0.00427023983001709, 0.004208831787109375, 0.004257567882537842, 0.00428659200668335, 0.004257823944091797, 0.004285791873931885, 0.00432147216796875, 0.004314752101898194, 0.00429318380355835, 0.0042837438583374025, 0.004267007827758789, 0.0044514241218566895, 0.004437983989715576, 0.004403903961181641, 0.004408895969390869, 0.004395711898803711, 0.004463647842407227, 0.0044585919380187985, 0.004380928039550781, 0.004410336017608643, 0.004347040176391602, 0.004360864162445069, 0.004314976215362549, 0.0043146882057189945, 0.004282623767852783, 0.004278463840484619, 0.004296703815460205, 0.004257791996002197, 0.004252863883972168, 0.004303423881530761, 0.004303103923797608, 0.004256800174713134, 0.0042501120567321774, 0.00423308801651001, 0.0042082881927490235, 0.004248511791229248, 0.0042104959487915036, 0.004249792098999023, 0.004354112148284912, 0.004480768203735352, 0.004335807800292969, 0.004329311847686768, 0.0043101758956909176, 0.004395584106445313, 0.004325376033782959, 0.004440063953399658, 0.004407104015350342, 0.004360383987426758, 0.0043089919090271, 0.004225024223327637, 0.004255743980407714, 0.004403295993804931, 0.004248544216156006, 0.004301599979400635, 0.004267392158508301, 0.0043504638671875, 0.004260447978973389, 0.004282048225402832, 0.0042536001205444336, 0.004251455783843994, 0.004254144191741943, 0.00424073600769043, 0.004213568210601807, 0.004228799819946289, 0.0041799678802490236, 0.0043110399246215824, 0.004291584014892578, 0.004283167839050293, 0.004406943798065186, 0.004278367996215821, 0.004348383903503418, 0.004352000236511231, 0.004371520042419434, 0.004433856010437011, 0.004418208122253418, 0.004358496189117432, 0.004368383884429932, 0.004342944145202637, 0.004303552150726318, 0.004456672191619873, 0.004357471942901612, 0.00433190393447876, 0.004288735866546631, 0.004268032073974609, 0.0043089919090271, 0.004298751831054688, 0.004257023811340332, 0.004211455821990967, 0.004241024017333984, 0.004219264030456543, 0.0042211518287658695, 0.004419360160827637, 0.004287551879882812, 0.004288832187652588, 0.0042666239738464356, 0.004282368183135986, 0.004298848152160644, 0.004243360042572021, 0.004357535839080811, 0.004286943912506103, 0.004305024147033692, 0.004335296154022217, 0.004452672004699707, 0.004284416198730469, 0.0043069438934326175, 0.004280320167541504, 0.004415487766265869, 0.004220831871032715, 0.004415584087371826, 0.004404928207397461, 0.004359615802764893, 0.004303743839263916, 0.004281983852386474, 0.004254079818725586, 0.004332575798034668, 0.0042731199264526365, 0.004247200012207031, 0.004254047870635986, 0.004224544048309326, 0.004239583969116211, 0.004294655799865723, 0.0042575039863586425, 0.004282815933227539, 0.004354047775268555, 0.004241504192352295, 0.004208640098571777, 0.0044002881050109865, 0.004295519828796387, 0.004386816024780273, 0.00435814380645752, 0.004374303817749023, 0.0043788480758666995, 0.004612031936645508, 0.004353888034820556, 0.004317408084869385, 0.0042512001991271975, 0.004241856098175048, 0.004294655799865723, 0.0043207998275756835, 
0.00429260778427124, 0.004282847881317138, 0.004231167793273926, 0.004224224090576172, 0.004356927871704101, 0.0044338879585266115, 0.004382719993591308, 0.004441376209259033, 0.004285151958465576, 0.004282368183135986, 0.004290559768676758, 0.004314879894256592, 0.004272384166717529, 0.004229119777679443, 0.004206592082977295, 0.0042081279754638675, 0.004176256179809571, 0.004196479797363282, 0.004206592082977295, 0.004182015895843506, 0.004242911815643311, 0.004151199817657471, 0.004123263835906983, 0.004170783996582031, 0.004207583904266357, 0.004220928192138672, 0.004190207958221436, 0.00415667200088501, 0.004175807952880859, 0.0042668161392211915, 0.004263936042785644, 0.0043645758628845215, 0.004505760192871093, 0.00445363187789917, 0.0044941120147705075, 0.004421631813049317, 0.004345856189727783, 0.00427180814743042, 0.004299071788787842, 0.00430406379699707, 0.004260928153991699, 0.004259583950042725, 0.004233215808868408, 0.004235263824462891, 0.004376575946807861, 0.0045281281471252445, 0.004495520114898682, 0.00436025619506836, 0.004337440013885498, 0.00430079984664917, 0.00429260778427124, 0.0045088639259338375, 0.004315968036651611, 0.004235263824462891, 0.004244927883148193, 0.0042583680152893065, 0.004282368183135986, 0.0042782721519470214, 0.004276224136352539, 0.004218880176544189, 0.004203807830810547, 0.004207615852355957, 0.004208352088928223, 0.004429215908050537, 0.00433622407913208, 0.004326784133911133, 0.004238048076629639, 0.004282336235046386, 0.00430511999130249, 0.004368095874786377, 0.0044234881401062015, 0.00438483190536499, 0.004407423973083496, 0.004435391902923584, 0.004421887874603271, 0.0043953280448913576, 0.004398208141326904, 0.004391776084899902, 0.004440095901489258, 0.00438588809967041, 0.004367584228515625, 0.004300127983093262, 0.004278624057769775, 0.004283648014068603, 0.004290976047515869, 0.004286752223968506, 0.004257855892181396, 0.004245503902435303, 0.004230495929718018, 0.004273087978363037, 0.004241024017333984, 0.004263328075408935, 0.004507775783538818, 0.004328000068664551, 0.004269599914550781, 0.004481472015380859, 0.004398848056793213, 0.004297279834747315, 0.004267744064331055, 0.004288512229919434, 0.004288512229919434, 0.004249663829803467, 0.004306879997253418, 0.004271327972412109, 0.0041943359375, 0.0042360639572143555, 0.004263904094696045, 0.004214528083801269, 0.004217088222503662, 0.004218880176544189, 0.0043069438934326175, 0.004380671977996826, 0.004302847862243653, 0.004292096138000488, 0.00421120023727417, 0.0041840639114379885, 0.004225024223327637, 0.004145023822784424, 0.004198527812957763, 0.004206431865692139, 0.004382880210876465, 0.0043089919090271, 0.0042434558868408205, 0.00445030403137207, 0.004463935852050781, 0.004420447826385498, 0.004380512237548828, 0.004460544109344483, 0.004381887912750244, 0.004395455837249756, 0.004415135860443115, 0.004372767925262451, 0.004354656219482422, 0.004298111915588379, 0.004302752017974853, 0.004274655818939209, 0.0042640318870544435, 0.004220511913299561, 0.004200863838195801, 0.004210527896881104, 0.004210080146789551, 0.004233759880065918, 0.004277535915374756, 0.004271039962768555, 0.004376031875610351, 0.004254240036010742, 0.004316383838653564, 0.004248127937316894, 0.004215007781982422, 0.004204544067382812, 0.0042147841453552244, 0.004227071762084961, 0.0041940159797668455, 0.0042007360458374025, 0.004267615795135498, 0.004260255813598633, 0.004286111831665039, 0.004270080089569092, 0.004425055980682373, 0.004374176025390625, 0.004316160202026367, 
0.004272128105163574, 0.004288703918457031, 0.004265791893005371, 0.004315104007720947, 0.004272160053253174, 0.004316671848297119, 0.0043320322036743165, 0.004319231986999511, 0.004296160221099853, 0.004338208198547363, 0.004286464214324951, 0.004284416198730469, 0.004257791996002197, 0.00454150390625, 0.00427452802658081, 0.004382656097412109, 0.004530848026275635, 0.00446668815612793, 0.0043745279312133786, 0.004417535781860352, 0.0043024001121521, 0.004350399971008301, 0.004327424049377441, 0.00424563217163086, 0.004296576023101807, 0.00420579195022583, 0.004283167839050293, 0.004267871856689453, 0.00432758378982544, 0.0043376641273498535, 0.00430079984664917, 0.004270080089569092, 0.004254720211029053, 0.004279295921325684, 0.004320864200592041, 0.004596000194549561, 0.004356224060058594, 0.004313087940216064, 0.004325056076049804, 0.004308735847473145, 0.004377151966094971, 0.004378880023956299, 0.004308735847473145, 0.004304768085479737, 0.004374495983123779, 0.004267871856689453, 0.004296895980834961, 0.004366687774658203, 0.004411200046539307, 0.004368063926696778, 0.004331264019012451, 0.004352511882781983, 0.004523263931274414, 0.004498208045959472, 0.004490399837493896, 0.004428639888763428, 0.0044646081924438474, 0.0044440321922302245, 0.00441974401473999, 0.004290559768676758, 0.004517888069152832, 0.00445417594909668, 0.004497632026672364, 0.004507487773895264, 0.004411200046539307, 0.00444217586517334, 0.0046720638275146486, 0.0044152002334594724, 0.0044659838676452636, 0.004438720226287842, 0.004415487766265869, 0.004325376033782959, 0.004288512229919434, 0.004247519969940185, 0.004257120132446289, 0.004344511985778809, 0.004347904205322266, 0.00425164794921875, 0.004263264179229736, 0.004289184093475342, 0.004237311840057373, 0.004230336189270019, 0.004274144172668457, 0.004242080211639404, 0.004286719799041748, 0.004286208152770996, 0.004279776096343994, 0.00428217601776123, 0.004258304119110107, 0.004202112197875977, 0.004223775863647461, 0.004282368183135986, 0.00453004789352417, 0.004397183895111084, 0.0043786239624023435, 0.004296703815460205, 0.004263936042785644, 0.004331647872924805, 0.004425600051879883, 0.004467008113861084, 0.00436191987991333, 0.004352096080780029, 0.004315040111541748, 0.0043089919090271, 0.004353119850158692, 0.004350624084472656, 0.0043043198585510254, 0.004332352161407471, 0.004314752101898194, 0.004311423778533935, 0.004263872146606445, 0.004347968101501465, 0.004388864040374756, 0.004376575946807861, 0.004478975772857666, 0.004382719993591308, 0.004409215927124024, 0.004294303894042969, 0.004302720069885254, 0.004338272094726562, 0.004390912055969238, 0.004298751831054688, 0.004255712032318115, 0.004481247901916504, 0.004386112213134765, 0.004342688083648682, 0.004296703815460205, 0.004304895877838135, 0.0043069438934326175, 0.004310912132263184, 0.004253824234008789, 0.004229184150695801, 0.004314176082611084, 0.0043569917678833005, 0.0043151359558105465, 0.004263872146606445, 0.004400544166564942, 0.004316959857940674, 0.004242015838623047, 0.004243743896484375, 0.004318304061889648, 0.004322207927703857, 0.004236447811126709, 0.004261951923370361, 0.004249663829803467, 0.004291007995605469, 0.004354559898376465, 0.004337440013885498, 0.004331520080566406, 0.0045015039443969725, 0.004343455791473389, 0.004307295799255371, 0.004263936042785644, 0.00429260778427124, 0.004333568096160889, 0.0043207998275756835, 0.004285280227661133, 0.004384384155273437, 0.004308095932006836, 0.004307231903076172, 0.004417183876037597, 
0.004405280113220215, 0.004332255840301513, 0.00431657600402832, 0.004332223892211914, 0.004333536148071289, 0.004333695888519287, 0.004362016201019287, 0.004364511966705322, 0.00433462381362915, 0.00431766414642334, 0.0043320322036743165, 0.0043435201644897465, 0.004329760074615479, 0.0042782721519470214, 0.004558847904205322, 0.004368383884429932, 0.0043450560569763185, 0.0043383359909057615, 0.004364384174346924, 0.004383872032165528, 0.004389791965484619, 0.004317279815673828, 0.004403103828430175, 0.004342944145202637, 0.004221568107604981, 0.004395264148712158, 0.004450016021728516, 0.004415520191192627, 0.004390912055969238, 0.004440063953399658, 0.004368383884429932, 0.004314752101898194, 0.004299232006072998, 0.004327328205108643, 0.004338816165924072, 0.0043160319328308105, 0.004259359836578369, 0.0043422398567199705, 0.004325376033782959, 0.00435814380645752, 0.004347648143768311, 0.004318912029266358, 0.004363935947418213, 0.004320159912109375, 0.004331232070922852, 0.0043318080902099605, 0.0045710082054138184, 0.0043721280097961425, 0.004407551765441895, 0.00439958381652832, 0.004386559963226318, 0.004349567890167236, 0.004421440124511719, 0.004364863872528076, 0.0043294401168823245, 0.004345600128173828, 0.00437391996383667, 0.004372352123260498, 0.00435097599029541, 0.004341824054718018, 0.004290207862854004, 0.004265696048736572, 0.004274816036224365, 0.004364223957061768, 0.0043786239624023435, 0.004441567897796631, 0.004468287944793701, 0.004328415870666504, 0.004327424049377441, 0.004341760158538818, 0.004319231986999511, 0.004384768009185791, 0.004408576011657715, 0.004408063888549804, 0.004394144058227539, 0.0044757437705993655, 0.004472832202911377, 0.004341760158538818, 0.004308351993560791, 0.004332159996032715, 0.004300672054290772, 0.00432755184173584, 0.004336832046508789, 0.004387423992156982, 0.004358367919921875, 0.004511744022369385, 0.004441088199615479, 0.004284160137176514, 0.004453728199005127, 0.004371359825134278, 0.004374559879302979, 0.0043663039207458496, 0.004327072143554688, 0.004288864135742187, 0.004327040195465088, 0.00433190393447876, 0.004337632179260254, 0.004327455997467041, 0.004319231986999511, 0.004304351806640625, 0.004317728042602539, 0.004323328018188476, 0.004359903812408447, 0.004360479831695557, 0.004333183765411377, 0.0042846078872680665, 0.0043006081581115725, 0.004355711936950684, 0.004309919834136963, 0.004357120037078857, 0.0044388799667358396, 0.00445363187789917, 0.0043976960182189944, 0.004395135879516602, 0.004393087863922119, 0.004552576065063476, 0.0043376641273498535, 0.004397056102752686, 0.00435814380645752, 0.004306399822235107, 0.004323488235473633, 0.004317696094512939, 0.004544223785400391, 0.004374688148498535, 0.0043574080467224125, 0.004348639965057373, 0.004327040195465088, 0.004350560188293457, 0.004404831886291504, 0.004329472064971924, 0.004278463840484619, 0.004261888027191162, 0.0043110399246215824, 0.0043251199722290036, 0.004335872173309326, 0.004290656089782715, 0.004298655986785888, 0.004284416198730469, 0.004255296230316162, 0.004233183860778808, 0.004194784164428711, 0.004253695964813233, 0.004296703815460205, 0.004253536224365234, 0.004251455783843994, 0.004233439922332763, 0.004230463981628418, 0.004293439865112304, 0.004339712142944336, 0.004348159790039063, 0.004331200122833252, 0.004456480026245117, 0.004385024070739746, 0.004335360050201416, 0.00430953598022461, 0.004335552215576172, 0.004311103820800782, 0.004345856189727783, 0.004370431900024414, 0.004349120140075683, 0.00429747200012207, 
0.0043162240982055665, 0.004358208179473877, 0.004346816062927246, 0.004286464214324951, 0.004294655799865723, 0.004345856189727783, 0.004993023872375488, 0.004866047859191895, 0.005238783836364746, 0.00474505615234375, 0.004386975765228272, 0.0043640961647033695, 0.004293856143951416, 0.004364384174346924, 0.00427455997467041, 0.004223487854003906, 0.004347904205322266, 0.004539872169494629, 0.005039936065673828, 0.0043916478157043455, 0.004363264083862305, 0.004250624179840088, 0.004255743980407714, 0.0046059517860412595, 0.004868095874786377, 0.004449952125549317, 0.004568736076354981, 0.004972959995269775, 0.0047927041053771975, 0.005046175956726074, 0.004390912055969238, 0.004390655994415283, 0.004372735977172851, 0.004359263896942139, 0.004405375957489014, 0.004379424095153808, 0.004397056102752686, 0.004400608062744141, 0.004399648189544678, 0.004345536231994629, 0.0043435201644897465, 0.004366079807281494, 0.004395872116088867, 0.004376575946807861, 0.004290559768676758, 0.004550655841827392, 0.0044421119689941405, 0.004551807880401612, 0.00448367977142334, 0.004372767925262451, 0.004324960231781006, 0.0043851838111877445, 0.00454041576385498, 0.004755199909210205, 0.004427008152008057, 0.004420735836029053, 0.0043272957801818845, 0.004356095790863037, 0.004503551959991455, 0.004362080097198486, 0.004282527923583984, 0.004272128105163574, 0.004261888027191162, 0.004314400196075439, 0.004266719818115234, 0.00429257583618164, 0.00425545597076416, 0.0043093118667602535, 0.004288512229919434, 0.004331776142120361, 0.004296448230743408, 0.004700160026550293, 0.004585279941558838, 0.0043328962326049806, 0.004370336055755615, 0.004529088020324707, 0.0043705921173095704, 0.004347743988037109, 0.004259840011596679, 0.004333568096160889, 0.004280320167541504, 0.0043786239624023435, 0.004296224117279052, 0.004313568115234375, 0.0043069438934326175, 0.0042947521209716795, 0.004303967952728272, 0.004242239952087402, 0.004233535766601563, 0.004238336086273193, 0.004233920097351075, 0.004255136013031006, 0.004280608177185058, 0.004224480152130127, 0.004249663829803467, 0.0042310719490051265, 0.004201344013214111, 0.004403200149536133, 0.004263936042785644, 0.004331520080566406, 0.004347904205322266, 0.004286464214324951, 0.004357952117919922, 0.004325215816497803, 0.004333024024963379, 0.004364607810974121, 0.004306496143341065, 0.00433190393447876, 0.004304736137390137, 0.004309760093688965, 0.0043376960754394536, 0.004413440227508545, 0.004295807838439941, 0.0042462081909179685, 0.004225215911865234, 0.004391488075256348, 0.004448256015777588, 0.0043721280097961425, 0.00440496015548706, 0.004358784198760986, 0.004354112148284912, 0.004275296211242676, 0.004255648136138916, 0.0042956161499023435, 0.004259840011596679, 0.004454400062561035, 0.004363743782043457, 0.004321343898773193, 0.004298783779144287, 0.004313216209411621, 0.0044217281341552735, 0.004436192035675049, 0.004339903831481934, 0.004547840118408203, 0.004435904026031494, 0.004470880031585693, 0.004368447780609131, 0.004542943954467774, 0.005497920036315918, 0.004389823913574219, 0.004349279880523682, 0.004329951763153076, 0.00432966423034668, 0.004394752025604248, 0.004372543811798095, 0.004376319885253907, 0.004294303894042969, 0.004298975944519043, 0.004303328037261963, 0.0042510080337524415, 0.004463327884674072, 0.0043786239624023435, 0.004470240116119384, 0.004386911869049073, 0.004432320117950439, 0.004372096061706543, 0.004413824081420898, 0.004332831859588623, 0.004341663837432862, 0.004299935817718506, 
0.0042648639678955075, 0.004270080089569092, 0.00426470422744751, 0.0042576961517333985, 0.0042041921615600586, 0.004415584087371826, 0.004384511947631836, 0.004254303932189942, 0.004268032073974609, 0.0042782721519470214, 0.00447488021850586, 0.004363296031951905, 0.004402143955230713, 0.0044011521339416505, 0.0043615040779113766, 0.004350207805633545, 0.0044316802024841305, 0.004467360019683838, 0.0043745279312133786, 0.004562719821929932, 0.00440342378616333, 0.004446208000183105, 0.004339712142944336, 0.004339583873748779, 0.004348031997680664, 0.004540512084960938, 0.004458655834197998, 0.004495071887969971, 0.004425695896148682, 0.004386528015136719, 0.004303199768066406, 0.004421631813049317, 0.004354047775268555, 0.004368319988250733, 0.004349408149719239, 0.0043628478050231935, 0.004361279964447021, 0.004325376033782959, 0.004303808212280273, 0.004253503799438477, 0.00431328010559082, 0.004331520080566406, 0.0042741761207580565, 0.004288512229919434, 0.004317247867584228, 0.004280255794525146, 0.0043373122215271, 0.004338016033172607, 0.004376575946807861, 0.005183680057525635, 0.005041984081268311, 0.00445417594909668, 0.0044198079109191895, 0.004415487766265869, 0.0043110399246215824, 0.004326464176177979, 0.00435916805267334, 0.004342976093292236, 0.004382944107055664, 0.004547103881835938, 0.004407360076904297, 0.004411327838897705, 0.0043089919090271, 0.004283391952514649, 0.00425267219543457, 0.00429036808013916, 0.004237504005432129, 0.004248960018157959, 0.004235648155212403, 0.004339807987213134, 0.00427023983001709, 0.004235263824462891, 0.00421884822845459, 0.004336863994598389, 0.0042503361701965335, 0.0042614398002624515, 0.004337759971618652, 0.004239808082580567, 0.004472832202911377, 0.004294655799865723, 0.004303936004638672, 0.004208640098571777, 0.004323328018188476, 0.004336703777313233, 0.004301663875579834, 0.004271679878234863, 0.004271935939788818, 0.004331840038299561, 0.004333631992340088, 0.004276576042175293, 0.004267551898956299, 0.004293087959289551, 0.004316800117492676, 0.00437286376953125, 0.004339712142944336, 0.0043089919090271, 0.00436633586883545, 0.004546559810638427, 0.0044132800102233884, 0.004338880062103271, 0.0043366079330444335, 0.004355679988861084, 0.004356671810150146, 0.004384607791900635, 0.00442303991317749, 0.004368768215179443, 0.00452780818939209, 0.004411935806274414, 0.004454432010650634, 0.004327424049377441, 0.004341248035430908, 0.004329984188079834, 0.004356095790863037, 0.004314432144165039, 0.004501952171325684, 0.004479231834411621, 0.004304671764373779, 0.004284063816070557, 0.004534848213195801, 0.004357696056365967, 0.004593760013580322, 0.004423840045928955, 0.004394911766052246, 0.004438047885894776, 0.004429696083068847, 0.004350336074829102, 0.004314847946166992, 0.004273952007293701, 0.004483583927154541, 0.004319231986999511, 0.004553823947906494, 0.004445439815521241, 0.0044559998512268065, 0.004341856002807617, 0.004317215919494629, 0.004338912010192871, 0.004374303817749023, 0.004342751979827881, 0.004335231781005859, 0.005366144180297852, 0.004562943935394287, 0.004496575832366944, 0.004369215965270996, 0.004331520080566406, 0.004266496181488037, 0.004407040119171143, 0.004352000236511231, 0.004363840103149414, 0.004323520183563232, 0.004336927890777588, 0.004334432125091553, 0.004344031810760498, 0.00432528018951416, 0.004301152229309082, 0.004318880081176758, 0.0043151359558105465, 0.004321280002593994, 0.004298751831054688, 0.004263936042785644, 0.004339712142944336, 0.004249440193176269, 
0.0042436480522155765, 0.004295936107635498, 0.0044408001899719236, 0.004370016098022461, 0.0043373122215271, 0.004286464214324951, 0.004250368118286133, 0.004257791996002197, 0.004368383884429932, 0.004286303997039795, 0.004321280002593994, 0.0042245759963989256, 0.0042156481742858885, 0.004235136032104492, 0.004243328094482422, 0.00427782392501831, 0.004211135864257812, 0.004196352005004883, 0.004343808174133301, 0.004414944171905517, 0.0044774718284606935, 0.004670559883117676, 0.00428656005859375, 0.004248383998870849, 0.0043110399246215824, 0.004353151798248291, 0.004340191841125488, 0.004323520183563232, 0.004311071872711182, 0.004272319793701172, 0.004272128105163574, 0.0043069438934326175, 0.00428988790512085, 0.004280191898345947, 0.004305376052856445, 0.004319551944732666, 0.004388351917266845, 0.004364799976348877, 0.004389120101928711, 0.0043060479164123535, 0.004356736183166504, 0.004354047775268555, 0.004849343776702881, 0.004816415786743164, 0.004382656097412109, 0.004619135856628418, 0.0044924159049987795, 0.005227392196655274, 0.005246975898742676, 0.004525824069976807, 0.004439743995666504, 0.004383296012878418, 0.004359295845031738, 0.0044551358222961425, 0.004371840000152588, 0.004304800033569336, 0.004369279861450195, 0.004352191925048828, 0.004335423946380615, 0.004381696224212647, 0.004383743762969971, 0.004403200149536133, 0.004356095790863037, 0.004325376033782959, 0.004308191776275635, 0.00431609582901001, 0.00432316780090332, 0.004306464195251465, 0.004354527950286865, 0.0043639039993286135, 0.004363647937774658, 0.004322303771972656, 0.004984831809997559, 0.004399104118347168, 0.004360191822052002, 0.004339712142944336, 0.004351071834564209, 0.004612127780914307, 0.004411295890808105, 0.004428768157958984, 0.004388864040374756, 0.004395008087158203, 0.004361567974090576, 0.004344319820404053, 0.0044498882293701175, 0.0044548802375793456, 0.004349472045898438, 0.004342336177825928, 0.004280320167541504, 0.004282368183135986, 0.004296703815460205, 0.004347904205322266, 0.004355967998504639, 0.004391039848327637, 0.004327424049377441, 0.004322656154632568, 0.004366687774658203, 0.004353631973266601, 0.004299615859985352, 0.0042573437690734866, 0.004268127918243408, 0.004245728015899658, 0.0042434558868408205, 0.00432534408569336, 0.004313119888305664, 0.0043069438934326175, 0.004494751930236816, 0.004317791938781739, 0.004419583797454834, 0.004243328094482422, 0.004485856056213379, 0.005535999774932862, 0.0044124479293823245, 0.004338655948638916, 0.004325376033782959, 0.0043803520202636715, 0.004397215843200683, 0.0043647680282592775, 0.004378304004669189, 0.004351808071136475, 0.004325568199157715, 0.004352000236511231, 0.0043760638236999515, 0.004335296154022217, 0.004344639778137207, 0.004337247848510742, 0.004395264148712158, 0.0042947521209716795, 0.004296576023101807, 0.004325568199157715, 0.004281504154205322, 0.004257791996002197, 0.004209504127502441, 0.00420688009262085, 0.004249311923980713, 0.004485119819641113, 0.004330495834350586, 0.004199423789978027, 0.004263423919677735, 0.004297215938568116, 0.004265984058380127, 0.004294623851776123, 0.004300831794738769, 0.004253632068634034, 0.004244607925415039, 0.00421779203414917, 0.004280320167541504, 0.0042740478515625, 0.004306528091430664, 0.0042928318977355955, 0.004386752128601074, 0.004286848068237305, 0.004286079883575439, 0.0042417278289794925, 0.004315199851989746, 0.004322559833526612, 0.004325247764587402, 0.0042494721412658695, 0.00429363203048706, 0.00443775987625122, 0.004268288135528564, 
0.004305215835571289, 0.004353087902069092, 0.004344319820404053, 0.0043541440963745116, 0.004303967952728272, 0.0043376960754394536, 0.0043112320899963376, 0.004248032093048096, 0.004208896160125732, 0.004228096008300781, 0.004221951961517334, 0.0041662721633911135, 0.004394688129425049, 0.004239552021026612, 0.004270080089569092, 0.004247744083404541, 0.004253727912902832, 0.004282144069671631, 0.004431168079376221, 0.004268735885620117, 0.00425980806350708, 0.00424121618270874, 0.0041924800872802735, 0.0042143998146057125, 0.004218400001525879, 0.0042115521430969236, 0.004198592185974121, 0.004294464111328125, 0.00425164794921875, 0.004259840011596679, 0.004229119777679443, 0.004483071804046631, 0.004427616119384766, 0.0044074559211730955, 0.004333568096160889, 0.004265984058380127, 0.004261888027191162, 0.0042800002098083495, 0.00430463981628418, 0.004309599876403809, 0.004288671970367431, 0.004267360210418701, 0.0044026880264282225, 0.004297696113586426, 0.0042631678581237795, 0.004457215785980225, 0.00441315221786499, 0.0044520959854125975, 0.004483871936798096, 0.004556320190429688, 0.004634848117828369, 0.0044520959854125975, 0.004483327865600586, 0.004446208000183105, 0.004413440227508545, 0.0043821439743042, 0.0043853440284729005, 0.0043786239624023435, 0.004403232097625732, 0.004372543811798095, 0.004406911849975586, 0.00439961576461792, 0.004472608089447021, 0.004447487831115723, 0.0045493760108947755, 0.004419583797454834, 0.004481023788452149, 0.004557119846343994, 0.004566175937652588, 0.004452352046966553, 0.004495903968811035, 0.004423232078552246, 0.004388864040374756, 0.0043647360801696775, 0.004316703796386719, 0.004583583831787109, 0.004407616138458252, 0.0043745279312133786, 0.00440012788772583, 0.004522719860076904, 0.004380959987640381, 0.004404831886291504, 0.004433375835418701, 0.004450975894927978, 0.00450767993927002, 0.004339968204498291, 0.004283967971801758, 0.0042759041786193846, 0.004272895812988281, 0.004317024230957031, 0.004274335861206055, 0.004317183971405029, 0.004398335933685302, 0.004364352226257324, 0.004307648181915283, 0.004356095790863037, 0.004603456020355225, 0.004686367988586426, 0.0045583038330078126, 0.004434368133544922, 0.004374368190765381, 0.004419199943542481, 0.004396959781646729, 0.004366975784301758, 0.004269983768463135, 0.004259935855865479, 0.004316671848297119, 0.004418047904968261, 0.00464896011352539, 0.004419583797454834, 0.004409023761749268, 0.004369887828826904, 0.004430016040802002, 0.004403872013092041, 0.00437440013885498, 0.0043582720756530765, 0.004362239837646485, 0.004405248165130615, 0.004363967895507813, 0.0043496317863464355, 0.004348544120788575, 0.004290559768676758, 0.004254015922546386, 0.004334496021270752, 0.00431167984008789, 0.0042882561683654785, 0.00426639986038208, 0.004316287994384766, 0.004333568096160889, 0.004336383819580078, 0.0042919678688049315, 0.004240384101867676, 0.004269824028015136, 0.004447231769561768, 0.004337920188903809, 0.004293280124664307, 0.00424560022354126]",tokens/s,229.77929338800072,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,808.411136,673.05472,0.0,270.532608,250.474496,s,1,9.21711328125,9.21711328125,0.0,9.21711328125,9.21711328125,9.21711328125,9.21711328125,[9.21711328125],,kWh,2.4853827249974833e-06,2.646889431531596e-07,9.225007379966288e-07,3.6725724061472718e-06,,MB,1312.763904,687.734784,0.0,272.62976,241.723904,s,28,0.21758457660675048,0.007770877735955375,0.0001492135713483806,0.007737183809280396,0.007974217700958252,0.008084897541999817,0.008137071199417114,"[0.007815584182739258, 0.007799071788787842, 0.007690783977508545, 0.00753059196472168, 0.007678207874298096, 0.007560128211975097, 0.007662144184112549, 0.007572768211364746, 0.007766848087310791, 0.007656032085418701, 0.007802271842956543, 0.007650400161743164, 0.007702847957611084, 0.007656767845153809, 0.007967584133148194, 0.007683807849884033, 0.007749695777893067, 0.0077246718406677245, 0.007858240127563476, 0.008137408256530762, 0.007767263889312744, 0.007723584175109864, 0.007989696025848389, 0.007718656063079834, 0.00783616018295288, 0.007915711879730225, 0.007831488132476807, 0.008136159896850586]",tokens/s,32943.51149233808,kWh,2.232900434774226e-07,2.4624810481177162e-08,1.3735452627390865e-07,3.852693802325084e-07,tokens/kWh,664470142.5415773,MB,1359.396864,700.317696,0.0,285.212672,241.726464,s,28,10.127438049316405,0.3616942160470144,0.008256971029795756,0.3600748291015625,0.37261807556152343,0.37342464904785155,0.3785968655395508,"[0.3676387939453125, 0.35597369384765626, 0.35633831787109377, 0.35540509033203127, 0.35322161865234375, 0.3503753662109375, 0.34918841552734375, 0.3538110046386719, 0.3496291809082031, 0.35676025390625, 0.35971484375, 0.35413973999023435, 0.3562896118164062, 0.360434814453125, 0.3583786926269531, 0.3672514038085937, 0.3694538269042969, 0.36279586791992186, 0.36673013305664065, 0.35510922241210935, 0.368740966796875, 0.37305081176757815, 0.36092425537109374, 0.3690310363769531, 0.3705570983886719, 0.3804354248046875, 0.3736259460449219, 0.3724326171875]",tokens/s,174.18028048259146,kWh,1.0251765661730653e-05,1.1305826473219277e-06,4.24205373915454e-06,1.562440204820712e-05,tokens/kWh,4032154.306169379,,s,1764,10.111822904109983,0.005732325909359384,0.00020658308166618465,0.0057102878093719485,0.00594552321434021,0.006026956915855407,0.0063823145580291715,"[0.005417119979858398, 0.005708288192749023, 0.005718336105346679, 0.005760191917419433, 0.005653056144714355, 0.005656064033508301, 0.005755392074584961, 0.005750783920288086, 0.005710015773773194, 0.005697343826293945, 0.0056763520240783695, 0.005716671943664551, 0.00587116813659668, 0.005892543792724609, 0.006025216102600098, 0.0059391999244689945, 0.005895743846893311, 0.005953983783721924, 0.00582041597366333, 0.005752064228057861, 0.005837567806243896, 0.005715199947357178, 0.005776127815246582, 0.00566476821899414, 0.00591871976852417, 0.005713920116424561, 0.0058379840850830075, 0.005817183971405029, 0.007306591987609863, 0.005962687969207764, 0.005774176120758057, 0.0058271679878234865, 0.005867807865142822, 0.005834752082824707, 0.005799935817718506, 0.005703680038452149, 0.005687295913696289, 0.005716063976287842, 0.005719967842102051, 0.005804031848907471, 0.0057379198074340824, 0.005820511817932129, 0.005779935836791992, 0.0063628478050231935, 0.005941023826599121, 0.005894656181335449, 0.006176767826080322, 0.0058388481140136715, 0.005977344036102295, 
0.005792416095733642, 0.005815775871276856, 0.005863935947418213, 0.005820223808288574, 0.0056548800468444824, 0.00572979211807251, 0.005697152137756348, 0.005786464214324951, 0.005730303764343261, 0.005730303764343261, 0.005627903938293457, 0.005738495826721191, 0.005908480167388916, 0.005969056129455566, 0.0053628158569335935, 0.005683072090148926, 0.005723648071289063, 0.005700064182281494, 0.005697343826293945, 0.005652607917785645, 0.0056560959815979, 0.005775936126708984, 0.005690591812133789, 0.005769824028015137, 0.00572764778137207, 0.005742784023284912, 0.005677760124206543, 0.005697440147399902, 0.0057704000473022465, 0.005696224212646484, 0.005732511997222901, 0.005748703956604004, 0.005673247814178467, 0.005631103992462159, 0.005583136081695556, 0.005636767864227295, 0.00553334379196167, 0.005561408042907715, 0.005510079860687256, 0.00556441593170166, 0.005571807861328125, 0.005628352165222168, 0.0055008001327514645, 0.0055730881690979, 0.005474112033843994, 0.005585184097290039, 0.005581024169921875, 0.0055938239097595216, 0.005593247890472412, 0.0056003198623657225, 0.005598688125610351, 0.005572991847991943, 0.005511072158813477, 0.005576704025268555, 0.005720320224761963, 0.0057153282165527345, 0.00564195203781128, 0.005654848098754882, 0.0056548800468444824, 0.005643743991851807, 0.005612063884735108, 0.005591040134429932, 0.00556441593170166, 0.005694623947143555, 0.005685344219207764, 0.00576416015625, 0.005697184085845948, 0.005740575790405274, 0.005668863773345947, 0.005662720203399658, 0.0056442880630493165, 0.005678783893585205, 0.005720384120941162, 0.005660672187805176, 0.005623775959014893, 0.005615615844726562, 0.005617695808410645, 0.0054332160949707034, 0.0057627840042114254, 0.005875616073608398, 0.0056995201110839845, 0.0057391037940979, 0.005694816112518311, 0.005689248085021973, 0.005966239929199218, 0.005648799896240234, 0.005642240047454834, 0.0056053118705749515, 0.005716127872467041, 0.0056564159393310545, 0.005593088150024414, 0.0056258559226989744, 0.005548031806945801, 0.005588992118835449, 0.0055214080810546875, 0.005543871879577637, 0.0055931520462036135, 0.005545216083526611, 0.005670815944671631, 0.0055095357894897464, 0.005624224185943603, 0.005808159828186035, 0.005519360065460205, 0.005640192031860352, 0.005670911788940429, 0.005646143913269043, 0.005703199863433838, 0.005885759830474854, 0.005985119819641113, 0.005754464149475098, 0.005674848079681397, 0.0056365118026733396, 0.006010240077972412, 0.005576608180999756, 0.005532544136047363, 0.005674015998840332, 0.005585887908935547, 0.005699584007263184, 0.005627039909362793, 0.005559167861938477, 0.005544159889221191, 0.005567552089691162, 0.005446335792541504, 0.005494783878326416, 0.005446815967559814, 0.005561247825622558, 0.0063162240982055666, 0.005631840229034424, 0.005646240234375, 0.005552351951599121, 0.005584671974182129, 0.0055615358352661135, 0.005618624210357666, 0.00560319995880127, 0.005515327930450439, 0.005569536209106446, 0.005534848213195801, 0.005726016044616699, 0.005611487865447998, 0.005580832004547119, 0.005387807846069336, 0.005589119911193848, 0.005597184181213379, 0.005601280212402344, 0.0055623679161071774, 0.005623551845550537, 0.005543583869934082, 0.005551743984222412, 0.005536736011505127, 0.005574656009674072, 0.0056483840942382815, 0.005541888236999512, 0.005512735843658447, 0.0056315197944641114, 0.005563327789306641, 0.005591040134429932, 0.005524735927581787, 0.005505824089050293, 0.005562335968017578, 0.005525152206420899, 0.0055277118682861325, 
0.005523327827453613, 0.0054674878120422364, 0.005483039855957031, 0.00640064001083374, 0.006931488037109375, 0.006371551990509033, 0.005603328227996827, 0.0055361919403076176, 0.005574783802032471, 0.005566463947296142, 0.005702655792236328, 0.005564608097076416, 0.005506080150604248, 0.005563327789306641, 0.005509312152862549, 0.005531487941741943, 0.005518144130706787, 0.005564576148986817, 0.005646240234375, 0.005597119808197022, 0.005634335994720459, 0.005612383842468262, 0.005550975799560547, 0.005646336078643799, 0.005781504154205322, 0.005795839786529541, 0.005648096084594726, 0.005612095832824707, 0.005639904022216797, 0.005600575923919678, 0.00573305606842041, 0.005678304195404053, 0.005632959842681885, 0.005635935783386231, 0.005560128211975097, 0.005683040142059326, 0.005572959899902343, 0.005668863773345947, 0.005631775856018067, 0.005580863952636719, 0.005578783988952637, 0.005546112060546875, 0.005210912227630616, 0.005552224159240723, 0.0055140161514282224, 0.005586944103240967, 0.005552127838134766, 0.005505023956298828, 0.005598239898681641, 0.0055342397689819335, 0.005575104236602783, 0.005529600143432617, 0.005503007888793945, 0.00562172794342041, 0.005478400230407715, 0.005527616024017334, 0.005525440216064453, 0.00546611213684082, 0.005605375766754151, 0.005650335788726806, 0.005553631782531738, 0.005644512176513672, 0.005552864074707032, 0.00562553596496582, 0.0056685757637023925, 0.005588799953460693, 0.00563804817199707, 0.005582719802856445, 0.005636832237243652, 0.005623072147369385, 0.0056245441436767575, 0.005634175777435303, 0.0055847358703613285, 0.005684703826904297, 0.005702271938323975, 0.005724063873291016, 0.00556547212600708, 0.00559990406036377, 0.005635968208312988, 0.0055382399559021, 0.00556387186050415, 0.005532192230224609, 0.005615615844726562, 0.005647488117218017, 0.005593567848205566, 0.005624032020568848, 0.005726528167724609, 0.005740416049957275, 0.005693215847015381, 0.005697760105133056, 0.005629824161529541, 0.005580927848815918, 0.005590943813323974, 0.00564847993850708, 0.00563644790649414, 0.005660255908966064, 0.0056219840049743655, 0.005631872177124023, 0.0055807681083679195, 0.005690656185150147, 0.005673696041107177, 0.005639423847198486, 0.0056655359268188476, 0.0055623679161071774, 0.005528672218322754, 0.0052204160690307614, 0.005486368179321289, 0.005465343952178955, 0.00549187183380127, 0.005471072196960449, 0.005708672046661377, 0.0055071678161621096, 0.005453440189361572, 0.005492928028106689, 0.005438784122467041, 0.0054169921875, 0.005475488185882569, 0.005533376216888428, 0.005573823928833008, 0.005483007907867431, 0.005559840202331543, 0.005529920101165772, 0.005459680080413819, 0.0054438719749450686, 0.005577280044555664, 0.0054841599464416505, 0.005476672172546387, 0.005459199905395508, 0.005464799880981445, 0.0055493440628051754, 0.00556924819946289, 0.005509119987487793, 0.005595136165618897, 0.005476352214813233, 0.005543295860290527, 0.005563007831573486, 0.0055316481590271, 0.005567647933959961, 0.005678175926208496, 0.00603872013092041, 0.005711552143096924, 0.005782591819763184, 0.005807936191558838, 0.00571395206451416, 0.005711840152740478, 0.005570591926574707, 0.0056770238876342775, 0.005597184181213379, 0.00555622386932373, 0.00554531192779541, 0.005520031929016113, 0.005603328227996827, 0.005582560062408448, 0.0056239042282104496, 0.005658815860748291, 0.005563551902770996, 0.005571296215057373, 0.0055177597999572755, 0.005498176097869873, 0.005531167984008789, 0.005530623912811279, 0.005535583972930908, 
0.005486591815948487, 0.005511136054992676, 0.005578239917755127, 0.005531295776367188, 0.005527840137481689, 0.005530303955078125, 0.0052061758041381835, 0.0055294399261474605, 0.005484320163726806, 0.0054501757621765136, 0.005487648010253907, 0.0054720320701599125, 0.005515391826629639, 0.005567679882049561, 0.005483935832977295, 0.005520703792572021, 0.005523744106292724, 0.005482143878936767, 0.005602079868316651, 0.005654751777648926, 0.00564569616317749, 0.005573503971099854, 0.005515007972717285, 0.005555808067321777, 0.006273183822631836, 0.0055352959632873535, 0.00545414400100708, 0.005485151767730713, 0.005467360019683838, 0.005463903903961182, 0.005452640056610107, 0.005488512039184571, 0.005573952198028564, 0.005508031845092774, 0.005460031986236572, 0.0054382081031799315, 0.005488992214202881, 0.005497439861297608, 0.005493824005126953, 0.005467072010040284, 0.005429408073425293, 0.005455615997314453, 0.005545407772064209, 0.005444255828857422, 0.005469920158386231, 0.005447296142578125, 0.005449632167816162, 0.0054709758758544925, 0.005437439918518067, 0.005514560222625732, 0.005564479827880859, 0.005540480136871338, 0.005585919857025146, 0.005520576000213623, 0.005517119884490967, 0.005717055797576904, 0.0055797119140625, 0.005590623855590821, 0.005892032146453857, 0.005560832023620605, 0.0056135358810424805, 0.005552127838134766, 0.0055881600379943846, 0.005573440074920655, 0.005625472068786621, 0.005597311973571778, 0.0055294399261474605, 0.0055689277648925785, 0.005485983848571777, 0.005167679786682129, 0.00590880012512207, 0.005552159786224365, 0.005509151935577393, 0.00546608018875122, 0.00548857593536377, 0.005520703792572021, 0.005574944019317627, 0.005528031826019287, 0.005555359840393066, 0.0055222721099853515, 0.0055316481590271, 0.005450079917907715, 0.0054590082168579105, 0.005480991840362549, 0.005666463851928711, 0.00576688003540039, 0.005653183937072754, 0.005622015953063965, 0.0057825279235839844, 0.005594143867492676, 0.0056273918151855465, 0.005622015953063965, 0.005564191818237305, 0.0055316481590271, 0.005574912071228027, 0.0056769919395446775, 0.005537951946258545, 0.00556441593170166, 0.005517151832580567, 0.00547430419921875, 0.005484543800354004, 0.005484543800354004, 0.005476352214813233, 0.005637119770050049, 0.005598239898681641, 0.005578720092773438, 0.005521056175231934, 0.005566559791564942, 0.0055175042152404786, 0.00548633623123169, 0.005496384143829346, 0.005571328163146973, 0.005915679931640625, 0.0055874881744384765, 0.0055177597999572755, 0.005961760044097901, 0.005742720127105713, 0.006020959854125977, 0.005695487976074219, 0.005629951953887939, 0.005646336078643799, 0.005664415836334228, 0.00568339204788208, 0.005650239944458008, 0.005605599880218506, 0.005675136089324951, 0.005910304069519043, 0.005703872203826905, 0.005617695808410645, 0.00561897611618042, 0.005648928165435791, 0.005800127983093262, 0.005464223861694336, 0.005706111907958985, 0.005662432193756103, 0.005576863765716553, 0.005619711875915527, 0.005604864120483399, 0.005622047901153565, 0.0056562237739562985, 0.005599967956542969, 0.005623360157012939, 0.0055584640502929685, 0.005498976230621338, 0.005536767959594726, 0.005470816135406494, 0.00549724817276001, 0.005566463947296142, 0.005502975940704345, 0.005522463798522949, 0.005485119819641113, 0.005470719814300537, 0.005795743942260742, 0.005549503803253174, 0.00550275182723999, 0.005524256229400635, 0.005479743957519531, 0.005522111892700196, 0.00547430419921875, 0.005505023956298828, 0.005665855884552002, 
0.005585855960845947, 0.005586944103240967, 0.005524576187133789, 0.005456064224243164, 0.0054975681304931644, 0.0054222078323364255, 0.005436416149139404, 0.005640063762664795, 0.005488639831542969, 0.005484543800354004, 0.005550079822540284, 0.005488128185272217, 0.005494719982147217, 0.00549126386642456, 0.005509119987487793, 0.005508831977844238, 0.005439648151397705, 0.005511295795440674, 0.00549235200881958, 0.0054971518516540524, 0.005539904117584229, 0.005566463947296142, 0.005578752040863037, 0.0055808000564575196, 0.005551167964935303, 0.00561023998260498, 0.005586400032043457, 0.005546144008636475, 0.005544511795043945, 0.005492735862731934, 0.005567808151245118, 0.005505727767944336, 0.00547219181060791, 0.005580863952636719, 0.005221759796142578, 0.0055445761680603025, 0.005529407978057861, 0.005553567886352539, 0.005536543846130371, 0.005477983951568604, 0.00546284818649292, 0.005588575839996338, 0.005484543800354004, 0.00551632022857666, 0.005479040145874023, 0.005474559783935547, 0.005556320190429688, 0.005502272129058838, 0.005520063877105713, 0.005552192211151123, 0.005492159843444824, 0.005521920204162598, 0.005543935775756836, 0.005609280109405518, 0.0056934719085693355, 0.0057259202003479, 0.005757472038269043, 0.005659552097320557, 0.005642271995544433, 0.005624735832214355, 0.00555014419555664, 0.005611519813537597, 0.005765120029449463, 0.005774464130401611, 0.00571673583984375, 0.00572380781173706, 0.005736927986145019, 0.005805247783660888, 0.0057292799949646, 0.005760928153991699, 0.005801568031311035, 0.005632319927215576, 0.005586944103240967, 0.005640192031860352, 0.0055808000564575196, 0.0056555838584899905, 0.0055593280792236325, 0.005603295803070068, 0.005664735794067383, 0.0056852798461914066, 0.005847008228302002, 0.005785183906555176, 0.005767839908599854, 0.005670656204223632, 0.0056910080909729, 0.005650911808013916, 0.005670815944671631, 0.005839039802551269, 0.005700960159301758, 0.0057656002044677735, 0.005693439960479736, 0.006318079948425293, 0.005736447811126709, 0.005945600032806396, 0.005795584201812744, 0.005688767910003662, 0.005775936126708984, 0.0054249920845031736, 0.005776639938354492, 0.005703584194183349, 0.005743231773376465, 0.00566703987121582, 0.005600768089294434, 0.005644991874694825, 0.0056349759101867675, 0.005669151782989502, 0.00568998384475708, 0.005613664150238037, 0.005623712062835694, 0.005574912071228027, 0.005619455814361572, 0.005560287952423096, 0.005629983901977539, 0.0055808000564575196, 0.005590144157409668, 0.005612736225128174, 0.00558409595489502, 0.005695775985717773, 0.00567519998550415, 0.0056483840942382815, 0.005740543842315674, 0.005650207996368408, 0.005688767910003662, 0.00565123176574707, 0.005762879848480225, 0.005961919784545898, 0.005715968132019043, 0.005761151790618897, 0.005750559806823731, 0.005712160110473633, 0.005725056171417236, 0.00577132797241211, 0.005773888111114502, 0.0057429118156433106, 0.005764895915985107, 0.005783775806427002, 0.005705728054046631, 0.005746016025543213, 0.005876319885253906, 0.005647456169128418, 0.00568828821182251, 0.005656576156616211, 0.00564851188659668, 0.005742464065551758, 0.0057274880409240725, 0.005773888111114502, 0.005691679954528808, 0.0057588801383972165, 0.005698592185974121, 0.00568828821182251, 0.00575212812423706, 0.005814496040344238, 0.005859807968139648, 0.0058197760581970214, 0.0058475837707519535, 0.005771135807037354, 0.0057571520805358885, 0.005683616161346435, 0.005631584167480469, 0.005673120021820069, 0.005327648162841797, 
0.005668511867523193, 0.005621856212615967, 0.005684447765350342, 0.005882336139678955, 0.0056648321151733395, 0.005720320224761963, 0.005984096050262451, 0.005736063957214355, 0.005621344089508057, 0.005585631847381592, 0.005595359802246094, 0.00557366418838501, 0.005608511924743652, 0.005605152130126953, 0.005611648082733154, 0.005705344200134277, 0.005565120220184326, 0.00566044807434082, 0.0056011838912963866, 0.0056863040924072265, 0.0058213438987731935, 0.00562172794342041, 0.005589087963104248, 0.0055924801826477055, 0.005604000091552735, 0.005576223850250244, 0.005507487773895264, 0.005591040134429932, 0.005568511962890625, 0.005668735980987548, 0.005670752048492432, 0.0055883522033691405, 0.005587615966796875, 0.00555238389968872, 0.0055780158042907715, 0.005561215877532959, 0.00555401611328125, 0.005783552169799804, 0.005586944103240967, 0.005647712230682373, 0.0056080961227416995, 0.005624927997589111, 0.00566374397277832, 0.005633952140808106, 0.005643807888031006, 0.005576288223266602, 0.005569407939910889, 0.005656735897064209, 0.00559497594833374, 0.005597184181213379, 0.005549824237823487, 0.00556876802444458, 0.005566463947296142, 0.005525536060333252, 0.005567552089691162, 0.005505951881408691, 0.005485856056213379, 0.005552864074707032, 0.005519264221191407, 0.005530784130096436, 0.005594399929046631, 0.005527200222015381, 0.00536729621887207, 0.005548543930053711, 0.005571775913238526, 0.00565123176574707, 0.005638112068176269, 0.005636159896850586, 0.005619296073913574, 0.0055931200981140135, 0.005603136062622071, 0.005610047817230224, 0.0056852478981018065, 0.005588992118835449, 0.005681151866912842, 0.005643519878387451, 0.005579520225524903, 0.005582719802856445, 0.005488768100738526, 0.00551478385925293, 0.005513696193695068, 0.005478208065032959, 0.00553388786315918, 0.005507391929626465, 0.005473983764648437, 0.005515200138092041, 0.005478623867034912, 0.005511168003082275, 0.005797088146209717, 0.005681375980377198, 0.005642655849456787, 0.005571775913238526, 0.005784383773803711, 0.005750783920288086, 0.005731455802917481, 0.005833600044250488, 0.00574780797958374, 0.005812511920928955, 0.005714623928070068, 0.005754816055297852, 0.005668863773345947, 0.005679103851318359, 0.0056505918502807615, 0.005601119995117187, 0.005640192031860352, 0.005670368194580078, 0.0056243519783020016, 0.005703968048095703, 0.00574454402923584, 0.005792928218841553, 0.005745408058166504, 0.0057242240905761715, 0.005678592205047607, 0.005693600177764892, 0.005681344032287598, 0.005677248001098633, 0.005680831909179687, 0.005651743888854981, 0.0057086081504821775, 0.005805280208587647, 0.005747519969940186, 0.005687295913696289, 0.0056241598129272465, 0.005723904132843018, 0.005690464019775391, 0.005389215946197509, 0.005666816234588623, 0.005654655933380127, 0.005690303802490235, 0.00566476821899414, 0.005830751895904541, 0.005793791770935058, 0.005819231986999512, 0.005818399906158447, 0.005822783946990967, 0.005734047889709473, 0.005766560077667236, 0.006083168029785156, 0.005620800018310547, 0.005661632061004639, 0.005586495876312256, 0.005588672161102295, 0.005569439888000488, 0.005598336219787598, 0.005668672084808349, 0.005632095813751221, 0.005671743869781494, 0.005666816234588623, 0.0058429441452026365, 0.005768832206726074, 0.005751296043395996, 0.005887872219085694, 0.005758975982666016, 0.005895808219909668, 0.0057448000907897945, 0.00574505615234375, 0.005711808204650879, 0.005713791847229004, 0.005650432109832764, 0.0056146240234375, 0.005653471946716309, 
0.005532767772674561, 0.005563295841217041, 0.005531136035919189, 0.005496575832366944, 0.005571328163146973, 0.005675007820129394, 0.005595104217529297, 0.005652575969696045, 0.005730239868164062, 0.005791776180267334, 0.005703392028808593, 0.005685376167297363, 0.005762688159942627, 0.0057502717971801755, 0.005775743961334228, 0.005728447914123535, 0.006043744087219238, 0.005730656147003174, 0.005776800155639648, 0.005677279949188233, 0.005725599765777588, 0.005721055984497071, 0.005787392139434814, 0.005761631965637207, 0.005693088054656983, 0.005808127880096436, 0.005830207824707031, 0.005630047798156739, 0.005914624214172363, 0.00597811222076416, 0.0058388481140136715, 0.005808032035827636, 0.005842432022094727, 0.00591318416595459, 0.005785056114196777, 0.00576691198348999, 0.005705535888671875, 0.005644512176513672, 0.005588895797729492, 0.005523583889007568, 0.005592864036560058, 0.005565375804901123, 0.005561855792999268, 0.005566976070404053, 0.005539487838745117, 0.005575007915496826, 0.005656767845153809, 0.0055697917938232425, 0.005671296119689941, 0.005703167915344238, 0.005783711910247803, 0.005736991882324218, 0.0057142720222473145, 0.0057844481468200684, 0.005681951999664306, 0.005726208209991455, 0.005662015914916992, 0.0057084159851074215, 0.005752352237701416, 0.005693727970123291, 0.005736703872680664, 0.005650015830993652, 0.005706143856048584, 0.005601247787475586, 0.005678880214691162, 0.0056302080154418946, 0.005789696216583252, 0.0057079682350158695, 0.005603136062622071, 0.005640063762664795, 0.005619840145111084, 0.0056442880630493165, 0.005712959766387939, 0.005636640071868896, 0.005681568145751953, 0.005598400115966797, 0.005696191787719727, 0.005635359764099121, 0.005599711894989014, 0.005583231925964356, 0.00557369613647461, 0.005587232112884521, 0.005579423904418945, 0.005527552127838135, 0.005625728130340576, 0.005617311954498291, 0.0056386241912841795, 0.006038784027099609, 0.005675007820129394, 0.005638912200927735, 0.005450143814086914, 0.00585536003112793, 0.005783103942871094, 0.005941535949707031, 0.005875743865966797, 0.005920735836029053, 0.005856544017791748, 0.005870336055755615, 0.005826464176177978, 0.005791584014892578, 0.005771520137786865, 0.005765024185180664, 0.005816415786743164, 0.005816319942474365, 0.005849408149719238, 0.00578323221206665, 0.0057325439453125, 0.005691199779510498, 0.005639359951019287, 0.005731135845184326, 0.005666143894195556, 0.005698207855224609, 0.00567248010635376, 0.005777408123016357, 0.005835487842559815, 0.005795584201812744, 0.005730463981628418, 0.005708831787109375, 0.005800992012023926, 0.005750304222106934, 0.005817632198333741, 0.006104127883911133, 0.005897759914398193, 0.005845119953155517, 0.005916927814483643, 0.005732351779937744, 0.005857279777526855, 0.005873663902282715, 0.0058839678764343265, 0.006039135932922364, 0.0058594560623168946, 0.005809663772583008, 0.005739232063293457, 0.0057933759689331055, 0.005753024101257324, 0.005935328006744385, 0.0057569599151611325, 0.005820576190948486, 0.005836607933044434, 0.005824287891387939, 0.005802207946777343, 0.005747968196868896, 0.005796319961547851, 0.005764863967895508, 0.005876255989074707, 0.005791103839874268, 0.005755712032318115, 0.005688191890716553, 0.0068720002174377445, 0.005893311977386475, 0.005855167865753174, 0.005753151893615723, 0.005767104148864746, 0.0054906878471374515, 0.005861472129821777, 0.005903999805450439, 0.005752768039703369, 0.005781599998474121, 0.006010528087615967, 0.005896480083465576, 0.0057675838470458984, 
0.0057794561386108395, 0.005783071994781494, 0.005833183765411377, 0.005793727874755859, 0.005859615802764892, 0.005876512050628662, 0.0058743681907653805, 0.005853504180908203, 0.005975200176239014, 0.005894015789031983, 0.005798367977142334, 0.005860896110534668, 0.005780096054077148, 0.005893504142761231, 0.0058410558700561525, 0.005970367908477783, 0.005968255996704102, 0.006044991970062256, 0.0059480957984924315, 0.0060965437889099125, 0.0060358080863952635, 0.006006944179534912, 0.005928544044494629, 0.006019328117370605, 0.0061521921157836916, 0.006012928009033203, 0.0058856639862060545, 0.005925151824951172, 0.005895328044891357, 0.005944159984588623, 0.006039552211761475, 0.0060078401565551755, 0.005945343971252442, 0.005876639842987061, 0.00578115177154541, 0.005842879772186279, 0.005837279796600342, 0.005945343971252442, 0.005826335906982422, 0.005795328140258789, 0.005866208076477051, 0.0057487359046936035, 0.005740543842315674, 0.005853216171264649, 0.005847008228302002, 0.005990655899047852, 0.0057439360618591305, 0.0057041277885437015, 0.005689343929290771, 0.0057118721008300784, 0.0056483840942382815, 0.005652703762054443, 0.005633823871612549, 0.0056211199760437015, 0.005581151962280273, 0.005395135879516602, 0.005774911880493164, 0.005652416229248047, 0.005711904048919678, 0.005683680057525634, 0.005687295913696289, 0.005691391944885254, 0.005642240047454834, 0.005720384120941162, 0.005721792221069336, 0.005760575771331787, 0.005709695816040039, 0.0056626877784729, 0.005713696002960205, 0.0056778879165649415, 0.005668992042541504, 0.005562431812286377, 0.005692575931549072, 0.005581471920013428, 0.005666816234588623, 0.005746047973632812, 0.005632639884948731, 0.005636000156402588, 0.005596640110015869, 0.005615295886993409, 0.005684160232543946, 0.005852191925048828, 0.007543168067932129, 0.006300288200378418, 0.006293439865112304, 0.006346240043640137, 0.005773791790008545, 0.00594975996017456, 0.005795584201812744, 0.005695487976074219, 0.005849088191986084, 0.005771135807037354, 0.005634175777435303, 0.005602655887603759, 0.005708447933197022, 0.005635359764099121, 0.0059419841766357425, 0.0056442880630493165, 0.005631680011749267, 0.005852863788604736, 0.005622079849243164, 0.0056835198402404785, 0.0055790400505065915, 0.005632863998413086, 0.005583744049072265, 0.005619711875915527, 0.005586847782135009, 0.005681248188018799, 0.0057118721008300784, 0.00563753604888916, 0.005695199966430664, 0.005682047843933106, 0.005695712089538574, 0.005742464065551758, 0.005681056022644043, 0.005742591857910156, 0.005685056209564209, 0.005845183849334717, 0.005484608173370362, 0.005860991954803467, 0.005896575927734375, 0.005812160015106201, 0.0058921918869018555, 0.005777376174926758, 0.005844607830047608, 0.005763455867767334, 0.005865471839904785, 0.005839039802551269, 0.00585913610458374, 0.005758495807647705, 0.005820032119750977, 0.005741312026977539, 0.005746111869812011, 0.005925504207611084, 0.00574399995803833, 0.005752831935882568, 0.005673984050750733, 0.005865119934082032, 0.005771520137786865, 0.005877024173736573, 0.005833119869232178, 0.005929120063781738, 0.005900191783905029, 0.005802303791046143, 0.005801663875579834, 0.0058037757873535155, 0.005890495777130127, 0.005797823905944824, 0.005838719844818116, 0.005767168045043946, 0.00583292818069458, 0.005928095817565918, 0.006142591953277588, 0.005882880210876465, 0.005848095893859863, 0.006234079837799072, 0.0059435200691223145, 0.005795616149902344, 0.005891200065612793, 0.005730559825897217, 
0.005701375961303711, 0.005729311943054199, 0.005684256076812744, 0.005792575836181641, 0.005781504154205322, 0.005779168128967285, 0.005694975852966308, 0.005880608081817627, 0.006268928050994873, 0.005707263946533203, 0.005742656230926514, 0.005687136173248291, 0.005695231914520264, 0.005619743824005127, 0.005623807907104492, 0.005825344085693359, 0.005740255832672119, 0.005749023914337158, 0.0058304319381713866, 0.005715968132019043, 0.005744607925415039, 0.00546611213684082, 0.005696608066558838, 0.005690271854400635, 0.005597184181213379, 0.005552224159240723, 0.005576064109802246, 0.005526048183441162, 0.005687295913696289, 0.005643839836120605, 0.005605120182037354, 0.0056685757637023925, 0.005751967906951904, 0.006071872234344482, 0.005613823890686035, 0.005590464115142822, 0.00555241584777832, 0.005521759986877441, 0.005670911788940429, 0.00558073616027832, 0.005541888236999512, 0.005558015823364258, 0.0055073280334472655, 0.005595136165618897, 0.005521471977233887, 0.005519167900085449, 0.005594336032867432, 0.005688223838806152, 0.005637824058532715, 0.005623424053192138, 0.0056350078582763675, 0.005561344146728516, 0.00553059196472168, 0.005596960067749023, 0.00555241584777832, 0.005553184032440185, 0.0055437440872192385, 0.005532192230224609, 0.005452127933502198, 0.005523615837097168, 0.005504992008209229, 0.005527135848999024, 0.005490335941314697, 0.0055875201225280766, 0.005860511779785156, 0.0055647997856140136, 0.005599775791168213, 0.0055214080810546875, 0.005629280090332031, 0.005534527778625488, 0.0057134079933166505, 0.005709887981414795, 0.005718368053436279, 0.005759935855865478, 0.005716991901397705, 0.0058429441452026365, 0.00572822380065918, 0.005736480236053467, 0.005734399795532226, 0.005740255832672119, 0.00581660795211792, 0.005664639949798584, 0.005750112056732178, 0.0057638721466064455, 0.005517312049865723, 0.005754816055297852, 0.0057283201217651365, 0.0057606081962585445, 0.005686880111694336, 0.005724991798400879, 0.005691391944885254, 0.005737887859344482, 0.005678815841674805, 0.005706624031066894, 0.0057630081176757815, 0.005734464168548584, 0.005789951801300049, 0.005750527858734131, 0.0057775359153747555, 0.005766655921936035, 0.005694975852966308, 0.005845248222351074, 0.005773503780364991, 0.005906752109527588, 0.0058919677734375, 0.006263040065765381, 0.005980160236358643, 0.006524640083312988, 0.006134047985076905, 0.00596566390991211, 0.006115615844726563, 0.006473599910736084, 0.005972256183624267, 0.005887167930603027, 0.006146848201751709, 0.006114528179168701, 0.005953760147094727, 0.00578931188583374, 0.0059699201583862304, 0.005925568103790283, 0.00601907205581665, 0.005836800098419189, 0.0059532799720764164, 0.005941728115081787, 0.005825759887695313, 0.005818943977355957, 0.005772799968719483, 0.0057935681343078615, 0.0056854400634765625, 0.005743135929107666, 0.0057704000473022465, 0.005769408226013183, 0.00574560022354126, 0.005801439762115479, 0.00592307186126709, 0.005808127880096436, 0.005808127880096436, 0.005705728054046631, 0.005809887886047364, 0.0057357120513916015, 0.005739520072937012, 0.005658624172210694, 0.005666368007659912, 0.005738944053649902, 0.0057729921340942385, 0.005769536018371582, 0.005680607795715332, 0.005614016056060791, 0.005783552169799804, 0.00574015998840332, 0.005783135890960693, 0.0058558077812194825, 0.005915999889373779, 0.005962944030761719, 0.006057663917541504, 0.005967072010040283, 0.00591542387008667, 0.005819871902465821, 0.005885983943939209, 0.005917183876037598, 0.006006368160247803, 
0.005847455978393555, 0.00588595199584961, 0.005814271926879883, 0.00582041597366333, 0.00603545618057251, 0.005840960025787353, 0.005763072013854981, 0.005889984130859375, 0.005805056095123291, 0.005825535774230957, 0.005814271926879883, 0.005732096195220947, 0.00581990385055542, 0.005837471961975098, 0.007214911937713623, 0.005933311939239502, 0.005912608146667481, 0.005815904140472412, 0.005851744174957275, 0.005781439781188965, 0.005834080219268799, 0.005776192188262939, 0.006031072139739991, 0.005953152179718018, 0.006115712165832519, 0.005946400165557861, 0.005948383808135987, 0.005838719844818116, 0.005875872135162353, 0.005896160125732422, 0.005942815780639648, 0.00589251184463501, 0.005976128101348877, 0.005946752071380615, 0.005986944198608398, 0.006097023963928223, 0.006098207950592041, 0.0059848642349243164, 0.005980160236358643, 0.005976064205169678, 0.005879903793334961, 0.006219456195831299, 0.005929183959960937, 0.005955584049224853, 0.00585481595993042, 0.005786335945129394, 0.005801663875579834, 0.005744671821594239, 0.005713056087493897, 0.005523935794830322, 0.005804255962371826, 0.005803455829620361, 0.0057472000122070314, 0.005791744232177734, 0.005676415920257569, 0.0056674561500549316, 0.00556819200515747, 0.005641791820526123, 0.005679872035980225, 0.005658112049102783, 0.005647039890289306, 0.005623616218566894, 0.0055848960876464845, 0.005642144203186035, 0.005594528198242187, 0.005636864185333252, 0.005642176151275635, 0.005609280109405518, 0.005760672092437744, 0.00566864013671875, 0.0057331199645996095, 0.005689343929290771, 0.0056936640739440915, 0.005578080177307129, 0.005554624080657959, 0.0057487359046936035, 0.00557260799407959, 0.00572211217880249, 0.005807136058807373, 0.0057494077682495115, 0.005738272190093994, 0.005681568145751953, 0.005791327953338623, 0.005765344142913818, 0.005739903926849365, 0.005751039981842041, 0.005880512237548828, 0.005785727977752685, 0.005803840160369873, 0.005864672183990479, 0.005845695972442627, 0.005933216094970703, 0.005928287982940674, 0.005938975811004638, 0.005793727874755859, 0.005767712116241455, 0.00572492790222168, 0.00574454402923584, 0.005695231914520264, 0.005678143978118897, 0.0057355518341064456, 0.0057190399169921875, 0.005705567836761474, 0.005651423931121826, 0.00583081579208374, 0.0057216320037841795, 0.005677536010742188, 0.005703519821166992, 0.005666816234588623, 0.00572211217880249, 0.005758975982666016, 0.005791679859161377, 0.005496223926544189, 0.006043712139129639, 0.005788447856903076, 0.005748032093048095, 0.005765759944915771, 0.0057400321960449216, 0.00572054386138916, 0.005662559986114502, 0.005758624076843261, 0.0057515201568603515, 0.005799647808074951, 0.005756063938140869, 0.005800896167755127, 0.005855231761932373, 0.005804031848907471, 0.005879807949066162, 0.005763072013854981, 0.005807328224182129, 0.00575984001159668, 0.005850719928741455, 0.005783872127532959, 0.005741631984710694, 0.006146175861358643, 0.005865632057189941, 0.005856991767883301, 0.005798495769500732, 0.005978623867034912, 0.0058510079383850095, 0.005840352058410644, 0.005781439781188965, 0.005849696159362793, 0.005798975944519043, 0.005780416011810303, 0.005844992160797119, 0.005943295955657959, 0.005871424198150635, 0.005695680141448975, 0.0058839359283447265, 0.005767424106597901, 0.00580790376663208, 0.0057272639274597165, 0.005953792095184326, 0.005716415882110595, 0.005866015911102295, 0.005835455894470215, 0.005833727836608887, 0.0057952961921691895, 0.005743231773376465, 0.00585209608078003, 
0.005759967803955078, 0.005818143844604492, 0.005711359977722168, 0.005802976131439209, 0.006612736225128174, 0.00612559986114502, 0.006526495933532715, 0.006050240039825439, 0.005809663772583008, 0.006027775764465332, 0.005751935958862304, 0.005782400131225586, 0.006044991970062256, 0.005876736164093017, 0.005533055782318115, 0.005964384078979492, 0.005862624168395996, 0.0058765439987182614, 0.005803487777709961, 0.0059212160110473635, 0.005863520145416259, 0.005981279850006104, 0.005952288150787353, 0.005965951919555664, 0.0059118719100952144, 0.005958335876464844, 0.005846655845642089, 0.0059129600524902345, 0.005844992160797119, 0.005765120029449463, 0.0058716158866882326, 0.00582041597366333, 0.00588159990310669, 0.005775936126708984, 0.005766367912292481, 0.005751391887664795, 0.0058245759010314944, 0.0057355198860168455, 0.005710559844970703, 0.005746528148651123, 0.005734367847442627, 0.005777440071105957, 0.005728415966033936, 0.005733856201171875, 0.005789728164672852, 0.005822976112365722, 0.005816319942474365, 0.005825952053070068, 0.006336192131042481, 0.005899295806884765, 0.005967967987060547, 0.005916448116302491, 0.005982207775115967, 0.006133664131164551, 0.005863520145416259, 0.005951231956481934, 0.005976160049438476, 0.005902495861053467, 0.005934271812438965, 0.005829440116882324, 0.005896224021911621, 0.00584496021270752, 0.005933055877685547, 0.005972000122070312, 0.005937119960784912, 0.0059023361206054685, 0.005908480167388916, 0.00589356803894043, 0.005924736022949219, 0.005800640106201172, 0.005880928039550782, 0.0058438401222229005, 0.005889440059661865, 0.00590502405166626, 0.005840415954589844, 0.0059683518409729, 0.005835807800292968, 0.005582623958587647, 0.005903359889984131, 0.0061145920753479, 0.005972959995269775, 0.005940991878509522, 0.00596556806564331, 0.00598041582107544, 0.0059699201583862304, 0.005963776111602783, 0.00593503999710083, 0.005976128101348877, 0.005955584049224853, 0.006090752124786377, 0.005986303806304932, 0.005975456237792969, 0.006012864112854004, 0.006068895816802979, 0.0058858880996704105, 0.00589734411239624, 0.005907392024993897, 0.006039775848388672, 0.005840288162231445, 0.0060381760597229, 0.006221216201782226, 0.007855455875396728, 0.006715424060821533, 0.007414720058441162, 0.006594783782958984, 0.0060282559394836425, 0.00596665620803833, 0.005885280132293701, 0.006173056125640869, 0.006070720195770264, 0.006070047855377197, 0.005982048034667968, 0.006048223972320557, 0.005961088180541993, 0.0060133118629455565, 0.005900288105010986, 0.0058072319030761715, 0.00591158390045166, 0.005850751876831054, 0.005906879901885986, 0.005840896129608154, 0.006143775939941407, 0.005961567878723145, 0.0061933121681213375, 0.005849440097808838, 0.006089983940124511, 0.0057972798347473145, 0.005856063842773438, 0.005836800098419189, 0.005893343925476074, 0.005764128208160401, 0.005854464054107666, 0.005788640022277832, 0.005901535987854004, 0.005814015865325928, 0.00585916805267334, 0.006048672199249268, 0.006051583766937256, 0.005989791870117188, 0.005911104202270508, 0.00551529598236084, 0.005862080097198487, 0.005979775905609131, 0.005927296161651612, 0.00597811222076416, 0.005936831951141358, 0.005912320137023926, 0.005888671875, 0.005994688034057618, 0.005827424049377442, 0.005965727806091309, 0.005864704132080078, 0.005938111782073974, 0.00585807991027832, 0.006020576000213623, 0.00601366376876831, 0.0059062399864196775, 0.005967872142791748, 0.005928671836853027, 0.005980127811431884, 0.005939743995666504, 0.005826335906982422, 
0.005769216060638428, 0.0057647042274475094, 0.005910943984985351, 0.005887680053710938, 0.005812543869018555, 0.00578710412979126, 0.005737088203430176, 0.005714911937713623, 0.005755839824676513, 0.005752831935882568, 0.005753920078277588, 0.005734560012817383, 0.005825056076049805, 0.005719488143920899, 0.0058083200454711915, 0.00587820816040039, 0.005839039802551269, 0.0057662720680236815, 0.005781375885009766, 0.005764095783233642, 0.006317984104156494, 0.0069755840301513675, 0.005922848224639892, 0.005938432216644287, 0.006090911865234375, 0.00582860803604126, 0.006042208194732666, 0.0066304001808166506, 0.005932000160217285, 0.0059391999244689945, 0.005941055774688721, 0.005923168182373047, 0.006035327911376953, 0.005862751960754395, 0.005931647777557373, 0.00637116813659668, 0.005983871936798096, 0.0058475837707519535, 0.0059351038932800295, 0.005864831924438476, 0.005927552223205566, 0.005554175853729248, 0.005909855842590332, 0.005857952117919922, 0.005892255783081055, 0.005986144065856933, 0.006284351825714111, 0.005859807968139648, 0.005945824146270752, 0.006060192108154297, 0.005977952003479004, 0.005855552196502686, 0.00613372802734375, 0.006018784046173096, 0.0059762558937072755, 0.00588153600692749, 0.005930240154266357, 0.005864319801330567, 0.005894144058227539, 0.005818367958068848, 0.00585913610458374, 0.0057981438636779785, 0.005829792022705078, 0.005818943977355957, 0.005815904140472412, 0.005881663799285889, 0.005843776226043701, 0.006123551845550537, 0.005870783805847168, 0.0060423040390014645, 0.0057868800163269046, 0.005845151901245117, 0.005720831871032715, 0.005818624019622803, 0.006741695880889893, 0.006475776195526123, 0.005912576198577881, 0.005817567825317383, 0.005837600231170654, 0.0058369278907775875, 0.005869376182556152, 0.005828671932220459, 0.00602726411819458, 0.005909632205963135, 0.005810463905334472, 0.005849952220916748, 0.00573033618927002, 0.005891808032989502, 0.005805247783660888, 0.005899360179901123, 0.005852831840515137, 0.005935167789459229, 0.00587775993347168, 0.005836607933044434, 0.005863103866577148, 0.005800447940826416, 0.0058484477996826175, 0.005892735958099365, 0.0059064321517944334, 0.005832672119140625, 0.005848159790039062, 0.005745759963989258, 0.005776512145996094, 0.005839680194854736]",tokens/s,174.44925773799122,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) 
ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 
574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,809.734144,673.05472,0.0,270.532608,250.474496,s,1,9.2024345703125,9.2024345703125,0.0,9.2024345703125,9.2024345703125,9.2024345703125,9.2024345703125,[9.2024345703125],,kWh,2.444902745831238e-06,2.6245123457329837e-07,0.0,2.7073539804045366e-06,,MB,1166.1312,685.637632,0.0,272.62976,241.723904,s,32,0.48172665500640877,0.01505395796895027,0.00012187819818072865,0.015046240329742432,0.015146150398254395,0.015172807836532593,0.015471358814239503,"[0.015595487594604492, 0.015053855895996093, 0.01503872013092041, 0.015008511543273926, 0.014970527648925782, 0.015017855644226074, 0.01500211238861084, 0.015063263893127442, 0.01499561595916748, 0.01511513614654541, 0.015195072174072266, 0.014922080039978027, 0.01504691219329834, 0.015062239646911621, 0.015112704277038574, 0.014956352233886718, 0.01505072021484375, 0.01515459156036377, 0.015088479995727539, 0.014915007591247559, 0.015059167861938477, 0.015045568466186524, 0.015147583961486816, 0.015100447654724122, 0.014910847663879394, 0.015133248329162598, 0.015078144073486328, 0.01489846420288086, 0.01497753620147705, 0.014950016021728515, 0.01504099178314209, 0.015019392013549805]",tokens/s,17005.494536919527,kWh,5.256407761390758e-07,5.795193480043237e-08,3.46876776302155e-07,9.304694872416632e-07,tokens/kWh,275129924.74250925,MB,1205.47328,698.220544,0.0,285.212672,241.726464,s,32,9.556181182861328,0.29863066196441657,0.005787232236621264,0.29829029846191407,0.30451669616699223,0.30942994689941405,0.3155907858276367,"[0.3114083557128906, 0.29995767211914065, 0.29213058471679687, 0.2944664306640625, 0.29024954223632815, 0.29509902954101563, 0.30114498901367187, 0.30218707275390627, 0.30164642333984376, 0.3004542541503906, 0.29701651000976564, 0.2997217407226562, 0.2986158752441406, 0.2968858947753906, 0.30246337890625, 0.3047448425292969, 0.3174698486328125, 0.3078112487792969, 0.2994154663085937, 0.2981750183105469, 0.3012138977050781, 0.29566336059570314, 0.2989388427734375, 0.2965276794433594, 0.2966563110351563, 0.29165484619140625, 0.29563214111328123, 0.29331182861328126, 0.29447366333007813, 0.29840557861328126, 0.2913766784667969, 
0.29126217651367187]",tokens/s,210.9629318891132,kWh,8.33129267070427e-06,9.188074905038074e-07,3.6874867205463997e-06,1.2937586881754474e-05,tokens/kWh,4869532.516055771,,s,2016,9.53892659044265,0.004731610411925922,0.00019797361941729504,0.004702336072921753,0.004898144006729126,0.004990624070167542,0.005392337679862974,"[0.004808479785919189, 0.004839263916015625, 0.00498041582107544, 0.004848351955413818, 0.004861728191375732, 0.004798655986785889, 0.005070623874664307, 0.0047719359397888185, 0.0047247037887573244, 0.004675039768218994, 0.004893152236938476, 0.004747263908386231, 0.00491315221786499, 0.004981887817382812, 0.004952864170074463, 0.00512937593460083, 0.005069375991821289, 0.005003647804260254, 0.00494812822341919, 0.004951903820037842, 0.004915200233459473, 0.004778048038482666, 0.004908991813659668, 0.00477785587310791, 0.004851647853851318, 0.00482528018951416, 0.00487772798538208, 0.005074751853942871, 0.004882783889770508, 0.00496284818649292, 0.004997151851654053, 0.005009280204772949, 0.004916736125946045, 0.004954432010650635, 0.004958399772644043, 0.005167103767395019, 0.0050135040283203125, 0.00498041582107544, 0.0049851517677307125, 0.004895904064178467, 0.004903776168823242, 0.00494598388671875, 0.004962240219116211, 0.005009727954864502, 0.004942975997924805, 0.0049526081085205075, 0.004920383930206299, 0.0049151678085327145, 0.004966591835021973, 0.00502569580078125, 0.004977375984191895, 0.004982431888580322, 0.005114399909973145, 0.005234943866729736, 0.004996448040008545, 0.004905568122863769, 0.005299935817718506, 0.004867392063140869, 0.004825183868408203, 0.004822944164276123, 0.004891263961791992, 0.00481324815750122, 0.004877984046936035, 0.0049153599739074705, 0.005146463871002197, 0.004918655872344971, 0.004917791843414307, 0.00488047981262207, 0.004978687763214112, 0.004768064022064209, 0.004751264095306396, 0.004771135807037354, 0.005026048183441162, 0.0048232641220092775, 0.004759552001953125, 0.004683775901794434, 0.0047307519912719725, 0.004882559776306152, 0.004683040142059326, 0.004805408000946045, 0.004732863903045654, 0.005074399948120117, 0.005212704181671143, 0.0047495999336242675, 0.004697535991668701, 0.004647200107574463, 0.004681183815002442, 0.004634624004364014, 0.004620128154754639, 0.004647391796112061, 0.004641119956970215, 0.004810175895690918, 0.004768191814422607, 0.0046975998878479005, 0.004766240119934082, 0.004720863819122315, 0.004692927837371826, 0.0047747201919555666, 0.004931327819824219, 0.004690464019775391, 0.004672383785247803, 0.004668255805969238, 0.00475267219543457, 0.004764416217803955, 0.004761568069458008, 0.004708159923553467, 0.00470854377746582, 0.004779039859771728, 0.004815392017364502, 0.004868512153625488, 0.0047883839607238765, 0.004742591857910156, 0.004688576221466064, 0.004718336105346679, 0.004669536113739014, 0.004689536094665527, 0.0046943039894104, 0.004625760078430176, 0.004614816188812256, 0.004679743766784668, 0.004618175983428955, 0.004607647895812988, 0.004565343856811523, 0.00454860782623291, 0.004533823966979981, 0.004526527881622314, 0.004497471809387207, 0.00465340805053711, 0.004602015972137451, 0.00456496000289917, 0.004632448196411133, 0.004613823890686035, 0.00462659215927124, 0.004755583763122559, 0.004634399890899658, 0.004581920146942139, 0.004565951824188233, 0.004557375907897949, 0.004533823966979981, 0.0045677762031555175, 0.004609983921051025, 0.004679359912872314, 0.004625823974609375, 0.004627359867095947, 0.004696191787719727, 0.004572991847991943, 0.0046070399284362795, 
0.004604032039642334, 0.0050225920677185055, 0.004648128032684326, 0.004612736225128174, 0.004578752040863037, 0.0046080961227416995, 0.004587615966796875, 0.004738944053649902, 0.004624864101409912, 0.004632736206054687, 0.004597824096679687, 0.00454860782623291, 0.004604000091552735, 0.004625984191894531, 0.004581727981567383, 0.00460319995880127, 0.004586175918579101, 0.004577631950378418, 0.004570784091949463, 0.004601535797119141, 0.004675903797149658, 0.004593664169311523, 0.004720096111297607, 0.004594207763671875, 0.004544191837310791, 0.004552735805511475, 0.004542496204376221, 0.004568448066711426, 0.004610400199890136, 0.004579872131347656, 0.004591616153717041, 0.004652959823608398, 0.004634848117828369, 0.004632287979125976, 0.004731008052825928, 0.004634655952453613, 0.004638400077819824, 0.004755296230316162, 0.004790495872497558, 0.004689407825469971, 0.004786943912506104, 0.004714144229888916, 0.004761600017547608, 0.0050316162109375, 0.004745823860168457, 0.004797920227050782, 0.004660799980163575, 0.004692671775817871, 0.0049780158996582035, 0.0047458882331848145, 0.004737023830413818, 0.004658783912658691, 0.004890495777130127, 0.004639264106750488, 0.0046244797706604, 0.004652639865875244, 0.004821216106414795, 0.004646783828735352, 0.004775455951690674, 0.004778048038482666, 0.004602528095245361, 0.004607967853546143, 0.004627456188201904, 0.004604608058929443, 0.004591936111450195, 0.004599711894989014, 0.004618336200714111, 0.004632736206054687, 0.004657120227813721, 0.004665472030639648, 0.004652800083160401, 0.004636672019958496, 0.004648575782775879, 0.004948351860046387, 0.004702400207519531, 0.004656767845153809, 0.004601823806762695, 0.0045979199409484865, 0.004581503868103028, 0.004562143802642822, 0.004554656028747559, 0.004574016094207763, 0.004572192192077637, 0.004539360046386718, 0.004544511795043945, 0.004585472106933594, 0.00455679988861084, 0.0046444802284240725, 0.004601215839385987, 0.004614975929260254, 0.00476793622970581, 0.005038047790527344, 0.004806687831878662, 0.004659167766571045, 0.004666975975036621, 0.004618688106536865, 0.004581471920013428, 0.004572864055633545, 0.004546720027923584, 0.0045517759323120115, 0.0046468801498413086, 0.004603072166442871, 0.0045545601844787596, 0.0045632319450378414, 0.0045577921867370605, 0.0049269118309021, 0.004936639785766602, 0.004806623935699463, 0.004760704040527344, 0.004768511772155762, 0.0047537279129028324, 0.004658880233764648, 0.0046284799575805665, 0.004681087970733642, 0.004564671993255615, 0.004606912136077881, 0.004597472190856934, 0.004542751789093018, 0.004560895919799805, 0.004562047958374024, 0.004585343837738037, 0.0045762557983398434, 0.004586783885955811, 0.004621024131774902, 0.004572991847991943, 0.004642975807189941, 0.00457862377166748, 0.0045677762031555175, 0.004587647914886475, 0.004601727962493896, 0.0045723519325256345, 0.004627264022827148, 0.004585472106933594, 0.004597184181213379, 0.004633088111877442, 0.004583487987518311, 0.004610047817230224, 0.004694015979766846, 0.004589568138122559, 0.004750559806823731, 0.004559296131134033, 0.004542816162109375, 0.004573184013366699, 0.004650496006011963, 0.004551519870758056, 0.004530848026275635, 0.004512095928192138, 0.0045062718391418455, 0.004494912147521972, 0.0045879678726196286, 0.004513696193695068, 0.00459171199798584, 0.00453657579421997, 0.004585216045379639, 0.004550655841827392, 0.00455679988861084, 0.004538368225097656, 0.004559040069580078, 0.004583231925964356, 0.0045277118682861325, 0.0045203518867492675, 
0.004535808086395264, 0.004517759799957275, 0.004520576000213623, 0.004516992092132568, 0.00458841609954834, 0.004497407913208008, 0.004505663871765137, 0.00456924819946289, 0.004666944026947021, 0.0045875201225280765, 0.0045583038330078126, 0.0045617280006408694, 0.004571135997772217, 0.004531936168670654, 0.004509407997131348, 0.0044973759651184084, 0.004553567886352539, 0.00455244779586792, 0.004538271903991699, 0.004571296215057373, 0.004566976070404053, 0.004571360111236572, 0.004658976078033447, 0.00462614393234253, 0.004695456027984619, 0.00468665599822998, 0.004632639884948731, 0.004617760181427002, 0.004664063930511475, 0.004687583923339843, 0.0048023362159729, 0.004820608139038086, 0.0046434240341186525, 0.004626304149627686, 0.004581503868103028, 0.004551807880401612, 0.004565695762634278, 0.004552159786224365, 0.004594528198242188, 0.004581247806549072, 0.00465715217590332, 0.004779615879058838, 0.00486627197265625, 0.004871935844421387, 0.004734623908996582, 0.004659999847412109, 0.004644864082336426, 0.004624000072479248, 0.005073279857635498, 0.00474015998840332, 0.004639200210571289, 0.004637119770050049, 0.005344799995422363, 0.005241343975067139, 0.0048436479568481445, 0.00469536018371582, 0.004667967796325684, 0.0047036480903625486, 0.004759359836578369, 0.004670239925384521, 0.004671487808227539, 0.004672959804534912, 0.004631103992462159, 0.004612287998199463, 0.004607808113098144, 0.005059711933135987, 0.004620800018310547, 0.004645247936248779, 0.004608223915100098, 0.004585440158843994, 0.004642911911010742, 0.005025407791137695, 0.004777408123016357, 0.004660192012786865, 0.00485916805267334, 0.004937471866607666, 0.004830143928527832, 0.004759391784667969, 0.004771872043609619, 0.004676864147186279, 0.004881279945373535, 0.004661248207092285, 0.004585504055023193, 0.0046468801498413086, 0.00487388801574707, 0.004773695945739746, 0.004759552001953125, 0.00471504020690918, 0.005235904216766357, 0.0048865280151367185, 0.004955167770385742, 0.0047408962249755856, 0.005442624092102051, 0.005430047988891602, 0.004896927833557129, 0.004806655883789063, 0.004792384147644043, 0.004774943828582763, 0.004760159969329834, 0.005037888050079346, 0.004844031810760498, 0.0048455681800842285, 0.004769792079925537, 0.004791935920715332, 0.0048501439094543455, 0.004779168128967285, 0.004655871868133545, 0.004611551761627197, 0.004602367877960205, 0.004636288166046143, 0.004625152111053467, 0.004605599880218506, 0.00459980821609497, 0.004603231906890869, 0.004555744171142578, 0.004576735973358155, 0.00467193603515625, 0.004629471778869629, 0.004608352184295655, 0.004602335929870606, 0.004726784229278564, 0.0046059517860412595, 0.0046590399742126466, 0.004718080043792725, 0.004779871940612793, 0.004825920104980469, 0.004726912021636963, 0.004712639808654785, 0.004736703872680664, 0.004703743934631348, 0.0047818880081176756, 0.004749279975891113, 0.004757599830627441, 0.005004992008209228, 0.004893695831298828, 0.004820831775665283, 0.004833248138427735, 0.004841504096984864, 0.00485097599029541, 0.004844287872314453, 0.004822048187255859, 0.004789535999298096, 0.004791967868804932, 0.004769792079925537, 0.004765183925628662, 0.004777791976928711, 0.00474732780456543, 0.004784224033355713, 0.004878880023956299, 0.004835680007934571, 0.004769440174102783, 0.004798304080963134, 0.004776192188262939, 0.004810111999511719, 0.004749855995178223, 0.004765376091003418, 0.004768159866333008, 0.004735104084014893, 0.004799935817718506, 0.00474505615234375, 0.004812640190124512, 
0.004778560161590576, 0.004831327915191651, 0.005039391994476318, 0.004879072189331055, 0.0048657598495483395, 0.004845888137817383, 0.004861023902893067, 0.0048542718887329105, 0.004822688102722168, 0.004877024173736573, 0.004958208084106445, 0.004927487850189209, 0.004761600017547608, 0.004753407955169678, 0.004711872100830078, 0.004696896076202392, 0.004740863800048828, 0.004696063995361328, 0.004658495903015137, 0.004660927772521973, 0.00486297607421875, 0.004662335872650147, 0.004701024055480957, 0.004665503978729248, 0.004659135818481445, 0.004632607936859131, 0.004638688087463379, 0.0047084159851074215, 0.004689856052398682, 0.004667391777038574, 0.0048347201347351074, 0.004760159969329834, 0.004739071846008301, 0.004814784049987793, 0.0047840638160705565, 0.004893375873565674, 0.004859968185424805, 0.00475654411315918, 0.004796832084655762, 0.004782495975494384, 0.004731200218200684, 0.004719776153564453, 0.00471724796295166, 0.0047532482147216795, 0.004861855983734131, 0.004794464111328125, 0.0047693119049072264, 0.004764256000518799, 0.004792384147644043, 0.00477891206741333, 0.004774816036224365, 0.004765696048736572, 0.004775712013244629, 0.004727200031280517, 0.004775743961334228, 0.004712512016296386, 0.004726719856262207, 0.0046774082183837894, 0.004702432155609131, 0.004902944087982177, 0.0048148159980773925, 0.004759071826934814, 0.00481935977935791, 0.004786303997039795, 0.004792255878448486, 0.004737088203430176, 0.0047431039810180665, 0.004826911926269531, 0.004775519847869873, 0.004761856079101562, 0.004757247924804688, 0.004731743812561035, 0.004722464084625244, 0.004739071846008301, 0.00481279993057251, 0.004816895961761475, 0.004769440174102783, 0.004734752178192139, 0.004694784164428711, 0.004760799884796142, 0.004833568096160889, 0.004774208068847656, 0.004874112129211426, 0.004769951820373535, 0.004738080024719238, 0.0048035202026367185, 0.004788479804992676, 0.004826687812805176, 0.0048182721138000486, 0.004803199768066407, 0.004839295864105224, 0.004796607971191406, 0.004745376110076904, 0.004761760234832764, 0.004846848011016845, 0.00488918399810791, 0.0047717437744140626, 0.004734623908996582, 0.004945087909698487, 0.004891456127166748, 0.004863903999328613, 0.004821055889129638, 0.004796448230743408, 0.004768767833709717, 0.0047523841857910155, 0.004808703899383545, 0.004827136039733886, 0.0047820801734924315, 0.004772895812988281, 0.004711103916168213, 0.004857823848724365, 0.004741439819335938, 0.004724736213684082, 0.004742784023284912, 0.004711935997009278, 0.0047010560035705565, 0.004743231773376465, 0.0048782720565795894, 0.004835328102111816, 0.0048148479461669925, 0.004759552001953125, 0.004747263908386231, 0.004746784210205078, 0.004880864143371582, 0.004876031875610352, 0.005043712139129639, 0.00482585620880127, 0.00497049617767334, 0.004866047859191895, 0.004843071937561035, 0.004825215816497802, 0.004856128215789795, 0.004653024196624756, 0.004614208221435547, 0.004603744029998779, 0.004810880184173584, 0.0047487678527832035, 0.004652800083160401, 0.004657440185546875, 0.004704768180847168, 0.004676703929901123, 0.004703328132629394, 0.004731935977935791, 0.004685599803924561, 0.004725152015686035, 0.004751232147216797, 0.004725183963775635, 0.004690207958221435, 0.004675648212432861, 0.004675519943237305, 0.0046694397926330565, 0.004810751914978028, 0.004726687908172608, 0.004714591979980469, 0.004642816066741944, 0.004655104160308838, 0.004651008129119873, 0.00465715217590332, 0.004667327880859375, 0.004728896141052246, 0.004760672092437744, 
0.005274687767028809, 0.005106624126434326, 0.005130559921264649, 0.004881984233856202, 0.004900032043457031, 0.0050425281524658205, 0.004794943809509278, 0.00475926399230957, 0.004750783920288086, 0.005008416175842285, 0.004819071769714355, 0.0047606401443481446, 0.004727519989013672, 0.004882688045501709, 0.00471827220916748, 0.004708096027374268, 0.004709760189056397, 0.004899712085723877, 0.004622335910797119, 0.0046118078231811525, 0.004617760181427002, 0.00459558391571045, 0.0045966081619262696, 0.004571135997772217, 0.0045632638931274415, 0.004630112171173096, 0.004650400161743164, 0.004647615909576416, 0.004616159915924073, 0.00462553596496582, 0.004653632164001465, 0.004819295883178711, 0.004704351902008057, 0.004685728073120117, 0.004634496212005615, 0.004857312202453614, 0.004571807861328125, 0.004560704231262207, 0.004574687957763672, 0.004604640007019043, 0.004560895919799805, 0.004602015972137451, 0.004607840061187744, 0.004589568138122559, 0.00456931209564209, 0.0045855679512023925, 0.0045933442115783695, 0.004668479919433594, 0.004707263946533203, 0.004649087905883789, 0.004643871784210205, 0.00459990406036377, 0.004597504138946534, 0.004590591907501221, 0.004620512008666992, 0.004599584102630615, 0.004623392105102539, 0.0046267518997192385, 0.0046332478523254395, 0.004690112113952637, 0.004701888084411621, 0.004872543811798096, 0.004669248104095459, 0.004790272235870361, 0.004843520164489746, 0.004745215892791748, 0.004734975814819336, 0.0046546878814697264, 0.004773600101470947, 0.004868927955627441, 0.004927231788635254, 0.005003776073455811, 0.004877280235290527, 0.004852384090423584, 0.0048023362159729, 0.004780128002166748, 0.004841375827789307, 0.004794655799865722, 0.0048065919876098635, 0.004771872043609619, 0.004809760093688965, 0.004858816146850586, 0.004806719779968262, 0.004781472206115723, 0.004749279975891113, 0.004747583866119385, 0.0047680001258850096, 0.004728960037231445, 0.00469974422454834, 0.0048438081741333005, 0.004757599830627441, 0.004708255767822266, 0.004683135986328125, 0.004702847957611084, 0.004683775901794434, 0.004702239990234375, 0.004673696041107177, 0.004706111907958985, 0.0046593918800354, 0.004671328067779541, 0.004677599906921387, 0.004680768013000488, 0.004684832096099854, 0.004726528167724609, 0.0046880321502685545, 0.004669375896453857, 0.004687456130981446, 0.004667871952056884, 0.004673024177551269, 0.004733439922332764, 0.004757503986358643, 0.0046919679641723635, 0.004693568229675293, 0.004882559776306152, 0.004761663913726807, 0.004712096214294434, 0.004711167812347412, 0.004720640182495117, 0.004728256225585937, 0.004764063835144043, 0.00474124813079834, 0.00472051191329956, 0.004694015979766846, 0.004717887878417969, 0.004682432174682618, 0.004697440147399902, 0.004907584190368653, 0.005017600059509277, 0.004911104202270508, 0.004828959941864014, 0.004861440181732178, 0.004895455837249756, 0.004966176033020019, 0.00483951997756958, 0.004815999984741211, 0.0047916479110717775, 0.004783711910247803, 0.004814911842346192, 0.00481279993057251, 0.0047755842208862306, 0.00472870397567749, 0.0047450242042541505, 0.004737696170806885, 0.004777984142303467, 0.00471449613571167, 0.004826816082000732, 0.004849599838256836, 0.004759200096130371, 0.004752096176147461, 0.00471014404296875, 0.004737279891967774, 0.0047012481689453125, 0.004664256095886231, 0.004892672061920166, 0.004633632183074951, 0.0046212801933288574, 0.004685823917388916, 0.004686848163604736, 0.0046739840507507326, 0.004622911930084229, 0.004612095832824707, 
0.004634624004364014, 0.004705728054046631, 0.004661151885986328, 0.004622911930084229, 0.004775455951690674, 0.0046835198402404785, 0.004630784034729004, 0.0047068800926208494, 0.004723008155822754, 0.004714335918426514, 0.0047235841751098635, 0.004711711883544922, 0.0047346558570861815, 0.004687871932983399, 0.004718016147613526, 0.004663904190063476, 0.004666528224945068, 0.004636864185333252, 0.004636608123779297, 0.004608704090118408, 0.004605152130126953, 0.004771679878234863, 0.004694975852966308, 0.00466534423828125, 0.004648064136505127, 0.004627232074737549, 0.004667488098144532, 0.004622015953063965, 0.004730879783630371, 0.004835360050201416, 0.005021247863769531, 0.004774528026580811, 0.004800191879272461, 0.004705503940582275, 0.004950175762176514, 0.004751264095306396, 0.004728640079498291, 0.004708831787109375, 0.004852255821228027, 0.004628543853759766, 0.004621664047241211, 0.004618752002716065, 0.004616288185119629, 0.004766047954559326, 0.004699295997619629, 0.004639232158660888, 0.004655104160308838, 0.004677631855010986, 0.0046523199081420896, 0.004661632061004639, 0.004723040103912354, 0.004661248207092285, 0.004663296222686767, 0.004816895961761475, 0.004614143848419189, 0.004830527782440186, 0.0046640000343322755, 0.004794112205505371, 0.004675839900970459, 0.00468396806716919, 0.004638527870178223, 0.0046179518699646, 0.00460214376449585, 0.004610047817230224, 0.0047680001258850096, 0.004725664138793945, 0.004629471778869629, 0.0046304001808166505, 0.004584447860717773, 0.00471347188949585, 0.004644576072692871, 0.004655424118041992, 0.004655263900756836, 0.004667200088500976, 0.004703584194183349, 0.0047276802062988284, 0.004679520130157471, 0.004687808036804199, 0.004653120040893554, 0.00467248010635376, 0.004647103786468506, 0.004641280174255371, 0.004713088035583496, 0.0047223358154296876, 0.004727871894836426, 0.004725279808044433, 0.004698527812957764, 0.004695648193359375, 0.004759967803955078, 0.004761280059814453, 0.004759424209594727, 0.004839424133300781, 0.0053688640594482424, 0.005164000034332275, 0.004837376117706299, 0.004839424133300781, 0.004816224098205566, 0.005002111911773682, 0.004892288208007812, 0.004805791854858399, 0.004791488170623779, 0.004752927780151367, 0.004822783946990967, 0.004790815830230713, 0.004782336235046387, 0.004847360134124756, 0.004900864124298096, 0.004927487850189209, 0.004865824222564697, 0.004788447856903076, 0.004810815811157227, 0.0048403840065002444, 0.004753503799438476, 0.004825888156890869, 0.004894720077514648, 0.0048477439880371094, 0.004847775936126709, 0.004774752140045166, 0.004932576179504394, 0.004778016090393067, 0.004808703899383545, 0.004898047924041748, 0.004837503910064697, 0.004776576042175293, 0.004726784229278564, 0.004663296222686767, 0.004671103954315185, 0.004592000007629394, 0.004599071979522705, 0.004666079998016357, 0.004566624164581299, 0.004546080112457275, 0.004606624126434326, 0.004658815860748291, 0.004628575801849365, 0.0046488637924194335, 0.004661824226379395, 0.004643871784210205, 0.0046622719764709475, 0.004667391777038574, 0.004642816066741944, 0.004681375980377198, 0.004745471954345703, 0.005009791851043701, 0.004992544174194336, 0.004933472156524658, 0.00486243200302124, 0.004830592155456543, 0.004821216106414795, 0.004799104213714599, 0.004759103775024414, 0.004737279891967774, 0.004726304054260254, 0.004684095859527588, 0.005152768135070801, 0.0056581439971923825, 0.0047784638404846196, 0.004728640079498291, 0.005021312236785889, 0.004922016143798828, 0.004763552188873291, 
0.004704256057739258, 0.0053002238273620605, 0.005609151840209961, 0.0055730881690979, 0.00478982400894165, 0.004788512229919433, 0.004677951812744141, 0.0048492798805236815, 0.004812863826751709, 0.004732831954956055, 0.0047719359397888185, 0.004830560207366943, 0.004753503799438476, 0.004686399936676026, 0.00506060791015625, 0.00482857608795166, 0.00474780797958374, 0.0047470078468322754, 0.004920735836029053, 0.004694943904876709, 0.004720640182495117, 0.004669568061828613, 0.004675456047058105, 0.004683360099792481, 0.004807072162628174, 0.0047879362106323245, 0.004738463878631592, 0.004755392074584961, 0.004766655921936035, 0.004716608047485351, 0.004753183841705322, 0.004833439826965332, 0.004673535823822021, 0.004865248203277588, 0.004725535869598388, 0.00471449613571167, 0.004695839881896972, 0.004699872016906738, 0.004675839900970459, 0.004651584148406982, 0.0046302080154418945, 0.00473302412033081, 0.004722464084625244, 0.004683487892150879, 0.004704383850097657, 0.004683680057525634, 0.004618015766143799, 0.004610432147979736, 0.004620512008666992, 0.004660511970520019, 0.005222208023071289, 0.0055510721206665035, 0.004871263980865479, 0.004795584201812744, 0.004791679859161377, 0.004786303997039795, 0.004768447875976562, 0.0050503678321838375, 0.004755328178405762, 0.004767903804779053, 0.0047513279914855955, 0.004998464107513428, 0.0047515521049499515, 0.004760384082794189, 0.004742335796356201, 0.00496895980834961, 0.00474451208114624, 0.004643743991851807, 0.0046284480094909665, 0.004864160060882568, 0.004742815971374512, 0.00505782413482666, 0.004837535858154297, 0.005036608219146728, 0.004997119903564453, 0.0048230400085449215, 0.004794367790222168, 0.004790272235870361, 0.004808703899383545, 0.004685823917388916, 0.004650239944458008, 0.004641407966613769, 0.004567391872406006, 0.004597023963928222, 0.004617887973785401, 0.00470911979675293, 0.004612192153930664, 0.004663296222686767, 0.004673855781555176, 0.005121888160705567, 0.004812575817108154, 0.004638271808624268, 0.004618752002716065, 0.004647071838378906, 0.004710239887237549, 0.004673600196838379, 0.004601600170135498, 0.004608384132385254, 0.004746367931365967, 0.005509823799133301, 0.0049214081764221195, 0.0069836158752441405, 0.005617695808410645, 0.006002655982971191, 0.005990399837493897, 0.007858176231384278, 0.00811353588104248, 0.006455423831939697, 0.004780320167541504, 0.0047391681671142575, 0.004847775936126709, 0.005078368186950684, 0.00489740800857544, 0.00501145601272583, 0.0049909758567810054, 0.004988448143005371, 0.00493830394744873, 0.005368768215179443, 0.005006303787231445, 0.004902048110961914, 0.004890783786773682, 0.004897151947021484, 0.004981056213378906, 0.00491321611404419, 0.005703423976898193, 0.004905151844024658, 0.004847328186035157, 0.004905248165130615, 0.004907008171081543, 0.005281792163848877, 0.005034016132354736, 0.004895999908447266, 0.004913055896759033, 0.004924223899841308, 0.004882431983947754, 0.00485811185836792, 0.00475929594039917, 0.0047300481796264645, 0.004761792182922363, 0.0047676157951354985, 0.004813568115234375, 0.004738944053649902, 0.00474124813079834, 0.004747168064117431, 0.004738143920898438, 0.004746240139007568, 0.004765952110290527, 0.004914976119995117, 0.0049008321762084965, 0.004737120151519775, 0.004646815776824951, 0.004777984142303467, 0.004876416206359863, 0.0048652801513671875, 0.004790719985961914, 0.005046239852905273, 0.005009632110595703, 0.004908383846282959, 0.00487286376953125, 0.004990911960601806, 0.004952127933502197, 
0.004865183830261231, 0.004865024089813232, 0.0048536000251770016, 0.004842720031738281, 0.004790080070495605, 0.004768735885620117, 0.0048005437850952145, 0.0056626877784729, 0.005396480083465576, 0.004833375930786133, 0.0047758398056030275, 0.00482041597366333, 0.0049301118850708005, 0.004755072116851807, 0.004735360145568848, 0.0046859197616577145, 0.004725791931152344, 0.0047003841400146485, 0.004732704162597657, 0.004739967823028565, 0.004740223884582519, 0.00480460786819458, 0.004847616195678711, 0.004718400001525879, 0.0046917757987976075, 0.004696383953094483, 0.004684927940368652, 0.0046908798217773435, 0.004738272190093994, 0.004722879886627197, 0.004700767993927002, 0.004671135902404785, 0.004700511932373047, 0.004759552001953125, 0.004775743961334228, 0.004712160110473633, 0.004946400165557861, 0.004683775901794434, 0.004676671981811523, 0.004733888149261475, 0.0047964158058166504, 0.0046859197616577145, 0.004697855949401856, 0.004905119895935058, 0.004767744064331054, 0.004715968132019043, 0.004761184215545654, 0.004783232212066651, 0.004793632030487061, 0.004917727947235107, 0.004939968109130859, 0.004840799808502197, 0.004845983982086181, 0.0047842879295349125, 0.004793600082397461, 0.0047767038345336915, 0.004794367790222168, 0.004730175971984863, 0.0046560959815979, 0.004689631938934326, 0.004775872230529785, 0.004648640155792237, 0.0046241598129272464, 0.0046208958625793455, 0.004788224220275879, 0.004869984149932861, 0.004716703891754151, 0.004701824188232422, 0.004675968170166016, 0.004626656055450439, 0.004690080165863037, 0.00469977617263794, 0.004714399814605713, 0.004751455783843994, 0.004745215892791748, 0.004734176158905029, 0.004733727931976318, 0.004719615936279297, 0.0048007678985595706, 0.004747615814208984, 0.004698527812957764, 0.004717984199523926, 0.004673664093017578, 0.004782559871673584, 0.005012288093566895, 0.004951295852661133, 0.004888768196105957, 0.004895296096801758, 0.004799839973449707, 0.004801055908203125, 0.004821152210235596, 0.004816160202026367, 0.004757503986358643, 0.004735680103302002, 0.004700160026550293, 0.00468387222290039, 0.004655007839202881, 0.004683775901794434, 0.004650015830993653, 0.004668384075164795, 0.004704224109649658, 0.004824927806854248, 0.005202335834503174, 0.004855584144592285, 0.004820032119750977, 0.004852896213531494, 0.004716224193572998, 0.00479753589630127, 0.004680704116821289, 0.004679488182067871, 0.004616191864013672, 0.0047597441673278805, 0.004679679870605469, 0.0047422399520874025, 0.004605184078216553, 0.004642464160919189, 0.004611968040466308, 0.004608128070831299, 0.004638976097106934, 0.004934495925903321, 0.0047190718650817875, 0.004690656185150147, 0.004738175868988037, 0.004755199909210205, 0.004802624225616455, 0.004680352210998535, 0.0046835517883300785, 0.004616543769836425, 0.004621568202972412, 0.004580192089080811, 0.004623583793640137, 0.004606112003326416, 0.004662975788116455, 0.004684031963348389, 0.00462611198425293, 0.004593855857849121, 0.004561439990997314, 0.004687871932983399, 0.004682943820953369, 0.004611072063446045, 0.0045669121742248535, 0.004554880142211914, 0.004700160026550293, 0.004805759906768799, 0.004575808048248291, 0.0050629119873046875, 0.004685887813568115, 0.004935776233673096, 0.005407328128814697, 0.004737023830413818, 0.004687871932983399, 0.004689792156219483, 0.004954239845275879, 0.004753407955169678, 0.004700160026550293, 0.0046713600158691405, 0.004655295848846435, 0.004820703983306885, 0.004595935821533203, 0.004705664157867432, 0.004617087841033936, 
0.004709727764129638, 0.004650527954101562, 0.00466815996170044, 0.004636127948760987, 0.00477455997467041, 0.004685823917388916, 0.004749311923980713, 0.004849792003631592, 0.004851103782653809, 0.005073376178741455, 0.00482912015914917, 0.004784192085266113, 0.004783584117889404, 0.004888991832733155, 0.004747392177581787, 0.004898240089416504, 0.0047346558570861815, 0.004854015827178955, 0.004854400157928467, 0.004810272216796875, 0.004855679988861084, 0.0050571198463439946, 0.004827199935913086, 0.004853856086730957, 0.0047532482147216795, 0.004775936126708984, 0.004726816177368164, 0.0047103681564331056, 0.004716320037841797, 0.0046778879165649415, 0.0046837120056152344, 0.004700511932373047, 0.004742559909820557, 0.004874527931213379, 0.00486300802230835, 0.004795360088348389, 0.004761792182922363, 0.004718400001525879, 0.004663296222686767, 0.00466534423828125, 0.0046406397819519045, 0.004610176086425781, 0.004757472038269043, 0.0046592321395874025, 0.004591904163360596, 0.004591328144073486, 0.004871551990509033, 0.0050366721153259275, 0.004747263908386231, 0.004507296085357666, 0.004647488117218017, 0.00466102409362793, 0.00464896011352539, 0.0045957121849060055, 0.0046221761703491215, 0.004577504158020019, 0.004554687976837158, 0.004606048107147217, 0.004596767902374267, 0.004590464115142822, 0.004579360008239746, 0.00456496000289917, 0.004576704025268555, 0.004596288204193115, 0.004622335910797119, 0.004605472087860107, 0.004693568229675293, 0.004598688125610351, 0.004643104076385498, 0.0045559039115905765, 0.0045567679405212405, 0.0045246720314025875, 0.004675583839416504, 0.004694015979766846, 0.004643136024475097, 0.004697792053222656, 0.004722784042358399, 0.004918752193450927, 0.00472054386138916, 0.0046926398277282715, 0.004751232147216797, 0.004681375980377198, 0.004671775817871094, 0.004646975994110108, 0.004765696048736572, 0.004915200233459473, 0.00478384017944336, 0.004861408233642578, 0.00495900821685791, 0.004808735847473144, 0.004739071846008301, 0.004726784229278564, 0.004844575881958008, 0.00474019193649292, 0.004876160144805909, 0.0048059201240539555, 0.004817920207977295, 0.004760287761688233, 0.004716703891754151, 0.004649824142456055, 0.004671487808227539, 0.004664959907531738, 0.0046265277862548825, 0.004641215801239013, 0.004624224185943603, 0.004671487808227539, 0.004838912010192871, 0.004694143772125244, 0.004655231952667236, 0.004695680141448975, 0.0046592001914978025, 0.004668032169342041, 0.004745215892791748, 0.004746880054473877, 0.004685791969299316, 0.004649343967437744, 0.004621920108795166, 0.004737760066986084, 0.004644896030426025, 0.004642623901367188, 0.004594751834869385, 0.0047296957969665526, 0.004630527973175049, 0.004615359783172608, 0.004592351913452149, 0.004808767795562744, 0.004673535823822021, 0.004859039783477783, 0.0046377601623535155, 0.0046262722015380855, 0.004749248027801514, 0.00464896011352539, 0.004604959964752197, 0.004668288230895996, 0.004629951953887939, 0.004620960235595703, 0.004691232204437256, 0.004725344181060791, 0.004824319839477539, 0.004782976150512696, 0.004709375858306885, 0.004656032085418701, 0.004632607936859131, 0.004638783931732178, 0.004648896217346191, 0.00466537618637085, 0.004900159835815429, 0.004750048160552979, 0.0048172159194946286, 0.004928927898406982, 0.004848991870880127, 0.004769792079925537, 0.004715456008911133, 0.004739071846008301, 0.0047636480331420894, 0.004885983943939209, 0.00478876781463623, 0.004734975814819336, 0.004738431930541992, 0.0048419198989868165, 0.004962495803833008, 
0.00493171215057373, 0.004816703796386719, 0.004750783920288086, 0.004747903823852539, 0.004797984123229981, 0.004795936107635498, 0.004815807819366455, 0.0047680001258850096, 0.004810400009155273, 0.00479036808013916, 0.00475551986694336, 0.004779776096343994, 0.004753344058990479, 0.004718815803527832, 0.005047711849212646, 0.005169407844543457, 0.005032288074493408, 0.004873824119567871, 0.0048475837707519535, 0.004747968196868896, 0.004779391765594483, 0.004904416084289551, 0.004813727855682373, 0.004863935947418213, 0.004735072135925293, 0.0046930241584777835, 0.0046564159393310545, 0.004692736148834228, 0.004759871959686279, 0.004711008071899414, 0.004680831909179687, 0.00467142391204834, 0.004631487846374511, 0.004693312168121338, 0.004628992080688477, 0.004585663795471192, 0.004570720195770264, 0.0046003518104553226, 0.004596896171569824, 0.004598495960235596, 0.004573184013366699, 0.0046179518699646, 0.004722976207733155, 0.004689919948577881, 0.004834400177001953, 0.004668032169342041, 0.004671008110046387, 0.004731679916381836, 0.004677599906921387, 0.004734079837799072, 0.0047420158386230465, 0.0046919679641723635, 0.004703743934631348, 0.004710912227630615, 0.004667679786682129, 0.004633567810058594, 0.0046128640174865725, 0.004605184078216553, 0.004643871784210205, 0.004660031795501709, 0.0046048321723937986, 0.004603231906890869, 0.0045797438621520995, 0.0045872960090637205, 0.0046394882202148435, 0.004647808074951172, 0.004649312019348145, 0.004643328189849853, 0.0046592001914978025, 0.004654367923736572, 0.004626239776611328, 0.0046375679969787595, 0.0046080322265625, 0.0047923197746276855, 0.004611711978912353, 0.004661920070648193, 0.00459875202178955, 0.004661248207092285, 0.004809792041778564, 0.00470143985748291, 0.004744895935058594, 0.00478982400894165, 0.004860352039337159, 0.004757599830627441, 0.004722239971160888, 0.004620192050933838, 0.004610015869140625, 0.004597760200500488, 0.004583903789520264, 0.004649151802062988, 0.00459552001953125, 0.004562943935394287, 0.004563136100769043, 0.0045352959632873535, 0.004606783866882324, 0.0047861762046813965, 0.00460588788986206, 0.004591296195983887, 0.004581535816192627, 0.004607840061187744, 0.004618944168090821, 0.004648799896240234, 0.004857567787170411, 0.004849696159362793, 0.004920639991760254, 0.0047931838035583495, 0.004900928020477295, 0.0050522880554199215, 0.004964352130889893, 0.004990528106689453, 0.004843967914581299, 0.00479804801940918, 0.004719007968902588, 0.004675583839416504, 0.0046976318359375, 0.004690400123596191, 0.0046752958297729495, 0.004888959884643555, 0.004781983852386475, 0.004613120079040528, 0.004599103927612305, 0.004669119834899902, 0.00465715217590332, 0.004622399806976318, 0.004591551780700684, 0.004720352172851563, 0.004577568054199219, 0.004630623817443847, 0.004609248161315918, 0.004601920127868652, 0.004604576110839843, 0.00458739185333252, 0.004577248096466064, 0.004630655765533447, 0.004675456047058105, 0.004720287799835205, 0.004722879886627197, 0.00473526382446289, 0.00476310396194458, 0.00469382381439209, 0.005211647987365723, 0.004792031764984131, 0.004760543823242188, 0.0046358718872070315, 0.004639328002929688, 0.004587647914886475, 0.004663167953491211, 0.004558080196380615, 0.004541183948516846, 0.004528160095214844, 0.004518976211547851, 0.004499551773071289, 0.004493152141571045, 0.004481823921203614, 0.004522175788879395, 0.004501855850219726, 0.004482079982757568, 0.004483712196350098, 0.004460544109344483, 0.004681119918823242, 0.004506207942962646, 
0.004517087936401367, 0.004530975818634033, 0.004497407913208008, 0.004582752227783203, 0.004497695922851563, 0.004487232208251953, 0.004495327949523926, 0.004520287990570068, 0.0047185921669006346, 0.004703551769256592, 0.004534880161285401, 0.004697375774383545, 0.004600063800811767, 0.0046343040466308594, 0.004627327919006347, 0.004550111770629883, 0.004676032066345215, 0.004571231842041015, 0.004620543956756592, 0.004581120014190674, 0.004543871879577637, 0.004617887973785401, 0.004665408134460449, 0.004684703826904297, 0.004677055835723877, 0.004692543983459472, 0.0046059517860412595, 0.004591616153717041, 0.004597760200500488, 0.004618239879608154, 0.004583424091339112, 0.0045875201225280765, 0.004564767837524414, 0.004880576133728027, 0.004784416198730468, 0.004697855949401856, 0.004699840068817139, 0.0047168641090393065, 0.004771840095520019, 0.004831232070922851, 0.004754784107208252, 0.004737279891967774, 0.004818016052246093, 0.0049012799263000485, 0.004735648155212402, 0.004697951793670654, 0.0047185921669006346, 0.004878335952758789, 0.004793504238128662, 0.004717408180236816, 0.004734975814819336, 0.004719744205474854, 0.004618656158447265, 0.004633088111877442, 0.004677599906921387, 0.004777279853820801, 0.004651391983032226, 0.004704576015472412, 0.004717631816864014, 0.004864960193634033, 0.004568096160888672, 0.004590784072875977, 0.004566559791564942, 0.0045447678565979, 0.00464896011352539, 0.004550655841827392, 0.004720479965209961, 0.004534399986267089, 0.004579135894775391, 0.004526303768157959, 0.004507199764251709, 0.004501247882843017, 0.004543168067932129, 0.004622335910797119, 0.004802559852600098, 0.004651008129119873, 0.00461187219619751, 0.0045502080917358394, 0.004561567783355713, 0.0045647997856140135, 0.004570432186126709, 0.004704864025115967, 0.004567327976226806, 0.004532224178314209, 0.004519936084747315, 0.004563104152679443, 0.0045658559799194335, 0.004555424213409424, 0.004545919895172119, 0.0046367039680480955, 0.0046921601295471195, 0.004653600215911866, 0.004909279823303222, 0.005089280128479004, 0.005021696090698242, 0.0048261117935180665, 0.0048855037689208985, 0.004852831840515137, 0.004826079845428466, 0.004818880081176758, 0.004778048038482666, 0.004698048114776612, 0.004655360221862793, 0.004606783866882324, 0.004884479999542236, 0.004702688217163086, 0.004956319808959961, 0.004784319877624512, 0.0047552638053894045, 0.004785952091217041, 0.004692192077636719, 0.0046592001914978025, 0.004642816066741944, 0.004658624172210694, 0.004765920162200928, 0.004692319869995117, 0.0047021441459655765, 0.004759903907775879, 0.004732639789581299, 0.004802559852600098, 0.004761600017547608, 0.00480457592010498, 0.0046796159744262695, 0.004698368072509765, 0.004705535888671875, 0.004665952205657959, 0.004753407955169678, 0.0046728959083557125, 0.004653696060180664, 0.004581376075744629, 0.004548672199249267, 0.0047495999336242675, 0.0046291518211364745, 0.004633312225341797, 0.004583680152893067, 0.004592703819274902, 0.0045796799659729, 0.004563583850860596, 0.004612415790557861, 0.004633312225341797, 0.00474121618270874, 0.004635519981384277, 0.0046059517860412595, 0.004601856231689453, 0.004550655841827392, 0.0046061758995056155, 0.004532127857208252, 0.004550528049468994, 0.004552544116973877, 0.004550816059112549, 0.004632575988769531, 0.004632607936859131, 0.004650080204010009, 0.0045396161079406736, 0.0045257282257080075, 0.004743135929107666, 0.0045874881744384765, 0.004634367942810059, 0.004531712055206299, 0.004549439907073975, 
0.0045957121849060055, 0.0045015039443969725, 0.004871935844421387, 0.004527552127838135, 0.0045119037628173824, 0.004582047939300537, 0.004527872085571289, 0.004563199996948242, 0.004410943984985352, 0.0046341118812561035, 0.00464518404006958, 0.00459827184677124, 0.0045296640396118165, 0.0046061439514160155, 0.004677631855010986, 0.004559296131134033, 0.004622591972351074, 0.004531968116760254, 0.004694015979766846, 0.004534560203552246, 0.004611135959625244, 0.004690591812133789, 0.004812384128570556, 0.004775424003601075, 0.004745759963989258, 0.004907392024993897, 0.004734623908996582, 0.004799136161804199, 0.004785888195037842, 0.004945888042449951, 0.004889632225036621, 0.004750336170196533, 0.004761151790618897, 0.004710815906524658, 0.0047077441215515135, 0.00464358377456665, 0.004685855865478515, 0.0047010560035705565, 0.004579360008239746, 0.0046312642097473145, 0.004622591972351074, 0.004569024085998535, 0.004623392105102539, 0.004565983772277832, 0.004600959777832031, 0.004556000232696533, 0.004560544013977051, 0.004584799766540528, 0.004558752059936524, 0.004790463924407959, 0.004577856063842773, 0.004561920166015625, 0.004562143802642822, 0.004548480033874511, 0.004623551845550537, 0.004805344104766846, 0.004792031764984131, 0.004716383934020996, 0.005761375904083252, 0.0046490559577941895, 0.00459980821609497, 0.004612351894378662, 0.004565951824188233, 0.004592031955718994, 0.0045385599136352536, 0.004517407894134522, 0.004592319965362549, 0.004726975917816162, 0.004579487800598144, 0.004529151916503906, 0.004567520141601562, 0.004498464107513428, 0.004791168212890625, 0.0046267518997192385, 0.004783008098602295, 0.0047717437744140626, 0.004608992099761963, 0.004633696079254151, 0.004631008148193359, 0.004593696117401123, 0.004669856071472168, 0.004581727981567383, 0.004617023944854737, 0.004592639923095703, 0.00458131217956543, 0.004566592216491699, 0.004772480010986328, 0.004572896003723145, 0.004532320022583008, 0.004523424148559571, 0.004536479949951172, 0.004525472164154052, 0.004538432121276855, 0.004581344127655029, 0.004549088001251221, 0.004534880161285401, 0.004672383785247803, 0.0046724481582641605, 0.0048005437850952145, 0.004793504238128662, 0.004831520080566407, 0.0049519681930541995, 0.0057432641983032225, 0.004973824024200439, 0.004741888046264648, 0.004749279975891113, 0.004702271938323975, 0.004680831909179687, 0.004702527999877929, 0.0046497597694396976, 0.004685056209564209, 0.004622367858886719, 0.004704736232757569, 0.004638336181640625, 0.004837759971618652, 0.00471449613571167, 0.004669151782989502, 0.00477836799621582, 0.004734367847442627, 0.004872704029083252, 0.004925439834594727, 0.004892672061920166, 0.004876287937164306, 0.0047281279563903805, 0.004885183811187744, 0.004878335952758789, 0.005062655925750732, 0.004956160068511963, 0.004846752166748047, 0.004754271984100342, 0.004730239868164062, 0.0047786240577697755, 0.00468393611907959, 0.004713759899139404, 0.00480188798904419, 0.004811423778533936, 0.004710624217987061, 0.004748544216156006, 0.00463318395614624, 0.0047205758094787595, 0.0047638721466064455, 0.004550111770629883, 0.0046039681434631346, 0.004532479763031006, 0.004663008213043213, 0.004491040229797363, 0.004534783840179443, 0.0045298562049865725, 0.0044930558204650875, 0.004613984107971192, 0.004509696006774902, 0.004567232131958008, 0.004563744068145752, 0.004514815807342529, 0.004643743991851807, 0.004564832210540772, 0.004607071876525879, 0.004547327995300293, 0.004550687789916992, 0.004595200061798096, 
0.004522560119628906, 0.004652575969696045, 0.004780767917633056, 0.004610015869140625, 0.004591455936431885, 0.004597663879394531, 0.004781407833099365, 0.00478656005859375, 0.0047580480575561525, 0.004659296035766602, 0.004608831882476807, 0.00462886381149292, 0.004583680152893067, 0.004685344219207764, 0.004524320125579834, 0.004558784008026123, 0.004551167964935303, 0.004535391807556153, 0.004543456077575683, 0.00445030403137207, 0.0045322561264038086, 0.004532192230224609, 0.0044906878471374515, 0.004537983894348145, 0.004508607864379883, 0.004515679836273193, 0.00451094388961792, 0.004494272232055664, 0.004737215995788574, 0.004593760013580322, 0.004576992034912109, 0.0045257282257080075, 0.005543360233306885, 0.004665567874908448, 0.004594272136688233, 0.004663392066955567, 0.004579328060150147, 0.0046212158203125, 0.0047961602210998535, 0.004657216072082519, 0.004639200210571289, 0.0046219840049743655, 0.004646912097930909, 0.0047964158058166504, 0.004545567989349365, 0.004542943954467774, 0.004702688217163086, 0.004653247833251953, 0.004641983985900879, 0.00455241584777832, 0.004590528011322021, 0.004620287895202637, 0.004545728206634522, 0.004599967956542969, 0.0045062718391418455, 0.004495071887969971, 0.004504159927368164, 0.004490943908691406, 0.004531968116760254, 0.004507904052734375, 0.004602015972137451, 0.004535711765289306, 0.004637216091156006, 0.00467903995513916, 0.004565120220184326, 0.004594079971313477, 0.004534272193908692, 0.004603903770446777, 0.004736159801483154, 0.004598015785217285, 0.004778751850128174, 0.004654463768005371, 0.0048213119506835935, 0.00464089584350586, 0.004625696182250977, 0.004677440166473389, 0.004611008167266846, 0.004872191905975342, 0.004579328060150147, 0.004638720035552979, 0.004584832191467285, 0.004551487922668457, 0.004623648166656494, 0.0045584959983825685, 0.004590464115142822, 0.004585055828094482, 0.004573599815368652, 0.004671775817871094, 0.004648672103881836, 0.004630047798156739, 0.00455728006362915, 0.0045218877792358395, 0.004622432231903076, 0.0046159358024597166, 0.004671743869781494, 0.004589759826660157, 0.004617824077606201, 0.004588768005371093, 0.004578303813934326, 0.004596864223480225]",tokens/s,211.34453451186968,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: 
ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,distilbert/distilgpt2,distilbert/distilgpt2,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,811.995136,933.101568,0.0,530.579456,489.934848,s,1,9.5486796875,9.5486796875,0.0,9.5486796875,9.5486796875,9.5486796875,9.5486796875,[9.5486796875],,kWh,2.7303050833343907e-06,2.9394941465236604e-07,9.338896360018556e-07,3.958144133988612e-06,,MB,1175.093248,947.781632,0.0,534.77376,462.069248,s,29,0.32594518280029294,0.011239489062079068,0.0001855605805196185,0.011215423583984375,0.011260230445861816,0.011289996337890625,0.011937617568969727,"[0.012187135696411134, 0.011255040168762206, 0.011234304428100587, 0.011123071670532227, 0.011248543739318847, 0.011249279975891114, 0.011134623527526855, 0.011148256301879883, 0.01124947166442871, 0.011179360389709472, 0.011215423583984375, 0.011107040405273437, 0.01119651222229004, 0.011251008033752442, 0.011210816383361816, 0.011219231605529786, 0.011163552284240723, 0.01118227195739746, 0.011137151718139648, 0.011145312309265137, 0.01124732780456543, 0.011211903572082519, 0.01115884780883789, 0.011207488059997559, 0.011240927696228028, 0.011218208312988281, 0.011295999526977539, 0.011246080398559571, 0.011280991554260255]",tokens/s,22776.836080896137,kWh,3.942231148616733e-07,4.3475665577032865e-08,2.597018121997321e-07,6.974005926384383e-07,tokens/kWh,367077405.29942614,MB,1187.196928,970.850304,0.0,557.842432,462.071808,s,29,9.723527984619139,0.3352940684351427,0.005269531625984053,0.3350185241699219,0.34111163330078126,0.3440031860351562,0.34963032958984375,"[0.351372314453125, 0.3395506591796875, 0.33557107543945314, 0.3355439758300781, 0.3399050598144531, 0.3451509399414063, 0.3366166381835938, 0.3350185241699219, 0.33530569458007814, 0.3307712707519531, 0.33366143798828124, 0.33041580200195314, 0.33661968994140623, 0.329400390625, 0.33085836791992185, 0.3339048767089844, 0.3302677307128906, 0.3284772033691406, 0.32579010009765624, 0.33391058349609376, 0.3344670104980469, 0.3353685302734375, 0.33172637939453126, 0.329951416015625, 0.3343885192871094, 0.33849850463867187, 0.34081915283203124, 0.3379145812988281, 
0.34228155517578124]",tokens/s,187.8947644198673,kWh,9.276163314842499e-06,1.0230014920014538e-06,4.409307299348633e-06,1.4708472106192582e-05,tokens/kWh,4283245.706634318,,s,1827,9.711028357505796,0.0053152864573102355,0.0001932222244243295,0.005288959980010987,0.005460492992401123,0.005532880163192749,0.0059897022438049315,"[0.005257215976715088, 0.00535756778717041, 0.005287936210632324, 0.00541593599319458, 0.00537011194229126, 0.005316864013671875, 0.005390848159790039, 0.005606527805328369, 0.0053537278175354005, 0.005341728210449218, 0.00534771203994751, 0.005371615886688232, 0.00523638391494751, 0.0053599681854248045, 0.0052800002098083495, 0.0053777918815612795, 0.005461408138275147, 0.0053961601257324215, 0.005378975868225098, 0.005457824230194092, 0.005424575805664062, 0.0054421119689941405, 0.005367616176605225, 0.00534876823425293, 0.005392799854278565, 0.00550377607345581, 0.005438528060913086, 0.005443424224853516, 0.005540575981140137, 0.005497280120849609, 0.005587615966796875, 0.0055083842277526855, 0.005416160106658936, 0.005410912036895752, 0.005395135879516602, 0.005395936012268066, 0.00534281587600708, 0.005445824146270752, 0.005630591869354248, 0.005472064018249512, 0.00560038423538208, 0.005440063953399658, 0.005511168003082275, 0.005534016132354737, 0.005504255771636963, 0.00565119981765747, 0.005670368194580078, 0.0054605121612548825, 0.0055352959632873535, 0.005419456005096436, 0.0054618239402770994, 0.005447072029113769, 0.006446080207824707, 0.0073367681503295895, 0.007031712055206299, 0.007058720111846924, 0.007221375942230225, 0.005696159839630127, 0.005482687950134278, 0.0054271998405456545, 0.005860735893249511, 0.005367392063140869, 0.005391200065612793, 0.005273600101470947, 0.005400896072387695, 0.005439167976379395, 0.00536294412612915, 0.005308320045471191, 0.005388383865356445, 0.005229311943054199, 0.0052285118103027346, 0.005367839813232422, 0.005326848030090332, 0.005257215976715088, 0.005292384147644043, 0.005226016044616699, 0.005246175765991211, 0.005314815998077393, 0.005278687953948974, 0.005338816165924072, 0.005273600101470947, 0.005283648014068603, 0.005249216079711914, 0.005364831924438477, 0.005335968017578125, 0.005322751998901368, 0.005312511920928955, 0.00532908821105957, 0.0053266558647155765, 0.0053309440612792965, 0.005283840179443359, 0.0052674560546875, 0.005345280170440674, 0.005349376201629639, 0.005392384052276611, 0.005349535942077637, 0.005426400184631348, 0.005314847946166992, 0.005390431880950928, 0.005480639934539795, 0.0055071358680725095, 0.005643775939941406, 0.0054048638343811035, 0.005372223854064942, 0.00546611213684082, 0.005429247856140137, 0.005453824043273926, 0.005386240005493164, 0.005404672145843506, 0.005402624130249023, 0.0054579200744628905, 0.005459968090057373, 0.005506559848785401, 0.005470816135406494, 0.005509119987487793, 0.005476255893707275, 0.005445631980895996, 0.005537792205810547, 0.005410912036895752, 0.005461919784545899, 0.005523392200469971, 0.00542460823059082, 0.005552127838134766, 0.005458111763000488, 0.0054225921630859375, 0.0053994240760803225, 0.005268640041351318, 0.005366752147674561, 0.005317567825317383, 0.005422016143798828, 0.005302271842956543, 0.005345280170440674, 0.005287936210632324, 0.005296127796173096, 0.005265408039093018, 0.0053534722328186036, 0.005253215789794922, 0.005295392036437988, 0.00529472017288208, 0.005273920059204101, 0.005168799877166748, 0.005233856201171875, 0.005186592102050781, 0.005193535804748535, 0.005238368034362793, 0.005317024230957031, 
0.005339263916015625, 0.005304192066192627, 0.005359615802764893, 0.005219967842102051, 0.005212543964385986, 0.005318816184997559, 0.0052503042221069334, 0.005263008117675781, 0.005377024173736572, 0.005484479904174805, 0.005336991786956787, 0.0053842878341674805, 0.005325856208801269, 0.00547324800491333, 0.005464064121246338, 0.005426815986633301, 0.005392767906188965, 0.0053678078651428224, 0.005384191989898681, 0.005316768169403076, 0.00537724781036377, 0.005281888008117676, 0.0054973759651184085, 0.005347040176391602, 0.0053281598091125485, 0.005368832111358642, 0.005306367874145508, 0.005336544036865234, 0.005360159873962402, 0.005269504070281982, 0.005257343769073487, 0.005363584041595459, 0.005269504070281982, 0.0052367358207702636, 0.0052674880027771, 0.005488607883453369, 0.005283648014068603, 0.005443935871124268, 0.005279583930969238, 0.005302271842956543, 0.005394176006317138, 0.005261280059814453, 0.005202015876770019, 0.005076767921447754, 0.005263967990875244, 0.005232096195220947, 0.005184415817260742, 0.005203968048095703, 0.005220511913299561, 0.005252031803131103, 0.00529699182510376, 0.00521017599105835, 0.005202015876770019, 0.005191584110260009, 0.005277696132659912, 0.005226047992706298, 0.0052576642036437984, 0.005234687805175781, 0.005281311988830566, 0.005425824165344238, 0.005433152198791504, 0.005396192073822022, 0.0053864002227783205, 0.005412576198577881, 0.005564832210540772, 0.005484543800354004, 0.00555404806137085, 0.005402495861053467, 0.005414783954620361, 0.005327040195465088, 0.005284319877624512, 0.005353184223175049, 0.005344511985778809, 0.005358335971832276, 0.005375904083251953, 0.005281888008117676, 0.005306367874145508, 0.005408768177032471, 0.00533296012878418, 0.005283040046691894, 0.005276480197906494, 0.005301631927490234, 0.005260255813598633, 0.005424895763397217, 0.005265312194824219, 0.005279551982879638, 0.005304736137390137, 0.0053796801567077635, 0.005333183765411377, 0.0053465600013732914, 0.005206111907958984, 0.005218976020812989, 0.005275648117065429, 0.005246431827545166, 0.005245312213897705, 0.005249216079711914, 0.005408160209655762, 0.0053393921852111816, 0.005409088134765625, 0.0053673601150512696, 0.005449952125549317, 0.005343167781829834, 0.005327136039733887, 0.005312511920928955, 0.005404672145843506, 0.00535481595993042, 0.005176703929901123, 0.005290592193603515, 0.005294079780578613, 0.00517302417755127, 0.005214367866516113, 0.00567471981048584, 0.005441664218902588, 0.005357247829437256, 0.0054113597869873045, 0.0053137922286987304, 0.005378367900848388, 0.005439807891845703, 0.005466303825378418, 0.00551091194152832, 0.0054618558883666995, 0.005413216114044189, 0.005447679996490478, 0.005429247856140137, 0.0053821439743042, 0.005429247856140137, 0.005449056148529053, 0.0053901119232177734, 0.005450143814086914, 0.005359903812408447, 0.005353407859802246, 0.005419616222381592, 0.005401311874389648, 0.00546233606338501, 0.005440127849578857, 0.0055502400398254395, 0.005529088020324707, 0.0054336957931518555, 0.005391808032989502, 0.005537471771240234, 0.0054362878799438475, 0.005447679996490478, 0.005363423824310303, 0.005308703899383545, 0.00528380823135376, 0.005543424129486084, 0.005462560176849365, 0.005422912120819092, 0.00530617618560791, 0.005286272048950195, 0.005285568237304687, 0.005407040119171143, 0.005362783908843994, 0.005425888061523438, 0.005366015911102295, 0.005308159828186035, 0.005307680130004883, 0.005301152229309082, 0.005314559936523438, 0.005462016105651855, 0.005392384052276611, 
0.005303647994995117, 0.005361631870269775, 0.005327040195465088, 0.005340799808502197, 0.005353792190551758, 0.005347008228302002, 0.005370495796203613, 0.005392064094543457, 0.0053613119125366215, 0.005392127990722656, 0.005429855823516846, 0.005419007778167725, 0.005437312126159668, 0.005404799938201904, 0.0053678078651428224, 0.005408576011657715, 0.0054661760330200196, 0.005353600025177002, 0.005315936088562011, 0.0054624958038330075, 0.005351615905761719, 0.005378047943115235, 0.005292031764984131, 0.005289824008941651, 0.005314720153808594, 0.005369184017181397, 0.0052947521209716795, 0.005288959980010987, 0.005346240043640137, 0.005277760028839111, 0.005263423919677735, 0.005337024211883545, 0.005274911880493164, 0.005321343898773193, 0.005451263904571534, 0.005398303985595703, 0.005351967811584472, 0.005316864013671875, 0.005650464057922363, 0.006758399963378906, 0.006215072154998779, 0.008266336441040039, 0.00716377592086792, 0.005437568187713623, 0.005381279945373535, 0.005358016014099121, 0.00536956787109375, 0.005257952213287354, 0.005303487777709961, 0.005376768112182617, 0.005305632114410401, 0.005386271953582764, 0.00546070384979248, 0.005400383949279785, 0.005376192092895507, 0.0054150400161743165, 0.0053105602264404295, 0.005265183925628662, 0.005375999927520752, 0.0053002238273620605, 0.005281792163848877, 0.005303647994995117, 0.00533135986328125, 0.005302591800689697, 0.005412511825561524, 0.005314847946166992, 0.005447231769561768, 0.005388415813446045, 0.00533897590637207, 0.005337855815887451, 0.005383232116699219, 0.005201888084411621, 0.0053637118339538575, 0.005390175819396973, 0.005375711917877197, 0.005375584125518799, 0.005411680221557617, 0.005265408039093018, 0.005260992050170899, 0.0053536958694458, 0.005336639881134034, 0.005405216217041015, 0.005381696224212647, 0.0054514241218566895, 0.005341119766235351, 0.005294943809509277, 0.005193088054656982, 0.0052839360237121585, 0.005376543998718262, 0.005326528072357178, 0.00520627212524414, 0.005275807857513428, 0.005197728157043457, 0.005267199993133545, 0.005488992214202881, 0.005291584014892578, 0.0052432317733764645, 0.005332128047943115, 0.005277567863464355, 0.005282688140869141, 0.005288032054901123, 0.005654528141021729, 0.005326047897338867, 0.005634463787078857, 0.005992447853088379, 0.005744736194610595, 0.005295519828796387, 0.005186431884765625, 0.005226719856262207, 0.005202816009521484, 0.00521507215499878, 0.005261375904083252, 0.005260704040527343, 0.005251039981842041, 0.005419104099273681, 0.005341663837432862, 0.005402751922607422, 0.005422111988067627, 0.005700511932373047, 0.005398528099060058, 0.005371903896331787, 0.005269504070281982, 0.0053002238273620605, 0.005208320140838623, 0.005216063976287842, 0.0052754878997802734, 0.005230239868164063, 0.005175648212432861, 0.005165023803710938, 0.0051773757934570315, 0.005324160099029541, 0.005245183944702149, 0.005310080051422119, 0.005440351963043213, 0.005328320026397705, 0.005421855926513672, 0.005371903896331787, 0.005340447902679443, 0.005324543952941895, 0.005426015853881836, 0.005257343769073487, 0.005221407890319824, 0.005199135780334473, 0.00522547197341919, 0.005219007968902588, 0.005293407917022705, 0.005241375923156738, 0.005296256065368652, 0.0051998720169067385, 0.0051831998825073245, 0.005144159793853759, 0.005182144165039063, 0.005177472114562988, 0.005275743961334229, 0.005424736022949218, 0.005252799987792969, 0.005181888103485108, 0.005220384120941162, 0.005257247924804687, 0.0052276158332824705, 0.005164959907531738, 
0.005104640007019043, 0.005162528038024902, 0.005093855857849121, 0.0051404800415039064, 0.005175295829772949, 0.005253119945526123, 0.005201920032501221, 0.005469183921813964, 0.00591158390045166, 0.005593056201934814, 0.005511168003082275, 0.005705728054046631, 0.005452896118164062, 0.005624512195587158, 0.005584671974182129, 0.005489088058471679, 0.005341184139251709, 0.005251039981842041, 0.005338912010192871, 0.005321983814239502, 0.005211232185363769, 0.0052815361022949215, 0.005263487815856934, 0.005195807933807373, 0.005265312194824219, 0.005154911994934082, 0.00517091178894043, 0.005253407955169678, 0.005356800079345703, 0.005312416076660156, 0.005508959770202637, 0.005302656173706055, 0.005440447807312012, 0.005228223800659179, 0.005377888202667236, 0.005390495777130127, 0.005287487983703613, 0.00534991979598999, 0.005261472225189209, 0.005219935894012451, 0.005145088195800781, 0.005152224063873291, 0.0051470079421997075, 0.005211775779724121, 0.00516480016708374, 0.005179872035980224, 0.0051485438346862795, 0.005269792079925537, 0.006273024082183838, 0.005276800155639649, 0.005204864025115967, 0.005150720119476319, 0.005156864166259765, 0.005178719997406006, 0.005249152183532715, 0.005229087829589843, 0.005272672176361084, 0.005247903823852539, 0.005283840179443359, 0.00527942419052124, 0.005281184196472168, 0.005853280067443848, 0.0054522562026977535, 0.005255231857299805, 0.005260735988616943, 0.00522057580947876, 0.005263296127319336, 0.005255263805389404, 0.005392992019653321, 0.00524729585647583, 0.005275328159332276, 0.005230559825897217, 0.0052285118103027346, 0.005203199863433838, 0.005296127796173096, 0.005280576229095459, 0.005318751811981201, 0.0054061121940612794, 0.005321216106414795, 0.005273600101470947, 0.0053285441398620605, 0.005357920169830322, 0.005347328186035156, 0.0053309440612792965, 0.005232639789581299, 0.005294079780578613, 0.0053002238273620605, 0.0053508801460266115, 0.0052986879348754885, 0.0053366079330444335, 0.0053324799537658694, 0.005374847888946533, 0.005386367797851562, 0.005420896053314209, 0.005299615859985352, 0.005481056213378906, 0.0056137280464172365, 0.005590176105499268, 0.005501791954040527, 0.005197855949401855, 0.005386335849761963, 0.005473504066467285, 0.005310688018798828, 0.0052820158004760745, 0.0052082881927490235, 0.00516864013671875, 0.0051717119216918945, 0.005203968048095703, 0.005251071929931641, 0.005182464122772217, 0.005268479824066162, 0.005154816150665284, 0.00517033576965332, 0.005163167953491211, 0.005143231868743896, 0.005107583999633789, 0.005116032123565674, 0.005111616134643554, 0.005100768089294433, 0.005074912071228027, 0.00509449577331543, 0.005226399898529053, 0.005126143932342529, 0.005275680065155029, 0.0052551040649414064, 0.005367839813232422, 0.005232639789581299, 0.00518723201751709, 0.005194079875946045, 0.005144576072692871, 0.0051404800415039064, 0.005279744148254394, 0.005275680065155029, 0.005276735782623291, 0.0052551040649414064, 0.005231200218200683, 0.005187744140625, 0.005873951911926269, 0.0052221441268920895, 0.005255519866943359, 0.005289824008941651, 0.005204991817474365, 0.005209375858306884, 0.005224160194396973, 0.005214208126068115, 0.005314432144165039, 0.005281919956207275, 0.005187327861785889, 0.005202112197875977, 0.005240896224975586, 0.0052091522216796875, 0.005145535945892334, 0.005165056228637695, 0.005287936210632324, 0.005153088092803955, 0.005299647808074951, 0.005291679859161377, 0.0051651840209960935, 0.0051712322235107425, 0.005849088191986084, 0.0054131197929382325, 
0.0054048638343811035, 0.0053320322036743166, 0.005437535762786865, 0.0053439679145812985, 0.005470431804656982, 0.005324639797210693, 0.00536681604385376, 0.0053933758735656735, 0.005359615802764893, 0.0052899842262268066, 0.005389408111572265, 0.005342112064361572, 0.00532480001449585, 0.005351424217224121, 0.005379936218261719, 0.005268703937530517, 0.0053216638565063475, 0.005441184043884277, 0.00548038387298584, 0.005388544082641601, 0.0053838081359863284, 0.005367455959320068, 0.005462560176849365, 0.005373600006103515, 0.005402847766876221, 0.0053968319892883304, 0.0053146882057189945, 0.005314559936523438, 0.0052501440048217775, 0.005241759777069092, 0.005329984188079834, 0.005363743782043457, 0.005415359973907471, 0.005433824062347412, 0.0053311681747436526, 0.00531660795211792, 0.00528166389465332, 0.005229663848876953, 0.005217088222503662, 0.005275648117065429, 0.005201920032501221, 0.005211520195007324, 0.005106143951416016, 0.005183648109436035, 0.005111807823181152, 0.0051099519729614256, 0.005238592147827148, 0.005190976142883301, 0.005147264003753662, 0.005107264041900635, 0.0051942400932312015, 0.005185535907745361, 0.005232255935668945, 0.00519820785522461, 0.005343232154846191, 0.005146624088287354, 0.005195583820343018, 0.0052715520858764645, 0.005257247924804687, 0.005193952083587646, 0.005152575969696045, 0.005176640033721923, 0.005194655895233154, 0.005236639976501465, 0.005128608226776123, 0.005260863780975342, 0.005165503978729248, 0.005197120189666748, 0.005144927978515625, 0.00518998384475708, 0.005197184085845947, 0.005208384037017822, 0.005158432006835937, 0.005185344219207764, 0.005179423809051514, 0.005172160148620606, 0.0053678078651428224, 0.005300352096557617, 0.005307680130004883, 0.005273952007293701, 0.00529641580581665, 0.0051439361572265625, 0.005143360137939453, 0.0051951041221618656, 0.005271743774414062, 0.005109024047851562, 0.005189792156219482, 0.005159743785858154, 0.005197760105133057, 0.005285952091217041, 0.00532480001449585, 0.005189631938934326, 0.005210112094879151, 0.005244927883148193, 0.005144576072692871, 0.0051848959922790525, 0.005169792175292969, 0.005215871810913086, 0.0053864002227783205, 0.005445856094360352, 0.005244927883148193, 0.005320608139038086, 0.0053937921524047855, 0.0054358081817626955, 0.005335231781005859, 0.005408895969390869, 0.005301536083221436, 0.005229279994964599, 0.005250527858734131, 0.005297696113586426, 0.0052008957862854, 0.005148575782775879, 0.005256480216979981, 0.005169280052185058, 0.0051719040870666505, 0.005158432006835937, 0.005122047901153564, 0.005156608104705811, 0.005300000190734864, 0.0053482880592346195, 0.005294432163238525, 0.005381855964660644, 0.005218111991882325, 0.005176799774169922, 0.0052005438804626465, 0.005230591773986816, 0.005326848030090332, 0.005152736186981201, 0.005261280059814453, 0.005360064029693604, 0.005286399841308594, 0.005246975898742676, 0.005263232231140137, 0.005273407936096192, 0.00524729585647583, 0.005306367874145508, 0.0051560959815979005, 0.005176064014434815, 0.005266687870025635, 0.005257408142089844, 0.0052106881141662596, 0.005293439865112304, 0.005265600204467773, 0.0053448319435119625, 0.005382527828216552, 0.005323520183563232, 0.005316351890563965, 0.005349376201629639, 0.005269504070281982, 0.0052401599884033204, 0.00523740816116333, 0.005261312007904053, 0.005276671886444092, 0.00528227186203003, 0.005445759773254394, 0.005290400028228759, 0.005285888195037842, 0.0052592320442199705, 0.005234687805175781, 0.005195807933807373, 
0.005243936061859131, 0.005147168159484863, 0.0056501121520996095, 0.005206399917602539, 0.005161344051361084, 0.0051296639442443845, 0.005224160194396973, 0.005180255889892578, 0.005179391860961914, 0.005828351974487305, 0.006453504085540771, 0.006078464031219482, 0.005867775917053223, 0.005389535903930664, 0.0052864961624145505, 0.005215807914733887, 0.005217728137969971, 0.005317567825317383, 0.0053002238273620605, 0.005314911842346191, 0.005145823955535889, 0.005295743942260742, 0.005337920188903809, 0.005316544055938721, 0.005331103801727295, 0.005293983936309815, 0.0054839677810668945, 0.005468736171722412, 0.00556819200515747, 0.005460288047790527, 0.005151584148406983, 0.005372064113616943, 0.005236703872680664, 0.005290016174316407, 0.0053309440612792965, 0.00537395191192627, 0.005314559936523438, 0.0055316481590271, 0.005375520229339599, 0.005271967887878418, 0.0052388482093811035, 0.005204192161560059, 0.0051992001533508305, 0.005218751907348633, 0.005228352069854736, 0.005120192050933838, 0.005145984172821045, 0.005141119956970214, 0.005180863857269287, 0.005278048038482666, 0.005273824214935303, 0.005166816234588623, 0.005196159839630127, 0.005246880054473877, 0.005234591960906983, 0.005337183952331543, 0.005228544235229492, 0.005250175952911377, 0.005274496078491211, 0.005222335815429687, 0.005258463859558106, 0.0052048320770263674, 0.0051710400581359865, 0.005275807857513428, 0.00528988790512085, 0.005277791976928711, 0.005310463905334473, 0.005230591773986816, 0.005207615852355957, 0.0051504640579223635, 0.005209951877593994, 0.005181759834289551, 0.005106080055236817, 0.005148928165435791, 0.00511568021774292, 0.005105279922485352, 0.005116096019744873, 0.005318943977355957, 0.005139743804931641, 0.005100255966186524, 0.005169151782989502, 0.005265376091003418, 0.00525110387802124, 0.005148672103881836, 0.005201087951660156, 0.005161312103271485, 0.005204448223114014, 0.005253312110900879, 0.005145599842071533, 0.005138495922088623, 0.0051288318634033206, 0.005230720043182373, 0.005183487892150879, 0.005196095943450928, 0.005419680118560791, 0.005191584110260009, 0.005131487846374512, 0.005190176010131836, 0.005150176048278808, 0.005255392074584961, 0.005187615871429443, 0.0051959362030029295, 0.005140799999237061, 0.005105728149414062, 0.00516096019744873, 0.005090303897857666, 0.005387263774871826, 0.005154719829559326, 0.005213823795318604, 0.005091807842254639, 0.005119999885559082, 0.005236544132232666, 0.005232831954956055, 0.005201920032501221, 0.005265376091003418, 0.005191711902618408, 0.005261184215545655, 0.005254848003387451, 0.005210559844970703, 0.005165056228637695, 0.005216159820556641, 0.005232736110687256, 0.005179391860961914, 0.005140639781951904, 0.0052091522216796875, 0.005216159820556641, 0.0052130560874938964, 0.005406335830688476, 0.0054316802024841305, 0.005397727966308594, 0.005446623802185058, 0.005410592079162598, 0.005302303791046142, 0.005353663921356201, 0.0052789440155029295, 0.005349408149719239, 0.005353759765625, 0.005291552066802979, 0.005241600036621094, 0.0052977919578552244, 0.005251455783843994, 0.005217631816864013, 0.005350368022918701, 0.005367104053497314, 0.0052432317733764645, 0.0052674880027771, 0.005250912189483642, 0.005204127788543701, 0.005250463962554932, 0.005304927825927734, 0.005230016231536865, 0.0052700481414794925, 0.005296160221099853, 0.0052144317626953126, 0.005221248149871826, 0.005194655895233154, 0.0051765117645263675, 0.005321568012237549, 0.005312448024749756, 0.005285888195037842, 0.005259263992309571, 
0.005312479972839355, 0.005252448081970215, 0.005308256149291992, 0.005806464195251465, 0.005436960220336914, 0.005311423778533935, 0.006332448005676269, 0.006514976024627686, 0.0053942399024963375, 0.005267327785491944, 0.005275648117065429, 0.005281792163848877, 0.005219327926635742, 0.0051896958351135255, 0.0052700481414794925, 0.005170944213867187, 0.005173376083374023, 0.005167263984680176, 0.005153151988983154, 0.005132160186767578, 0.0051651840209960935, 0.0050709757804870605, 0.005262368202209473, 0.005243296146392822, 0.005218751907348633, 0.005156352043151856, 0.005239264011383056, 0.00536188793182373, 0.0052510080337524415, 0.005358687877655029, 0.006160448074340821, 0.005308896064758301, 0.005218560218811035, 0.005244927883148193, 0.0051998720169067385, 0.005179200172424316, 0.005200160026550293, 0.005175199985504151, 0.005148032188415527, 0.00523638391494751, 0.005141439914703369, 0.005185567855834961, 0.00514576005935669, 0.005195712089538574, 0.005212543964385986, 0.0051164479255676265, 0.005265408039093018, 0.005141791820526123, 0.005145311832427979, 0.0051404800415039064, 0.00522438383102417, 0.005226272106170654, 0.005221727848052978, 0.005364672183990478, 0.0053474240303039555, 0.005312416076660156, 0.0053002238273620605, 0.005171199798583984, 0.005158912181854248, 0.005227519989013672, 0.005191872119903565, 0.006351071834564209, 0.0052230081558227535, 0.0051374402046203615, 0.0051058239936828615, 0.005130688190460205, 0.005116288185119629, 0.005127744197845459, 0.005118400096893311, 0.005170944213867187, 0.005112063884735107, 0.005121952056884766, 0.005167200088500977, 0.005201920032501221, 0.005222400188446045, 0.005159039974212646, 0.005218431949615478, 0.005292895793914795, 0.0054624958038330075, 0.005338784217834473, 0.00521017599105835, 0.005239744186401368, 0.005257184028625488, 0.00515667200088501, 0.005128191947937012, 0.005146624088287354, 0.005127808094024658, 0.005109504222869873, 0.0051708478927612305, 0.0052804799079895015, 0.0051857919692993165, 0.005178847789764404, 0.0051799359321594235, 0.005210400104522705, 0.005175007820129395, 0.0052367358207702636, 0.005191679954528809, 0.0051404800415039064, 0.005132287979125977, 0.005261023998260498, 0.005539167881011963, 0.00534009599685669, 0.005343232154846191, 0.005270815849304199, 0.00520851182937622, 0.0052975997924804685, 0.005243743896484375, 0.0053534722328186036, 0.005339136123657226, 0.005257215976715088, 0.005261248111724854, 0.005398591995239258, 0.005307744026184082, 0.005206687927246094, 0.005212160110473632, 0.0051623997688293455, 0.00516096019744873, 0.005161056041717529, 0.005216095924377441, 0.005179615974426269, 0.005235136032104492, 0.005350944042205811, 0.005410816192626953, 0.00528985595703125, 0.005380256175994873, 0.00533465576171875, 0.005278016090393066, 0.005234752178192139, 0.005248703956604004, 0.005207007884979248, 0.0053981437683105465, 0.005318272113800049, 0.005249536037445069, 0.005271071910858154, 0.005229055881500244, 0.005228544235229492, 0.005105663776397705, 0.005165152072906494, 0.005070752143859864, 0.005064703941345215, 0.005089568138122559, 0.005101247787475586, 0.005076576232910157, 0.005107935905456543, 0.005191391944885254, 0.005171455860137939, 0.00510591983795166, 0.005082367897033691, 0.00512281608581543, 0.005148672103881836, 0.005173247814178467, 0.005170720100402832, 0.005145055770874023, 0.005148672103881836, 0.005824543952941894, 0.005637472152709961, 0.005229087829589843, 0.005147840023040771, 0.005235616207122803, 0.005130239963531494, 0.005079040050506592, 
0.005085184097290039, 0.005113759994506836, 0.005086688041687012, 0.0051943678855896, 0.005179391860961914, 0.0051827201843261715, 0.005209983825683594, 0.005294496059417724, 0.0052939200401306155, 0.00529472017288208, 0.0053639039993286135, 0.005281599998474121, 0.005230463981628418, 0.005222432136535645, 0.005181375980377197, 0.00521833610534668, 0.0052055039405822755, 0.005132959842681885, 0.005171167850494385, 0.005090432167053223, 0.005065599918365479, 0.00504585599899292, 0.0050438718795776365, 0.005258207798004151, 0.005353343963623047, 0.00523686408996582, 0.005197824001312256, 0.0052367358207702636, 0.005230080127716064, 0.0052408318519592285, 0.005175583839416504, 0.0050438718795776365, 0.005122623920440674, 0.005089280128479004, 0.005117951869964599, 0.0053116798400878905, 0.005312704086303711, 0.00530291223526001, 0.0052367358207702636, 0.005240479946136475, 0.005314879894256592, 0.005255199909210205, 0.005262720108032227, 0.005196447849273682, 0.0051298880577087405, 0.0050832958221435545, 0.0050566082000732425, 0.005078176021575928, 0.00504310417175293, 0.005027743816375732, 0.005082880020141602, 0.00519817590713501, 0.005240543842315674, 0.00506879997253418, 0.00514086389541626, 0.005086719989776611, 0.005091008186340332, 0.005158976078033447, 0.005218239784240722, 0.005187424182891845, 0.0051814718246459964, 0.005349855899810791, 0.005167488098144531, 0.0052899842262268066, 0.0052674560546875, 0.00515664005279541, 0.005140031814575195, 0.0051636800765991215, 0.005130239963531494, 0.0051933760643005375, 0.005058784008026123, 0.0051979198455810545, 0.0050503997802734376, 0.005081439971923828, 0.005061344146728515, 0.00510368013381958, 0.005038015842437744, 0.005079552173614502, 0.005065343856811524, 0.0052938880920410155, 0.005145792007446289, 0.0050841598510742185, 0.005113503932952881, 0.0050564160346984865, 0.00515667200088501, 0.0051634559631347655, 0.005395040035247802, 0.0053777918815612795, 0.0052882561683654785, 0.0052733120918273925, 0.005139999866485596, 0.005122752189636231, 0.005156320095062256, 0.005185632228851318, 0.005159520149230957, 0.005172255992889404, 0.005197984218597412, 0.005150400161743164, 0.005108704090118408, 0.005167232036590576, 0.005128064155578613, 0.005139776229858398, 0.005151423931121826, 0.0053146882057189945, 0.005193568229675293, 0.005289087772369385, 0.005346208095550537, 0.005212160110473632, 0.0052633600234985355, 0.005352928161621094, 0.005489183902740479, 0.005361216068267822, 0.0053367681503295895, 0.0052846078872680665, 0.005306367874145508, 0.005435328006744385, 0.0054002881050109865, 0.005415264129638672, 0.005318655967712403, 0.005346496105194092, 0.005385216236114502, 0.005359615802764893, 0.005311615943908691, 0.005238592147827148, 0.005299071788787842, 0.005170688152313232, 0.005107391834259034, 0.005413695812225342, 0.005350783824920654, 0.005171807765960693, 0.005357503890991211, 0.0052486081123352055, 0.005127776145935059, 0.005360544204711914, 0.005841152191162109, 0.005682943820953369, 0.005331103801727295, 0.00521776008605957, 0.0051773757934570315, 0.005274015903472901, 0.005298111915588379, 0.005272831916809082, 0.005331711769104004, 0.005254367828369141, 0.0053396477699279785, 0.00540496015548706, 0.005398528099060058, 0.0053432960510253905, 0.005435296058654785, 0.0051792640686035155, 0.005306496143341065, 0.005257376194000244, 0.005586368083953857, 0.00523638391494751, 0.005391104221343994, 0.005175295829772949, 0.0051684479713439944, 0.0052841281890869144, 0.005185664176940918, 0.005190176010131836, 
0.0051363840103149415, 0.00512505578994751, 0.005172095775604248, 0.00517523193359375, 0.0052715520858764645, 0.00523203182220459, 0.005187456130981445, 0.005245759963989258, 0.005359519958496093, 0.005646527767181397, 0.006288479804992675, 0.005474239826202393, 0.005421855926513672, 0.005275648117065429, 0.005453824043273926, 0.005402624130249023, 0.005312511920928955, 0.005287295818328858, 0.005358208179473877, 0.005292031764984131, 0.005238111972808838, 0.005288608074188232, 0.00514796781539917, 0.0051452798843383786, 0.005269760131835937, 0.005303264141082764, 0.00527235221862793, 0.005281792163848877, 0.0054744000434875486, 0.005354944229125976, 0.005418975830078125, 0.005306879997253418, 0.0053981437683105465, 0.00533951997756958, 0.005378047943115235, 0.005223584175109863, 0.005231232166290283, 0.005215712070465088, 0.005270271778106689, 0.005298431873321533, 0.005466879844665528, 0.005315584182739258, 0.005400576114654541, 0.005247039794921875, 0.0052072639465332035, 0.0051979198455810545, 0.005437920093536377, 0.005176544189453125, 0.005145535945892334, 0.0052239041328430175, 0.0052024641036987306, 0.00512172794342041, 0.005294303894042968, 0.00533951997756958, 0.005241824150085449, 0.0052518720626831055, 0.005359615802764893, 0.005206208229064941, 0.005218111991882325, 0.005310080051422119, 0.005220960140228272, 0.005348703861236572, 0.005235040187835693, 0.0052185277938842775, 0.005711264133453369, 0.005337600231170654, 0.005214176177978516, 0.005167103767395019, 0.005256576061248779, 0.005202144145965576, 0.005202335834503174, 0.005171199798583984, 0.005386240005493164, 0.005201920032501221, 0.005193727970123291, 0.005244128227233887, 0.005288735866546631, 0.0053311362266540525, 0.0052483839988708494, 0.005175392150878907, 0.0051244478225708, 0.0052566719055175785, 0.005208608150482178, 0.005269279956817627, 0.005433472156524658, 0.005210207939147949, 0.0053678078651428224, 0.005302271842956543, 0.0053556480407714845, 0.005500319957733154, 0.005347360134124756, 0.005202367782592774, 0.005251359939575195, 0.005341184139251709, 0.005424255847930908, 0.005433792114257812, 0.005349535942077637, 0.0052912960052490235, 0.005340127944946289, 0.005356927871704101, 0.005421472072601318, 0.005414048194885254, 0.005396575927734375, 0.005374144077301026, 0.005321023941040039, 0.005359839916229248, 0.005494175910949707, 0.005323359966278076, 0.005375999927520752, 0.005341184139251709, 0.005605375766754151, 0.005347328186035156, 0.005414912223815918, 0.005464064121246338, 0.005341472148895264, 0.005214144229888916, 0.005351424217224121, 0.005338399887084961, 0.005313248157501221, 0.005299263954162597, 0.005331935882568359, 0.005287903785705567, 0.0053944320678710935, 0.005294079780578613, 0.005309919834136963, 0.005245183944702149, 0.005331232070922852, 0.005228127956390381, 0.005402719974517823, 0.005349696159362793, 0.005385600090026856, 0.005347775936126709, 0.0054683518409729005, 0.005238016128540039, 0.005239552021026612, 0.00532480001449585, 0.005257279872894287, 0.005196735858917236, 0.005232672214508057, 0.0052267518043518065, 0.005397215843200683, 0.0053350400924682614, 0.005190656185150146, 0.005171584129333496, 0.005214752197265625, 0.005406239986419678, 0.005413440227508545, 0.005380064010620117, 0.005281824111938477, 0.00522649621963501, 0.00530025577545166, 0.005279712200164795, 0.005293536186218262, 0.005261856079101563, 0.005311776161193848, 0.005194111824035645, 0.005220831871032715, 0.005216000080108643, 0.005148799896240235, 0.005119999885559082, 0.0051560959815979005, 
0.00517196798324585, 0.005287936210632324, 0.005276735782623291, 0.005249983787536621, 0.0051998720169067385, 0.005167103767395019, 0.005206016063690186, 0.005146048069000244, 0.005167232036590576, 0.005273215770721436, 0.0051448321342468266, 0.005146944046020508, 0.005144959926605225, 0.005144192218780518, 0.005124351978302002, 0.005148672103881836, 0.005191679954528809, 0.005269792079925537, 0.005338784217834473, 0.0053108158111572264, 0.005281472206115722, 0.005216896057128906, 0.00521120023727417, 0.005280096054077149, 0.00519817590713501, 0.005146560192108155, 0.005295455932617188, 0.005165567874908447, 0.005146527767181396, 0.00515718412399292, 0.005214144229888916, 0.005311935901641846, 0.005262080192565918, 0.005240640163421631, 0.005220479965209961, 0.005201791763305664, 0.005254432201385498, 0.005173984050750732, 0.005111807823181152, 0.0051673917770385745, 0.005158143997192383, 0.005155392169952392, 0.0051528639793396, 0.0052089600563049315, 0.005147583961486816, 0.00516096019744873, 0.005246751785278321, 0.005183104038238525, 0.00515712022781372, 0.005191199779510498, 0.005186367988586426, 0.0054203200340271, 0.005219039916992188, 0.0052091522216796875, 0.005206975936889648, 0.005148672103881836, 0.005262432098388672, 0.005230847835540771, 0.005178016185760498, 0.005468160152435303, 0.005297279834747314, 0.005247871875762939, 0.005275648117065429, 0.005209472179412842, 0.005214848041534424, 0.0052854719161987305, 0.005274015903472901, 0.005190847873687744, 0.005245120048522949, 0.005357344150543213, 0.0052206401824951175, 0.005186111927032471, 0.005249023914337158, 0.005238783836364746, 0.005197824001312256, 0.005227744102478027, 0.005208896160125732, 0.0052278399467468265, 0.0054579520225524905, 0.00525171184539795, 0.005353312015533448, 0.005427231788635254, 0.0053431038856506345, 0.005270495891571045, 0.005357279777526856, 0.0053136320114135745, 0.005258431911468506, 0.005348864078521728, 0.005277696132659912, 0.005220032215118408, 0.005193632125854492, 0.005362304210662841, 0.005439487934112549, 0.005433343887329102, 0.005310495853424072, 0.005236512184143067, 0.00526796817779541, 0.005383872032165528, 0.0052173762321472165, 0.005178271770477295, 0.005232639789581299, 0.005202976226806641, 0.0052306241989135745, 0.00543225622177124, 0.005272960186004639, 0.0052271361351013184, 0.005281792163848877, 0.0052408318519592285, 0.005334976196289062, 0.005214272022247314, 0.005212160110473632, 0.005207808017730713, 0.005257567882537842, 0.0054999361038208005, 0.005392672061920166, 0.005273952007293701, 0.005276095867156982, 0.0053491201400756834, 0.005380159854888916, 0.005318975925445556, 0.005291711807250976, 0.005374271869659424, 0.005328735828399658, 0.005332831859588623, 0.005379263877868652, 0.005339263916015625, 0.005294655799865723, 0.005334591865539551, 0.005325439929962158, 0.0052919678688049315, 0.005283423900604248, 0.005261663913726807, 0.00524070405960083, 0.005210112094879151, 0.005293504238128662, 0.005223167896270752, 0.005265151977539063, 0.005366015911102295, 0.005354623794555664, 0.0052941122055053715, 0.005299392223358154, 0.005248672008514405, 0.005260672092437744, 0.0054618239402770994, 0.0056444802284240725, 0.005473951816558838, 0.005498976230621338, 0.005428575992584229, 0.005274335861206055, 0.005309663772583008, 0.005282944202423096, 0.00525705623626709, 0.005344736099243164, 0.005221183776855469, 0.005188608169555664, 0.005227231979370117, 0.005216032028198242, 0.005296192169189453, 0.005218560218811035, 0.005345183849334717, 0.0051690878868103024, 
0.005232704162597656, 0.005289408206939697, 0.005273344039916993, 0.005280416011810302, 0.005247168064117432, 0.005232192039489746, 0.0052046399116516115, 0.005368800163269043, 0.005418975830078125, 0.0055075201988220215, 0.0053203201293945315, 0.0053788161277770995, 0.0053331518173217775, 0.0052326078414916995, 0.005336959838867187, 0.00524399995803833, 0.005400576114654541, 0.005389215946197509, 0.005422336101531982, 0.005309184074401855, 0.0052609601020812986, 0.005231167793273926, 0.005381728172302246, 0.005283967971801758, 0.005245120048522949, 0.00529804801940918, 0.005230591773986816, 0.005234687805175781, 0.005371583938598633, 0.005421599864959717, 0.0057075519561767575, 0.0054579200744628905, 0.005349376201629639, 0.005623807907104492, 0.005545695781707763, 0.005427487850189209, 0.005570559978485107, 0.005938591957092285, 0.005557119846343994, 0.005432576179504395, 0.005403103828430175, 0.005473760128021241, 0.005414624214172364, 0.0053805441856384275, 0.0054685440063476564, 0.0054041919708251954, 0.005431104183197022, 0.005607168197631836, 0.005460480213165283, 0.005474143981933593, 0.005359295845031738, 0.005286784172058106, 0.0053422718048095705, 0.005650752067565918, 0.0054048957824707035, 0.005455584049224854, 0.005392288208007813, 0.005386047840118409, 0.005546239852905274, 0.005516160011291504, 0.005471968173980713, 0.005425280094146729, 0.005397664070129394, 0.005448544025421142, 0.005482175827026367, 0.005395040035247802, 0.005424191951751709, 0.005318367958068847, 0.005268415927886963, 0.005273600101470947, 0.005281343936920166, 0.005294496059417724, 0.005348383903503418, 0.005272192001342773, 0.005251455783843994, 0.005359072208404541, 0.0054646081924438475, 0.005418528079986573, 0.005374368190765381, 0.00537391996383667, 0.005380032062530518, 0.0053638720512390135, 0.005314559936523438, 0.005274623870849609, 0.0052740797996521, 0.005322336196899414, 0.005360000133514405, 0.0054830718040466304, 0.005349376201629639, 0.005582848072052002, 0.005528895854949951, 0.00541103982925415, 0.005437695980072021, 0.005375391960144043, 0.0052928638458251956, 0.0053628478050231935, 0.005438303947448731, 0.005362720012664795, 0.005378176212310791, 0.005378143787384033, 0.005288703918457031, 0.005425183773040771, 0.0056312642097473145, 0.005812928199768066, 0.005390336036682129, 0.005303328037261963, 0.005379007816314697, 0.005403679847717285, 0.005257023811340332, 0.005336544036865234, 0.005346015930175781, 0.005395904064178467, 0.0059818878173828124, 0.005403520107269287, 0.005543039798736572, 0.005474783897399902, 0.005482912063598633, 0.005492735862731934, 0.0054988799095153805, 0.005453248023986817, 0.005386112213134765, 0.005366208076477051, 0.005289696216583252, 0.005317503929138183, 0.005395423889160156, 0.005329599857330322, 0.005343232154846191, 0.0053608322143554685, 0.005230527877807617, 0.005270368099212647, 0.005226208209991455, 0.005220672130584717, 0.005167168140411377, 0.005263232231140137, 0.005244991779327393, 0.005213600158691406, 0.005268064022064209, 0.005327904224395752, 0.005261600017547607, 0.005315264225006103, 0.005197824001312256, 0.005180575847625732, 0.005275968074798584, 0.005328959941864013, 0.0052962880134582516, 0.0054963197708129885, 0.005325632095336914, 0.005349376201629639, 0.005375967979431152, 0.005341216087341309, 0.005322080135345459, 0.005309088230133057, 0.005503007888793945, 0.005332128047943115, 0.005261951923370361, 0.0052143998146057125, 0.005253119945526123, 0.005507296085357666, 0.005484320163726806, 0.005428800106048584, 
0.00543993616104126, 0.0053834238052368165, 0.005310656070709229, 0.005333248138427734, 0.0053431358337402346, 0.005457824230194092, 0.005404831886291504, 0.005435743808746338, 0.0053637118339538575, 0.005400576114654541, 0.005339136123657226, 0.005264544010162354, 0.005479263782501221, 0.005496831893920898, 0.005346399784088135, 0.005364640235900879, 0.0053043198585510255, 0.005261312007904053, 0.0053637118339538575, 0.006127232074737548, 0.005709856033325195, 0.0053702077865600585, 0.00542464017868042, 0.005533408164978027, 0.005831200122833252, 0.0054069762229919435, 0.005709919929504394, 0.005508512020111084, 0.005324831962585449, 0.0053079681396484375, 0.005280511856079102, 0.005420415878295898, 0.005410816192626953, 0.005317503929138183, 0.005445536136627197, 0.005412928104400635, 0.005455840110778809, 0.005361248016357422, 0.005439871788024902, 0.00546611213684082, 0.005414912223815918, 0.005566463947296142, 0.005502975940704345, 0.005425151824951172, 0.00550707197189331, 0.00548473596572876, 0.0054448962211608885, 0.005409440040588379, 0.0054282560348510745, 0.005515135765075683, 0.005567455768585205, 0.0054271998405456545, 0.0053534722328186036, 0.005410816192626953, 0.00532480001449585, 0.005582208156585693, 0.005528319835662842, 0.005477503776550293, 0.005374144077301026, 0.005316383838653564, 0.0052594242095947265, 0.005304927825927734, 0.005328767776489258, 0.00559500789642334, 0.005335008144378662, 0.00524118423461914, 0.005215775966644287, 0.005220096111297607, 0.005550271987915039, 0.0053887038230896, 0.005359712123870849, 0.005298175811767578, 0.005240479946136475, 0.0052923521995544434]",tokens/s,188.13661465501582,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, 
**model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 374, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 117, in __init__ self.q_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 126459 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1121, in __init__ self.embed_out = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 592.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 300.12 MiB is free. Process 193276 has 14.45 GiB memory in use. Of the allocated memory 14.33 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,738.89792,6315.507712,0.0,5920.260096,5695.433728,s,1,7.3876904296875,7.3876904296875,0.0,7.3876904296875,7.3876904296875,7.3876904296875,7.3876904296875,[7.3876904296875],,kWh,8.986639549997715e-06,9.709685918221336e-07,4.50611471600193e-06,1.4463722857821777e-05,,MB,1064.28416,6328.090624,0.0,5922.357248,5577.220096,s,10,5.196808197021484,0.5196808197021484,0.003407965845916442,0.5197416381835938,0.5229087463378906,0.5238181121826172,0.5245456048583984,"[0.5115663757324219, 0.520348388671875, 0.51844873046875, 0.5185545654296875, 0.5213436279296875, 0.5227066650390625, 0.5247274780273438, 0.5191348876953125, 0.5178452758789063, 0.5221322021484375]",tokens/s,492.6100604342579,kWh,1.5207874129583086e-05,1.6765977565631868e-06,1.0062230271999998e-05,2.6946702158146272e-05,tokens/kWh,9500234.889507936,MB,1090.33472,6328.090624,0.0,5922.357248,5663.963136,s,10,20.073852661132815,2.007385266113281,0.007085642304722359,2.0098646240234377,2.013493786621094,2.0144458068847655,2.015207423095703,"[2.0055675048828125, 1.990516845703125, 2.0153978271484374, 2.0097249755859377, 2.0074752197265626, 2.0100042724609377, 1.9990238037109376, 2.0132822265625, 2.0117939453125, 2.0110660400390623]",tokens/s,31.384109997968263,kWh,5.835305238583259e-05,6.436982631482033e-06,3.8695558734199933e-05,0.00010348559375151456,tokens/kWh,608780.3888072871,,s,630,20.070110181808488,0.03185731774890234,0.00032581729772583455,0.031819424629211425,0.032132120513916014,0.03226419200897217,0.03332832782745361,"[0.03364457702636719, 0.032511905670166014, 0.031821855545043944, 0.03167440032958985, 0.03160035133361817, 0.03163164710998535, 0.031692384719848636, 0.03164780807495117, 0.03161692810058594, 0.031658432006835935, 0.03155763244628906, 0.03162112045288086, 0.031893503189086916, 0.03163750457763672, 0.03157318305969238, 0.031683391571044925, 0.03163875198364258, 0.031537952423095705, 0.03178275108337402, 0.0316921272277832, 0.031609664916992186, 0.031647743225097655, 0.031681631088256834, 0.03170121574401855, 0.03179795265197754, 0.03204217529296875, 0.03178486442565918, 0.031685535430908206, 0.03170099258422852, 0.03175155258178711, 0.032068225860595705, 0.032020481109619144, 0.031970752716064456, 0.03193449592590332, 0.03193091201782226, 0.03188531112670898, 0.032010238647460935, 0.031859935760498045, 0.031705888748168945, 0.031645696640014646, 0.03176038360595703, 0.03180748748779297, 0.03198953628540039, 0.0317606086730957, 0.03176198387145996, 0.03175673675537109, 0.031821823120117186, 0.031926271438598636, 
0.03189491271972656, 0.03185523223876953, 0.03183206367492676, 0.03185647964477539, 0.03193395233154297, 0.032012958526611325, 0.03196425628662109, 0.031870880126953126, 0.031716352462768556, 0.03179929542541504, 0.031757535934448244, 0.031742752075195314, 0.03178291130065918, 0.03192422485351563, 0.032128158569335936, 0.03323545455932617, 0.03220479965209961, 0.03161907196044922, 0.03136531257629394, 0.031268672943115236, 0.03119094467163086, 0.03125257682800293, 0.03129343986511231, 0.03142860794067383, 0.0313055362701416, 0.03136531257629394, 0.03120742416381836, 0.031238143920898437, 0.031320064544677735, 0.03128639984130859, 0.0314081916809082, 0.03133523178100586, 0.031297407150268554, 0.03127900886535644, 0.03128956794738769, 0.03134806442260742, 0.03133872032165527, 0.0313450870513916, 0.03139776039123535, 0.03132019233703613, 0.0313956470489502, 0.031393983840942385, 0.03136511993408203, 0.0314748477935791, 0.031529823303222654, 0.03161622428894043, 0.0316563835144043, 0.03176883125305176, 0.03214960098266602, 0.031538240432739256, 0.03141932868957519, 0.03154431915283203, 0.03152899169921875, 0.03147651290893555, 0.03152607917785644, 0.03195337677001953, 0.03204150390625, 0.03173782348632812, 0.03154249572753906, 0.03160108757019043, 0.03157439994812012, 0.0314839038848877, 0.03145491218566895, 0.03176684761047363, 0.03164713668823242, 0.03174870491027832, 0.03176985549926758, 0.03173862457275391, 0.0319815673828125, 0.03183103942871094, 0.03170809555053711, 0.03170076751708984, 0.03169513511657715, 0.031747808456420896, 0.03189545631408691, 0.03225433731079102, 0.03203440093994141, 0.031951263427734376, 0.03333001708984375, 0.032143360137939454, 0.03180544090270996, 0.031597759246826174, 0.031690784454345707, 0.031598976135253906, 0.03160895919799805, 0.032005630493164065, 0.031691551208496094, 0.0318599681854248, 0.031759103775024417, 0.031716543197631834, 0.03188585662841797, 0.03184259223937988, 0.03176355171203613, 0.03165072059631348, 0.03174195289611816, 0.03171520042419434, 0.03182198333740234, 0.03174332809448242, 0.03172211265563965, 0.031752191543579104, 0.03174399948120117, 0.031649248123168945, 0.03164825630187988, 0.031704479217529294, 0.03171801567077637, 0.031808895111083986, 0.031928255081176755, 0.03219731140136719, 0.03217366409301758, 0.03227689743041992, 0.032069633483886716, 0.031936511993408204, 0.031903743743896484, 0.03194495964050293, 0.03185024070739746, 0.03177881622314453, 0.031825504302978515, 0.031951263427734376, 0.03189145660400391, 0.03189760017395019, 0.03199532890319824, 0.03189123153686523, 0.03187824058532715, 0.031848127365112305, 0.03422604751586914, 0.03305449676513672, 0.03204745483398438, 0.03199542427062988, 0.031930591583251955, 0.032069889068603516, 0.032094207763671875, 0.03196256065368652, 0.03200057601928711, 0.03209011077880859, 0.03209011077880859, 0.03214131164550781, 0.03209830474853516, 0.03224576187133789, 0.03235158538818359, 0.03224233627319336, 0.032405502319335935, 0.033595169067382816, 0.03245078277587891, 0.03198361587524414, 0.03163955116271973, 0.03162521553039551, 0.03153715133666992, 0.03158220863342285, 0.03165798377990723, 0.03172473526000977, 0.031646528244018556, 0.031657312393188475, 0.03177948760986328, 0.03174604797363281, 0.03163955116271973, 0.03172966384887695, 0.03166566467285156, 0.03164825630187988, 0.03169475173950195, 0.03178508758544922, 0.03177264022827148, 0.031850496292114255, 0.03180339241027832, 0.031774400711059574, 0.031932735443115236, 0.03185868835449219, 0.03182086372375488, 
0.03178191947937012, 0.0317807674407959, 0.03178256034851074, 0.03189174461364746, 0.03197513580322266, 0.03192188835144043, 0.03188800048828125, 0.031936511993408204, 0.03198512077331543, 0.03185103988647461, 0.03173948860168457, 0.03179151916503906, 0.03180691146850586, 0.03181011199951172, 0.031971328735351565, 0.03179520034790039, 0.03176227188110352, 0.03176041603088379, 0.03179088020324707, 0.03217343902587891, 0.03187401580810547, 0.032478782653808595, 0.03190940856933594, 0.031846847534179684, 0.031789535522460936, 0.0321223030090332, 0.032096832275390626, 0.03199702453613281, 0.03188591957092285, 0.03198582458496094, 0.032014400482177734, 0.032030208587646485, 0.031998559951782225, 0.03224921417236328, 0.0320579833984375, 0.0320184326171875, 0.03218841552734375, 0.033562625885009766, 0.03228876876831055, 0.03167334365844727, 0.03148886489868164, 0.0315098876953125, 0.03154780769348144, 0.031790496826171875, 0.03160323143005371, 0.031513023376464847, 0.031528959274291994, 0.031620992660522464, 0.03166425514221191, 0.031663455963134766, 0.03159721565246582, 0.03155558395385742, 0.031494144439697266, 0.03177676773071289, 0.031627264022827145, 0.03159404754638672, 0.03158060836791992, 0.0318047046661377, 0.031816415786743164, 0.03166630363464355, 0.03164665603637695, 0.03173472023010254, 0.03175014305114746, 0.031719423294067385, 0.0317706241607666, 0.031753759384155276, 0.032018207550048826, 0.03195964813232422, 0.03200214385986328, 0.032010238647460935, 0.0320145263671875, 0.03197420883178711, 0.03186345672607422, 0.031848703384399414, 0.03181167984008789, 0.03182083129882812, 0.03175699234008789, 0.03176380729675293, 0.031777727127075196, 0.0317640323638916, 0.031651487350463865, 0.03183603286743164, 0.03178793525695801, 0.03172944068908692, 0.031768672943115236, 0.03186700820922852, 0.031993600845336916, 0.03188966369628906, 0.03191398429870605, 0.03208806228637695, 0.03222083282470703, 0.03216524887084961, 0.03247817611694336, 0.03197542381286621, 0.03211075210571289, 0.03207900619506836, 0.032154304504394535, 0.03211468887329102, 0.03230892944335938, 0.03227225494384765, 0.03355865478515625, 0.032326431274414064, 0.031844127655029295, 0.03173548889160156, 0.03172831916809082, 0.03156771278381348, 0.03159040069580078, 0.03158835220336914, 0.031524864196777344, 0.031784959793090824, 0.03174959945678711, 0.031830560684204104, 0.0318317756652832, 0.03165417671203613, 0.0316331844329834, 0.03172988891601562, 0.03171123123168945, 0.03168460845947266, 0.031649791717529296, 0.03160883140563965, 0.03187302398681641, 0.031741056442260746, 0.031654399871826173, 0.03162252807617188, 0.03171612739562988, 0.0318047046661377, 0.0318591365814209, 0.03189311981201172, 0.03184934425354004, 0.03194009590148926, 0.0326426887512207, 0.031996608734130856, 0.031919551849365235, 0.03184934425354004, 0.03179471969604492, 0.0317604808807373, 0.031993215560913085, 0.032549663543701174, 0.031817344665527346, 0.03176259231567383, 0.0319550724029541, 0.03198176002502441, 0.03178000068664551, 0.03182691192626953, 0.03199324798583984, 0.03192265510559082, 0.03188531112670898, 0.03195020866394043, 0.03189769554138184, 0.031830560684204104, 0.03182796859741211, 0.032018241882324217, 0.03197766494750977, 0.032045280456542966, 0.03193014335632324, 0.03198111915588379, 0.03199750328063965, 0.03195788764953613, 0.03204095840454101, 0.032116161346435544, 0.032078399658203124, 0.032142654418945316, 0.03215020751953125, 0.03330598449707031, 0.03207619094848633, 0.03159791946411133, 0.03177353668212891, 
0.031531007766723636, 0.03138559913635254, 0.0313753604888916, 0.03138553619384766, 0.03126217651367187, 0.03138179206848145, 0.031612543106079104, 0.03155612754821777, 0.03156368064880371, 0.0315346565246582, 0.0315516471862793, 0.0314619197845459, 0.03136307144165039, 0.03144499206542969, 0.0314768009185791, 0.03151558494567871, 0.03145840072631836, 0.03149663925170899, 0.031496383666992187, 0.03162345504760742, 0.031512575149536134, 0.0315043830871582, 0.031536832809448245, 0.031586624145507815, 0.031514623641967776, 0.031733760833740236, 0.031784032821655275, 0.03170528030395508, 0.03171180725097656, 0.03203702545166016, 0.0318525447845459, 0.03173318481445313, 0.031734336853027345, 0.0316146240234375, 0.0316964168548584, 0.03167519950866699, 0.03157811164855957, 0.03157401657104492, 0.032292865753173826, 0.032005599975585934, 0.031765024185180665, 0.032126625061035155, 0.03200649642944336, 0.031784959793090824, 0.031874719619750976, 0.03187337684631348, 0.031786272048950195, 0.031731712341308595, 0.0318450870513916, 0.03188732719421387, 0.03175638389587403, 0.03185164833068848, 0.031802175521850586, 0.03169251251220703, 0.031991615295410156, 0.03175472068786621, 0.03193804740905762, 0.03233433532714844, 0.03222937774658203, 0.03330867385864258, 0.03218022537231445, 0.0318832950592041, 0.031659231185913086, 0.03170995140075684, 0.03162931251525879, 0.03157811164855957, 0.031676416397094724, 0.03172537612915039, 0.03169907188415527, 0.03177273559570312, 0.0319180793762207, 0.03174720001220703, 0.03172543907165527, 0.03184687995910645, 0.031814176559448244, 0.032099998474121094, 0.03182947158813477, 0.03184115219116211, 0.03181740760803223, 0.03184854316711426, 0.03185481643676758, 0.031838207244873046, 0.031866880416870115, 0.03179238319396973, 0.03185113525390625, 0.03177894401550293, 0.0318951358795166, 0.03191001510620117, 0.03193065643310547, 0.03213523101806641, 0.03200121688842773, 0.03210521697998047, 0.032010494232177736, 0.031923967361450194, 0.03198975944519043, 0.031919359207153324, 0.03187788772583008, 0.03193391990661621, 0.03202921676635742, 0.03187449645996094, 0.03184848022460938, 0.03202835083007813, 0.03201849746704102, 0.03246160125732422, 0.032097793579101565, 0.03180118370056152, 0.031868896484375, 0.03213177490234375, 0.03217407989501953, 0.03196928024291992, 0.03205254364013672, 0.03206828689575195, 0.03207721710205078, 0.03190025520324707, 0.03192793655395508, 0.03191231918334961, 0.03194175910949707, 0.03200294494628906, 0.03209625625610352, 0.03220012664794922, 0.032285247802734375, 0.03220275115966797, 0.0336558723449707, 0.03234255981445312, 0.03182022476196289, 0.03171327972412109, 0.031719423294067385, 0.031579967498779296, 0.03161734390258789, 0.031737344741821286, 0.031721216201782226, 0.03154598426818848, 0.03174617576599121, 0.031712352752685545, 0.03171817588806152, 0.031718816757202145, 0.03173391914367676, 0.03160518455505371, 0.03174399948120117, 0.031741439819335936, 0.03187148857116699, 0.031763999938964844, 0.03173606491088867, 0.03172710418701172, 0.03182665634155273, 0.03188060760498047, 0.031879776000976565, 0.03188051223754883, 0.03179999923706055, 0.03172336006164551, 0.031806751251220705, 0.03199884796142578, 0.032014209747314455, 0.032083393096923825, 0.03202732849121094, 0.032015872955322267, 0.032010753631591796, 0.03191193580627441, 0.03193391990661621, 0.03188995170593262, 0.031938304901123045, 0.031749504089355465, 0.03181862449645996, 0.032102081298828126, 0.0319768009185791, 0.03183244705200195, 0.03195881652832031, 
0.03186054420471191, 0.03190003204345703, 0.03187366485595703, 0.032059391021728514, 0.031940031051635745, 0.032008094787597655, 0.03202320098876953, 0.03205238342285156, 0.03214422225952149, 0.03213625717163086, 0.03207632064819336, 0.03211920166015625, 0.032115745544433597, 0.03207215881347656, 0.032045726776123044, 0.032180065155029296, 0.03215359878540039, 0.032249855041503905, 0.03332419204711914, 0.03208687973022461, 0.03204428863525391, 0.0317652473449707, 0.03178291130065918, 0.031858047485351565, 0.03163983917236328, 0.031678815841674805, 0.03183001518249512, 0.03170508766174317, 0.03165705680847168, 0.03168108749389648, 0.03164534378051758, 0.03162796783447266, 0.03176038360595703, 0.03183404731750488, 0.03171743965148926, 0.03163340759277344, 0.0317923526763916, 0.03177529525756836, 0.03180771255493164, 0.03171846389770508, 0.031802175521850586, 0.03169452857971192, 0.03179295921325684, 0.03189823913574219, 0.0319180793762207, 0.03292095947265625, 0.032139808654785156, 0.03182601547241211, 0.031938560485839845, 0.03201612854003906, 0.03217184066772461, 0.03257769775390625, 0.03200780868530274, 0.03189561653137207, 0.03185510444641113, 0.03192563247680664, 0.031800031661987305, 0.03177388763427735, 0.031884096145629884, 0.031677600860595706, 0.031734624862670896, 0.03173776054382324, 0.031717472076416016, 0.031735807418823245, 0.03191791915893555, 0.031799455642700196, 0.03181059265136719, 0.031910688400268554, 0.03188723182678223, 0.03197932815551758, 0.03212457656860351, 0.031923040390014645, 0.032046497344970705, 0.03191456031799316, 0.032087200164794924, 0.03199830436706543, 0.03201257705688477, 0.03200153732299805, 0.032107425689697267, 0.03210764694213867, 0.03224649429321289]",tokens/s,31.3899622021523,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,879.869952,14942.076928,0.0,14539.554816,14487.489536,s,1,7.2382421875,7.2382421875,0.0,7.2382421875,7.2382421875,7.2382421875,7.2382421875,[7.2382421875],,kWh,9.87832265001695e-06,1.082318677769969e-06,4.503336935995916e-06,1.5463978263782834e-05,,MB,1214.672896,14956.756992,0.0,14543.74912,13944.436736,s,10,12.923662963867189,1.2923662963867188,0.004900274819533397,1.293234375,1.2971669677734377,1.2983127319335939,1.2992293432617188,"[1.2824945068359375, 1.2874075927734374, 1.28919921875, 1.2896575927734375, 1.2933482666015625, 1.2931204833984375, 1.2968511962890625, 1.296912353515625, 1.29945849609375, 1.2952132568359376]",tokens/s,198.08625520159518,kWh,3.7675854273750244e-05,4.155190616411881e-06,2.488674213159958e-05,6.67177870217617e-05,tokens/kWh,3837057.7236996647,MB,1239.863296,14965.1456,0.0,14552.137728,13944.439296,s,10,45.02463085937501,4.502463085937499,0.006244778245450704,4.503966552734375,4.5091982910156245,4.509848901367188,4.510369389648438,"[4.50863720703125, 4.50670458984375, 4.49281982421875, 4.5090537109375, 4.50442333984375, 4.51049951171875, 4.503509765625, 4.494205078125, 
4.49941650390625, 4.495361328125]",tokens/s,13.99234125800327,kWh,0.00013144823281624588,1.4499759325514158e-05,8.756023671480095e-05,0.00023350822885656093,tokens/kWh,269797.77247464604,,s,630,45.02154116058347,0.07146276374695792,0.0003002489394997939,0.07139748764038087,0.07178511886596681,0.07203483543395997,0.07246180793762208,"[0.07297843170166016, 0.07192166137695312, 0.07160569763183594, 0.07141999816894531, 0.07444732666015624, 0.07138841247558594, 0.07165574645996094, 0.07146336364746093, 0.07130521392822266, 0.07211756896972656, 0.07179116821289062, 0.07157548522949218, 0.071419677734375, 0.0713853759765625, 0.07128457641601563, 0.07135874938964844, 0.07142195129394531, 0.0713359375, 0.07132736206054688, 0.07127078247070312, 0.07125138854980469, 0.07124777221679687, 0.07139766693115235, 0.07122982025146485, 0.07141375732421874, 0.0717844467163086, 0.07156735992431641, 0.07167151641845704, 0.07138438415527344, 0.07172956848144531, 0.07134588623046875, 0.07127699279785156, 0.07120732879638672, 0.0712573471069336, 0.07127069091796875, 0.07143583679199218, 0.07138396453857422, 0.07165952301025391, 0.07159926605224609, 0.0716355209350586, 0.07135238647460937, 0.07198137664794922, 0.07139900970458984, 0.07132588958740234, 0.07167155456542969, 0.07148371124267579, 0.07126022338867187, 0.07122943878173828, 0.07114752197265625, 0.07111484527587891, 0.07110851287841796, 0.07132316589355468, 0.07152819061279297, 0.07249539184570312, 0.07183200073242188, 0.07183372497558593, 0.07140975952148437, 0.07157078552246093, 0.0715074234008789, 0.0715513916015625, 0.07150601959228516, 0.07180335998535156, 0.07163862609863281, 0.07227977752685547, 0.07164752197265625, 0.0715683822631836, 0.07161126708984375, 0.07160620880126953, 0.07135250854492188, 0.0713154525756836, 0.07155900573730468, 0.07127609252929687, 0.07132176208496094, 0.07128876495361328, 0.07172557067871094, 0.07176121520996094, 0.07162131500244141, 0.07149321746826172, 0.0716395492553711, 0.07164256286621094, 0.07142243194580078, 0.07134003448486329, 0.0712900161743164, 0.07142486572265624, 0.07141375732421874, 0.07135769653320312, 0.0714688949584961, 0.0711767349243164, 0.07129913330078125, 0.0712192611694336, 0.07135257720947266, 0.07139667510986328, 0.071242431640625, 0.07128473663330079, 0.07152025604248047, 0.07128678131103515, 0.07111894226074218, 0.07228963470458985, 0.07144076538085938, 0.07131088256835938, 0.07138790130615234, 0.07138240051269532, 0.07163549041748046, 0.07171446228027344, 0.07170604705810547, 0.07147203063964844, 0.07152639770507813, 0.07140966033935547, 0.07143218994140625, 0.0716060791015625, 0.07143443298339844, 0.07132537841796875, 0.0713497314453125, 0.07146908569335937, 0.07138579559326172, 0.07168013000488281, 0.07177011108398437, 0.07273677062988282, 0.07169229125976563, 0.07191961669921874, 0.07156253051757812, 0.07143843078613281, 0.0722537612915039, 0.07177043151855468, 0.07148339080810547, 0.0714629135131836, 0.07207692718505859, 0.07139897918701171, 0.0713382110595703, 0.07128662109375, 0.07139968109130859, 0.0714032974243164, 0.07137353515625, 0.07128473663330079, 0.07129046630859374, 0.07113155364990234, 0.07112525177001953, 0.0712941436767578, 0.07118685150146484, 0.07133180999755859, 0.0712081298828125, 0.07137088012695313, 0.07121331024169922, 0.0711863021850586, 0.07113394927978516, 0.07142400360107422, 0.07108566284179688, 0.07106784057617188, 0.07129724884033203, 0.07121292877197266, 0.07105958557128907, 0.07140476989746093, 0.07111555480957031, 0.07124742126464843, 0.07124422454833984, 
0.07126834869384766, 0.07126834869384766, 0.07172096252441407, 0.07229587554931641, 0.07161907196044921, 0.07118649291992188, 0.07127244567871094, 0.07129888153076172, 0.07133817291259766, 0.07126834869384766, 0.07139299011230468, 0.07126761627197266, 0.07119769287109375, 0.07130831909179687, 0.071384033203125, 0.07137836456298828, 0.0711665267944336, 0.0710572509765625, 0.07136271667480469, 0.07127449798583985, 0.0711740493774414, 0.07108803558349609, 0.07127263641357422, 0.07214284515380859, 0.07122889709472656, 0.07125478363037109, 0.07120790100097656, 0.07122726440429687, 0.07109932708740234, 0.0712509765625, 0.0715230712890625, 0.07107593536376954, 0.07133606719970703, 0.07114342498779297, 0.07219673919677734, 0.071525634765625, 0.07111328125, 0.07107807922363281, 0.07120236968994141, 0.07117459106445312, 0.07127654266357422, 0.07131494140625, 0.07139785766601563, 0.071446044921875, 0.07132621002197266, 0.07122870635986328, 0.07188880157470703, 0.07161529541015625, 0.07209881591796875, 0.07256476593017579, 0.07136764526367187, 0.0713024673461914, 0.07165638732910157, 0.07169407653808593, 0.0713828125, 0.07153641510009766, 0.07176451110839843, 0.07142390441894532, 0.07167346954345703, 0.07153190612792969, 0.07145897674560547, 0.07144719696044922, 0.07174787139892579, 0.07153632354736328, 0.07134435272216796, 0.07150940704345703, 0.072040771484375, 0.0716943359375, 0.07157174682617187, 0.07139942169189453, 0.07133773040771485, 0.07222454071044922, 0.07212486267089843, 0.07141788482666016, 0.0714629135131836, 0.07160320281982421, 0.07173577880859375, 0.07166825866699218, 0.07157299041748047, 0.07164323425292969, 0.07158006286621094, 0.07140735626220703, 0.07146278381347657, 0.07144681549072265, 0.0715549087524414, 0.0714345932006836, 0.07163894653320313, 0.07175337219238281, 0.07163324737548828, 0.07172201538085937, 0.07163085174560548, 0.07176092529296875, 0.07158777618408203, 0.07146701049804688, 0.07139737701416016, 0.07150592041015626, 0.07144857788085937, 0.07232415771484375, 0.07186528015136719, 0.07140767669677735, 0.0713930892944336, 0.07148966217041015, 0.07144790649414062, 0.07133650970458984, 0.07133503723144531, 0.07162569427490234, 0.07162879943847657, 0.07214083099365234, 0.07136844635009766, 0.07119872283935547, 0.07167203521728516, 0.07144857788085937, 0.0712806396484375, 0.0713359375, 0.07135231781005859, 0.07129686737060546, 0.07130332946777344, 0.07137289428710937, 0.07130915069580078, 0.07127251434326172, 0.07165692901611329, 0.07133238220214844, 0.07171481323242188, 0.07136566162109376, 0.0715110092163086, 0.07153778839111329, 0.07135852813720703, 0.07139974212646484, 0.07136102294921876, 0.07162480163574218, 0.07149148559570312, 0.07137894439697266, 0.07164672088623047, 0.07159171295166016, 0.07144316864013672, 0.0715255355834961, 0.07155388641357421, 0.07191929626464844, 0.07185446166992188, 0.07195600128173828, 0.07139100646972656, 0.07140006256103516, 0.07135343933105469, 0.07145913696289062, 0.07143280029296875, 0.07214195251464844, 0.07165142059326172, 0.07138713836669922, 0.07150006103515626, 0.07133439636230468, 0.07165132904052735, 0.07132978820800781, 0.07175945281982422, 0.07150390625, 0.07133184051513672, 0.07121759796142578, 0.07127033233642578, 0.07118438720703125, 0.07118006134033203, 0.0712217254638672, 0.07208444976806641, 0.0714076156616211, 0.0713175048828125, 0.07137689971923829, 0.07141990661621093, 0.07136460876464844, 0.0713707504272461, 0.07160012817382813, 0.07157965087890625, 0.07222467041015625, 0.07194630432128907, 0.07164112091064453, 
0.07155033874511718, 0.07161100769042969, 0.07215923309326172, 0.07237958526611328, 0.07181552124023438, 0.07149136352539062, 0.07251599884033202, 0.07191769409179688, 0.07190707397460938, 0.07165094757080079, 0.07176630401611328, 0.07194432067871094, 0.07189952087402343, 0.07179264068603515, 0.07163699340820312, 0.07183702087402344, 0.07213549041748046, 0.07171788787841797, 0.07189180755615235, 0.0717496337890625, 0.07154032135009766, 0.07148582458496094, 0.07202758026123048, 0.0715920639038086, 0.07169245147705078, 0.07158611297607421, 0.07158988952636719, 0.0716759033203125, 0.071550048828125, 0.07121193695068359, 0.0711817626953125, 0.07119315338134766, 0.07123967742919922, 0.07119222259521485, 0.07125609588623047, 0.07102188873291015, 0.07132672119140625, 0.07108975982666016, 0.07157328033447266, 0.07138317108154296, 0.07153433227539062, 0.07142272186279297, 0.07123353576660156, 0.07129702758789062, 0.07113056182861328, 0.0715269775390625, 0.07175759887695313, 0.07152275085449218, 0.07123948669433594, 0.07119017791748047, 0.07122978973388672, 0.07204863739013671, 0.07154291534423828, 0.07147299194335938, 0.0711618881225586, 0.07112703704833985, 0.07143218994140625, 0.0715219497680664, 0.07133197021484375, 0.07158601379394532, 0.07188480377197265, 0.07145267486572265, 0.0717265625, 0.0714939193725586, 0.07166553497314453, 0.07186061096191407, 0.0717291488647461, 0.0719299545288086, 0.07141168212890625, 0.07138706970214843, 0.07146054077148438, 0.07146937561035156, 0.07145641326904296, 0.07168656158447266, 0.07176290893554688, 0.07143740844726562, 0.07153600311279297, 0.07200819396972656, 0.07169404602050782, 0.07165789031982422, 0.07145664215087891, 0.0711923828125, 0.07142214202880859, 0.07130691528320313, 0.07131171417236327, 0.07137206268310547, 0.07119129943847656, 0.07127645111083984, 0.07146198272705079, 0.07128572845458984, 0.07134413146972657, 0.0713524169921875, 0.07156931304931641, 0.07154483032226562, 0.07145216369628907, 0.07140812683105469, 0.07126630401611328, 0.07134620666503906, 0.07159375762939453, 0.07221266937255859, 0.07143440246582031, 0.07112815856933594, 0.07145667266845702, 0.07121778869628906, 0.07125775909423829, 0.07134444427490234, 0.07139936065673828, 0.07147523498535156, 0.07130089569091796, 0.07129126739501954, 0.07128294372558594, 0.07161011505126953, 0.07142332458496094, 0.07127484893798829, 0.07189788818359374, 0.07162879943847657, 0.07119990539550781, 0.07130403137207031, 0.0712642593383789, 0.07112703704833985, 0.07125398254394531, 0.07124950408935547, 0.07143023681640626, 0.07127279663085938, 0.07113318634033203, 0.07125609588623047, 0.07124169921875, 0.0712449951171875, 0.07141468811035157, 0.0715324478149414, 0.07117823791503906, 0.0712656021118164, 0.071168701171875, 0.07136483001708985, 0.07111650848388672, 0.0711470718383789, 0.07112137603759766, 0.0713436508178711, 0.07114704132080078, 0.07118287658691407, 0.07130707550048829, 0.07131574249267578, 0.07141001892089843, 0.07142809295654297, 0.07121257781982422, 0.07140547180175781, 0.07127677154541015, 0.0712825927734375, 0.07109219360351562, 0.07111901092529296, 0.07106588745117187, 0.07119868469238282, 0.07116614532470703, 0.07128256225585937, 0.07118611145019531, 0.07112239837646485, 0.0711975326538086, 0.07133503723144531, 0.07129792022705078, 0.07157917022705078, 0.07124147033691407, 0.07283740997314453, 0.07134662628173828, 0.07150784301757812, 0.07110054779052734, 0.07146086120605469, 0.0713993911743164, 0.07141574096679687, 0.07137824249267578, 0.07123023986816407, 0.07132959747314453, 
0.0713843231201172, 0.07152054595947266, 0.07163795471191406, 0.07136841583251953, 0.07154688262939453, 0.07140668487548828, 0.07221965026855469, 0.0714700164794922, 0.07117385864257812, 0.0716366424560547, 0.07180352020263672, 0.07131974029541016, 0.07126207733154297, 0.07138508605957031, 0.07154073333740234, 0.07116915130615234, 0.07124457550048828, 0.07123977661132813, 0.07112703704833985, 0.07133379364013671, 0.07142396545410157, 0.07141798400878906, 0.07130944061279297, 0.07161228942871094, 0.07123942565917969, 0.07129254150390625, 0.07140412902832032, 0.07131926727294922, 0.07141177368164063, 0.07117823791503906, 0.07119657897949219, 0.07123903656005859, 0.07201660919189454, 0.07148102569580078, 0.07169065856933594, 0.07164256286621094, 0.07137276458740234, 0.07136752319335937, 0.07149539184570312, 0.07149785614013672, 0.07151827239990234, 0.07143218994140625, 0.07163289642333984, 0.07132160186767578, 0.07112675476074219, 0.07121775817871094, 0.07139295959472657, 0.07139759826660157, 0.07146678161621094, 0.07136854553222656, 0.07114278411865234, 0.07101497650146485, 0.07224342346191406, 0.07130217742919921, 0.07157849884033203, 0.0712537612915039, 0.07131763458251954, 0.07191574096679687, 0.07142550659179688, 0.07119721221923828, 0.07122943878173828, 0.07136051177978515, 0.07132118225097656, 0.07155958557128907, 0.07140771484375, 0.07132150268554688, 0.07123932647705078, 0.07148783874511719, 0.07140310668945313, 0.07174713897705078, 0.07135097503662109, 0.07121011352539063, 0.07114765167236328, 0.07117222595214844, 0.07123824310302734, 0.07137439727783203, 0.07150227355957031, 0.07132275390625, 0.07151468658447266, 0.07127216339111328, 0.07146966552734375, 0.07136224365234375, 0.07140930938720703, 0.07136969757080078, 0.07160390472412109, 0.07140147399902344, 0.0714136962890625, 0.07126156616210938, 0.071242431640625, 0.0712573471069336, 0.07130294036865234, 0.07132189178466797, 0.07172761535644531, 0.07128697967529297, 0.07231696319580078, 0.07129904174804688, 0.07120611572265625, 0.0712281951904297, 0.07132147216796875, 0.07124185943603516, 0.07119667053222656, 0.07106559753417968, 0.07115366363525391, 0.07122239685058594, 0.07133916473388671, 0.0712264633178711, 0.0713058853149414, 0.07117756652832032, 0.07145664215087891, 0.07133267211914063, 0.07139321899414063, 0.07129702758789062, 0.07126220703125, 0.07114546966552734, 0.07137484741210938, 0.07136822509765625, 0.07118921661376953, 0.07126707458496094, 0.07133695983886719, 0.07137283325195312, 0.07159804534912109, 0.07126188659667969, 0.07139059448242188, 0.07136726379394531, 0.0712542724609375, 0.07117417907714843, 0.0712930908203125, 0.07121705627441406, 0.07151001739501953, 0.07137715148925781, 0.07132144165039063, 0.07187446594238281]",tokens/s,13.993301512111875,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.322752,13880.918016,0.0,13478.395904,13476.849152,s,1,7.84144775390625,7.84144775390625,0.0,7.84144775390625,7.84144775390625,7.84144775390625,7.84144775390625,[7.84144775390625],,kWh,8.621965362495606e-06,9.424628212579448e-07,4.358614598007349e-06,1.39230427817609e-05,,MB,1198.010368,14115.79904,0.0,13702.791168,13671.637504,s,10,12.223399047851562,1.2223399047851562,0.005921184715524074,1.2216842041015625,1.2291810302734376,1.2301181640625,1.23086787109375,"[1.209767578125, 1.2170430908203125, 1.219740478515625, 1.22176416015625, 1.22103955078125, 1.221604248046875, 1.2245751953125, 1.227836669921875, 1.2310552978515625, 1.2289727783203126]",tokens/s,209.4343799116954,kWh,3.57631581766672e-05,3.943985671045247e-06,2.365871337140016e-05,6.336585721911262e-05,tokens/kWh,4040030.5659052054,MB,1246.298112,14115.79904,0.0,13702.791168,13671.640064,s,10,37.69563793945312,3.769563793945312,0.0025166596317956593,3.76928466796875,3.772784130859375,3.7732475341796876,3.7736182568359378,"[3.76535888671875, 3.767607421875, 3.767574951171875, 3.76868505859375, 3.76764892578125, 3.76988427734375, 3.77268115234375, 3.77062158203125, 3.77186474609375, 3.7737109375]",tokens/s,16.712809079180687,kWh,0.00011038785765291471,1.2176792092787767e-05,7.359714221099948e-05,0.00019616179195670197,tokens/kWh,321163.4608940856,,s,630,37.691916431427025,0.05982843878004286,0.0002636580632996298,0.05981742477416992,0.06006805496215821,0.06013891525268555,0.06110158271789551,"[0.06104489517211914, 0.05959686279296875, 0.059349311828613284, 0.05935475158691406, 0.05928992080688476, 0.05951286315917969, 0.059420928955078126, 0.05935923385620117, 0.05947596740722656, 0.05966233444213867, 0.05959065628051758, 0.05953926467895508, 0.05943116760253906, 0.05944723129272461, 0.059509857177734375, 0.0597410888671875, 0.05977439880371094, 0.05976895904541016, 0.05957814407348633, 0.05961161422729492, 0.059565631866455075, 0.05954009628295898, 0.05953910446166992, 0.059578720092773436, 0.059571456909179685, 0.05975449752807617, 0.05976959991455078, 0.059822078704833984, 0.05982617568969727, 0.059866878509521486, 0.05963801574707031, 0.059703102111816404, 0.05970307159423828, 0.05972009658813476, 0.05985849761962891, 0.0597050895690918, 0.0595667839050293, 0.05959372711181641, 0.05958127975463867, 0.05966864013671875, 0.059684864044189455, 0.06059823989868164, 0.05979961776733399, 0.05981164932250976, 0.05989187240600586, 0.05998729705810547, 0.05985478210449219, 0.05982896041870117, 0.059998207092285157, 0.06005145645141602, 0.06002601623535156, 0.059880287170410156, 0.059924480438232425, 0.06007743835449219, 0.05983615875244141, 0.05998067092895508, 0.0599285774230957, 0.05989990234375, 0.060087520599365236, 0.06003769683837891, 0.05996156692504883, 0.060129280090332034, 0.06010060882568359, 0.06112944030761719, 0.059589759826660156, 0.05942118453979492, 0.05941900634765625, 0.05933670425415039, 0.059436351776123046, 0.05963232040405273, 0.05947187042236328, 0.0594411506652832, 0.05946345520019531, 0.05947619247436523, 0.059668033599853516, 0.059531841278076175, 0.05958438491821289, 0.05963980865478516, 0.05973385620117187, 0.05982223892211914, 0.06039692687988281, 0.059660606384277344, 0.05960736083984375, 0.05950259017944336, 0.059535358428955076, 0.0595722541809082, 0.05979132843017578, 0.05966342544555664, 0.05975676727294922, 0.05970198440551758, 0.05974739074707031, 0.0598004150390625, 0.05982217788696289, 0.05981798553466797, 0.05983027267456055, 
0.0599552001953125, 0.05974630355834961, 0.05969075012207031, 0.05967276763916016, 0.05964716720581055, 0.059689502716064456, 0.05969955062866211, 0.05966233444213867, 0.059840511322021485, 0.059746143341064456, 0.06000656127929688, 0.05985039901733399, 0.05998153686523437, 0.059959487915039064, 0.05996713638305664, 0.05994780731201172, 0.06008627319335937, 0.06003507232666016, 0.06000435256958008, 0.060087745666503906, 0.05996316909790039, 0.05996787261962891, 0.05997110366821289, 0.059848831176757815, 0.06002969741821289, 0.059931713104248045, 0.05999852752685547, 0.060004833221435544, 0.060010303497314454, 0.060213600158691406, 0.06002687835693359, 0.06121881484985352, 0.059721569061279296, 0.059465534210205076, 0.05952956771850586, 0.05959270477294922, 0.059461631774902345, 0.05949433517456055, 0.059609153747558596, 0.059379711151123046, 0.059582462310791014, 0.05943910217285156, 0.059478015899658204, 0.05957632064819336, 0.059600894927978515, 0.05961523056030273, 0.05963507080078125, 0.05981600189208985, 0.05973459243774414, 0.05974425506591797, 0.059635711669921876, 0.05956198501586914, 0.05955126571655273, 0.0595318717956543, 0.05965785598754883, 0.05976089477539062, 0.05980089569091797, 0.05978307342529297, 0.05967542266845703, 0.05976268768310547, 0.059930622100830076, 0.05982812881469726, 0.05982012939453125, 0.059784576416015624, 0.05976947021484375, 0.05972079849243164, 0.0597308464050293, 0.05977907180786133, 0.059719680786132816, 0.05981798553466797, 0.059705280303955076, 0.05971974563598633, 0.05998748779296875, 0.05995363235473633, 0.05985388946533203, 0.05976079940795898, 0.059885887145996096, 0.059971839904785156, 0.05992380905151367, 0.06003497695922851, 0.06003993606567383, 0.05996976089477539, 0.05999971389770508, 0.05997417449951172, 0.05996854400634766, 0.05990848159790039, 0.05986371231079102, 0.05995868682861328, 0.059935264587402344, 0.06001663970947266, 0.06009446334838867, 0.060036510467529294, 0.06014012908935547, 0.06017753601074219, 0.06126787185668945, 0.05967728042602539, 0.059504959106445314, 0.059334590911865236, 0.05943519973754883, 0.059582271575927735, 0.05961484909057617, 0.059518497467041014, 0.05963657760620117, 0.05963161468505859, 0.05969715118408203, 0.05960704040527344, 0.05972172927856445, 0.059566078186035154, 0.059622657775878905, 0.05972627258300781, 0.06002719879150391, 0.05995110321044922, 0.05983583831787109, 0.05965407943725586, 0.059679359436035154, 0.05969295883178711, 0.05970748901367187, 0.05966153717041016, 0.05964879989624024, 0.059776897430419924, 0.05972304153442383, 0.05980556869506836, 0.05972681427001953, 0.060028926849365234, 0.059772926330566405, 0.05987724685668945, 0.05990822219848633, 0.0598364143371582, 0.0598911361694336, 0.059859390258789065, 0.05985836791992188, 0.05975724792480469, 0.05964966583251953, 0.05970281600952149, 0.05968777465820312, 0.05972582244873047, 0.059852161407470704, 0.05979609680175781, 0.059789310455322264, 0.06000137710571289, 0.059935649871826174, 0.05988483047485352, 0.05991856002807617, 0.059971614837646486, 0.05995158386230469, 0.059979328155517576, 0.05999660873413086, 0.05993881607055664, 0.05988505554199219, 0.05998134231567383, 0.0599582405090332, 0.05995264053344727, 0.059884033203125, 0.06013894271850586, 0.05989958572387695, 0.06004006576538086, 0.059977630615234374, 0.06096201705932617, 0.059420928955078126, 0.059509281158447266, 0.05946777725219726, 0.05943036651611328, 0.059499038696289065, 0.05947747039794922, 0.0595863037109375, 0.05957712173461914, 0.059868415832519534, 
0.05967744064331055, 0.05963679885864258, 0.05961539077758789, 0.059740638732910155, 0.05961964797973633, 0.059635711669921876, 0.05979033660888672, 0.05963423919677734, 0.059444896697998045, 0.059587360382080075, 0.05964972686767578, 0.059695423126220705, 0.05967219161987305, 0.05959718322753906, 0.05963776016235352, 0.05974832153320313, 0.05989993667602539, 0.05970534515380859, 0.05976883316040039, 0.059827678680419924, 0.05986681747436524, 0.05981884765625, 0.059789310455322264, 0.05990195083618164, 0.05989376068115235, 0.05995110321044922, 0.05981990432739258, 0.05986316680908203, 0.05996294403076172, 0.05990790557861328, 0.05979199981689453, 0.059799552917480465, 0.059963390350341796, 0.05979340744018555, 0.05979318237304688, 0.05999967956542969, 0.059939167022705075, 0.059805343627929684, 0.059973983764648436, 0.0599365119934082, 0.05983097457885742, 0.059972705841064455, 0.059816864013671874, 0.05992243194580078, 0.05984460830688477, 0.059940864562988284, 0.05994220733642578, 0.059883392333984375, 0.059921215057373044, 0.060060672760009766, 0.060091392517089844, 0.060045310974121094, 0.06005350494384765, 0.06096441650390625, 0.05961772918701172, 0.059469825744628904, 0.059477184295654295, 0.059486431121826173, 0.05957606506347656, 0.05956070327758789, 0.059660385131835934, 0.05960028839111328, 0.059635807037353515, 0.059599231719970704, 0.05966783905029297, 0.0596484489440918, 0.05982595062255859, 0.059736606597900394, 0.060007423400878904, 0.060115745544433594, 0.05992585754394531, 0.059751487731933596, 0.0597149772644043, 0.05966281509399414, 0.05961625671386719, 0.059601760864257815, 0.05963683319091797, 0.05963183975219727, 0.059775070190429686, 0.05974700927734375, 0.05987868881225586, 0.05977571105957031, 0.059789310455322264, 0.059778465270996096, 0.059822368621826175, 0.05993715286254883, 0.06006131362915039, 0.06005996704101563, 0.05989990234375, 0.05982595062255859, 0.05989807891845703, 0.059807743072509766, 0.05986412811279297, 0.05985171127319336, 0.05986099243164063, 0.059908096313476565, 0.05989318466186523, 0.05988195037841797, 0.05999216079711914, 0.059942142486572265, 0.0600228157043457, 0.059962078094482424, 0.05983747100830078, 0.060031200408935545, 0.059986209869384764, 0.05986099243164063, 0.0597979850769043, 0.05993379211425781, 0.059853729248046876, 0.05987456130981445, 0.05993548965454101, 0.059858943939208986, 0.05997772979736328, 0.06006784057617188, 0.060055553436279295, 0.06002246475219727, 0.061300735473632816, 0.05976063919067383, 0.059518623352050784, 0.05938751983642578, 0.05938454437255859, 0.0595250244140625, 0.05953305435180664, 0.05944736099243164, 0.059465919494628906, 0.0596583366394043, 0.05975996780395508, 0.05965686416625977, 0.059641857147216794, 0.05966438293457031, 0.05968694305419922, 0.0597786865234375, 0.05999836730957031, 0.059959487915039064, 0.059805694580078124, 0.059774974822998046, 0.05971494293212891, 0.059798145294189455, 0.05979260635375976, 0.05974505615234375, 0.0597212142944336, 0.05967513656616211, 0.059676673889160155, 0.05971686553955078, 0.05993139266967774, 0.059792896270751954, 0.05983283233642578, 0.05988351821899414, 0.059928478240966795, 0.06003311920166016, 0.059844097137451174, 0.05994137573242188, 0.05973811340332031, 0.059812992095947266, 0.059698047637939455, 0.05986304092407226, 0.059774272918701174, 0.059792064666748045, 0.060985343933105465, 0.059848705291748044, 0.0599101448059082, 0.06007376098632813, 0.060039390563964845, 0.06000400161743164, 0.06002518463134766, 0.06032572937011719, 0.06008374404907227, 
0.0601032943725586, 0.06009775924682617, 0.06000883102416992, 0.060073951721191406, 0.06001708984375, 0.060045089721679684, 0.060164321899414064, 0.06005311965942383, 0.06012556838989258, 0.06001036834716797, 0.05990208053588867, 0.0600002555847168, 0.06116057586669922, 0.05966729736328125, 0.05947110366821289, 0.05937171173095703, 0.05943091201782227, 0.059455230712890626, 0.05943961715698242, 0.05954592132568359, 0.05945062255859375, 0.05952793502807617, 0.05970281600952149, 0.05972630310058594, 0.05970534515380859, 0.05978316879272461, 0.05978694534301758, 0.059636032104492184, 0.05990176010131836, 0.05987142562866211, 0.05967788696289063, 0.05962937545776367, 0.059589630126953126, 0.059625473022460934, 0.059581695556640626, 0.059703712463378904, 0.05965974426269531, 0.05975129699707031, 0.059652095794677736, 0.05983001708984375, 0.05982643127441406, 0.06009036636352539, 0.05999004745483399, 0.05991011047363281, 0.05998723220825195, 0.06004937744140625, 0.06000006484985351, 0.05993699264526367, 0.05981635284423828, 0.05984902572631836, 0.059703296661376956, 0.059660064697265626, 0.05968099212646484, 0.05976473617553711, 0.059805343627929684, 0.05997951889038086, 0.059824737548828125, 0.06001049423217773, 0.06000230407714844, 0.060071937561035155, 0.05999980926513672, 0.0600964469909668, 0.06002924728393555, 0.06005548858642578, 0.06017254257202148, 0.06002687835693359, 0.06007187271118164, 0.05993590545654297, 0.05985782241821289, 0.059979774475097655, 0.060071937561035155, 0.060089759826660157, 0.06023433685302734, 0.060111934661865235, 0.060146625518798826, 0.061335262298583985, 0.05982831954956055, 0.05957855987548828, 0.05952048110961914, 0.05950313568115234, 0.05948416137695312, 0.05952716827392578, 0.059504638671875, 0.05944899368286133, 0.05951871871948242, 0.05950729751586914, 0.0595865592956543, 0.059633663177490234, 0.05986304092407226, 0.05976601409912109, 0.05975936126708985, 0.060217342376708984, 0.06010268783569336, 0.05976367950439453, 0.059757408142089845, 0.059691169738769534, 0.059800735473632814, 0.0597218246459961, 0.05972864151000976, 0.05963302230834961, 0.059752544403076174, 0.059748897552490236, 0.05965334320068359, 0.0596651840209961, 0.05972377777099609, 0.05989580917358398, 0.05989971160888672, 0.05979916763305664, 0.05988943862915039, 0.05989251327514648, 0.059850753784179686, 0.059791007995605466, 0.05988143920898437, 0.059840896606445315, 0.05986304092407226, 0.05979750442504883, 0.05978902435302735, 0.06023606491088867, 0.05996748733520508, 0.059893310546875, 0.05990444946289063, 0.0598422737121582, 0.06018812942504883, 0.06017516708374023, 0.060277824401855466, 0.06009299087524414, 0.06010508728027344, 0.06015078353881836, 0.06005452728271484, 0.06002889633178711, 0.059985950469970704, 0.06000576019287109, 0.06004579162597656, 0.06007779312133789, 0.060083808898925783, 0.05996131134033203, 0.0599634895324707, 0.059951904296875, 0.06112473678588867, 0.059756542205810545, 0.059578369140625, 0.05955705642700195, 0.05951776123046875, 0.059784446716308594, 0.0597122573852539, 0.05968025588989258, 0.05967865753173828, 0.05971350479125977, 0.05970595169067383, 0.05963078308105469, 0.05971337509155274, 0.05976972961425781, 0.05986518478393555, 0.05983846282958984, 0.059940864562988284, 0.059875328063964846, 0.05971148681640625, 0.05961318588256836, 0.05967871856689453, 0.059676673889160155, 0.059702880859375, 0.05976515197753906, 0.059772926330566405, 0.05981350326538086, 0.05970108795166015, 0.05990790557861328, 0.05985763168334961, 0.05982003021240234, 
0.05986860656738281, 0.06014012908935547, 0.06007600021362305, 0.06022553634643555, 0.06001216125488281, 0.0599576301574707, 0.059930305480957034, 0.05987360000610351, 0.059811424255371094, 0.059928993225097656, 0.05985184097290039, 0.0599249267578125, 0.05988108825683594, 0.05995404815673828, 0.05998499298095703, 0.06000057601928711, 0.06018016052246094, 0.06004329681396484, 0.060134273529052734, 0.06000630569458008, 0.059883617401123045, 0.06005145645141602, 0.05993369674682617, 0.059963390350341796, 0.059882209777832034, 0.06000054550170898, 0.06003507232666016, 0.06016937637329101, 0.06000857543945313, 0.06006998443603516, 0.0599409294128418, 0.06000668716430664, 0.06013888168334961]",tokens/s,16.714459216902927,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,782.155776,3286.106112,0.0,2883.584,2829.29664,s,1,6.9814697265625,6.9814697265625,0.0,6.9814697265625,6.9814697265625,6.9814697265625,6.9814697265625,[6.9814697265625],,kWh,4.648916008318338e-06,5.059206600764325e-07,2.5325020260036357e-06,7.687338694398407e-06,,MB,1145.602048,3304.98048,0.0,2891.972608,2759.921664,s,10,2.4884664001464847,0.24884664001464846,0.002203561360290385,0.24969026947021483,0.25036970672607417,0.25043384628295895,0.25048515792846676,"[0.2429496307373047, 0.25049798583984373, 0.249465087890625, 0.2503554534912109, 0.2502372131347656, 0.24968556213378906, 0.24762387084960938, 0.25030291748046873, 0.24765370178222657, 0.24969497680664063]",tokens/s,1028.746058154253,kWh,7.2473477914638525e-06,7.992586298710596e-07,4.800843949072477e-06,1.284745037040739e-05,tokens/kWh,19926132.627035968,MB,1171.124224,3304.98048,0.0,2891.972608,2771.964928,s,10,10.162656127929688,1.016265612792969,0.003755117381255847,1.015844482421875,1.020410137939453,1.0224829010009766,1.0241411114501953,"[1.0144153442382813, 1.0245556640625, 1.014386962890625, 1.0096940307617188, 1.01574755859375, 1.01594140625, 1.0179148559570312, 1.0161218872070312, 1.0139288940429687, 1.0199495239257812]",tokens/s,61.991667539413434,kWh,2.9821642698535716e-05,3.2889886977653774e-06,1.9740453461729213e-05,5.285108485803032e-05,tokens/kWh,1192028.5112260592,,s,630,10.16029148197173,0.01612744679678054,0.0003227909233041935,0.016058095932006834,0.01625134773254395,0.01652180233001709,0.01773783021926883,"[0.016246400833129882, 0.016048223495483398, 0.016081024169921875, 0.01604185676574707, 0.01652764892578125, 0.01601126480102539, 0.016105472564697267, 0.016021503448486327, 0.016091136932373046, 0.01592521572113037, 0.015882271766662598, 0.015890144348144532, 0.015971903800964356, 0.015985376358032228, 0.016058368682861326, 0.01649785614013672, 0.0160960636138916, 0.016025760650634765, 0.01627734375, 0.01601433563232422, 0.01599392032623291, 0.015918527603149414, 0.016663040161132812, 0.019230079650878907, 0.0161081600189209, 0.015988736152648925, 0.0160545597076416, 0.016023168563842772, 0.016080991744995117, 0.015947744369506835, 
0.01600124740600586, 0.015933247566223146, 0.015937536239624024, 0.015930848121643068, 0.01621046447753906, 0.016009471893310548, 0.016022720336914063, 0.015941472053527833, 0.01591158390045166, 0.016108991622924805, 0.016013120651245116, 0.015923680305480956, 0.01598083209991455, 0.016008832931518554, 0.015947872161865235, 0.015943679809570312, 0.015993184089660644, 0.015937536239624024, 0.016035839080810545, 0.01596131229400635, 0.016290592193603515, 0.01601148796081543, 0.01595695972442627, 0.016089920043945313, 0.015975520133972167, 0.01617193603515625, 0.016035520553588867, 0.015988351821899414, 0.016018112182617186, 0.016021503448486327, 0.016059776306152344, 0.01599142360687256, 0.01600921630859375, 0.016687103271484375, 0.016222143173217775, 0.015988800048828126, 0.016146432876586913, 0.01603753662109375, 0.016209951400756838, 0.015992256164550783, 0.016194112777709962, 0.016015392303466797, 0.01612998390197754, 0.016016767501831054, 0.016065504074096678, 0.016096384048461913, 0.01603264045715332, 0.016000127792358397, 0.0159302396774292, 0.016002656936645508, 0.01604774475097656, 0.0162040958404541, 0.01610697555541992, 0.015978528022766114, 0.015993632316589356, 0.016014591217041015, 0.01603798484802246, 0.016020320892333986, 0.016035072326660155, 0.016081087112426756, 0.015996607780456543, 0.015975296020507814, 0.016103263854980468, 0.015992416381835937, 0.016013727188110352, 0.017021087646484374, 0.01934931182861328, 0.01797929573059082, 0.0162573127746582, 0.01626246452331543, 0.016062816619873046, 0.016049535751342773, 0.016016351699829103, 0.01603977584838867, 0.016007328033447267, 0.016683008193969725, 0.0161975040435791, 0.016109695434570314, 0.016109567642211914, 0.016101375579833984, 0.016016992568969726, 0.016021568298339842, 0.01601571273803711, 0.015988832473754884, 0.015976351737976076, 0.01614028739929199, 0.0160501766204834, 0.016063840866088867, 0.01639900779724121, 0.018711967468261717, 0.01692486381530762, 0.016171199798583984, 0.01614396858215332, 0.01622719955444336, 0.01664588737487793, 0.016216064453125, 0.016936960220336913, 0.016565919876098633, 0.01620524787902832, 0.016331680297851564, 0.016125951766967773, 0.016046207427978517, 0.01602342414855957, 0.016084991455078124, 0.0160849609375, 0.015945759773254395, 0.016005119323730468, 0.015951871871948242, 0.01602886390686035, 0.01600147247314453, 0.016001216888427733, 0.015982303619384765, 0.016005599975585937, 0.016153728485107422, 0.01604832077026367, 0.015984736442565917, 0.015970080375671387, 0.016376895904541014, 0.016142112731933594, 0.015994463920593262, 0.01606809616088867, 0.01604083251953125, 0.016066560745239256, 0.01598464012145996, 0.016100608825683593, 0.01613599967956543, 0.0160849609375, 0.015985631942749025, 0.016013311386108398, 0.015969632148742675, 0.01602627182006836, 0.016074752807617186, 0.01604198455810547, 0.01606012725830078, 0.016035680770874024, 0.01604652786254883, 0.016142143249511718, 0.015982784271240235, 0.016066560745239256, 0.015970303535461427, 0.016356832504272462, 0.016252479553222655, 0.016073823928833008, 0.016070528030395506, 0.01604630470275879, 0.01609017562866211, 0.016133056640625, 0.01607244873046875, 0.016162847518920897, 0.01620345687866211, 0.016128320693969727, 0.016121856689453123, 0.016128032684326173, 0.01622831916809082, 0.016093183517456054, 0.016097280502319337, 0.01595759963989258, 0.01605673599243164, 0.015996928215026854, 0.016256160736083984, 0.016118623733520507, 0.016101375579833984, 0.016033279418945313, 0.016074592590332032, 
0.015909536361694336, 0.016082624435424804, 0.016005151748657225, 0.01596063995361328, 0.016301792144775392, 0.01601251220703125, 0.01595423984527588, 0.015985119819641114, 0.01602560043334961, 0.01603539276123047, 0.01599715232849121, 0.016005279541015625, 0.015990847587585448, 0.01599078369140625, 0.016093183517456054, 0.01616383934020996, 0.016140607833862303, 0.015995583534240722, 0.016008863449096678, 0.016007776260375976, 0.016271104812622072, 0.01598361587524414, 0.016061439514160156, 0.016107776641845702, 0.015993727684020995, 0.015978528022766114, 0.015917280197143554, 0.01595241641998291, 0.015982687950134276, 0.015972352027893065, 0.01601068878173828, 0.016013887405395506, 0.016029695510864257, 0.016130048751831053, 0.016082944869995116, 0.01603993606567383, 0.016052255630493163, 0.016047935485839843, 0.01602908706665039, 0.01596288013458252, 0.015916064262390138, 0.016008159637451173, 0.01618227195739746, 0.015958944320678712, 0.01596579170227051, 0.015926015853881835, 0.01599462413787842, 0.015924351692199707, 0.015932191848754884, 0.0159432315826416, 0.015987232208251954, 0.015960063934326172, 0.01598057556152344, 0.01592518424987793, 0.015988896369934082, 0.015958080291748045, 0.016000831604003906, 0.01603366470336914, 0.0164866886138916, 0.01618534469604492, 0.016060352325439453, 0.016049663543701173, 0.016027807235717773, 0.01606697654724121, 0.01609744071960449, 0.01609097671508789, 0.016021503448486327, 0.01599007987976074, 0.016021728515625, 0.016044511795043945, 0.016360736846923827, 0.016003231048583984, 0.016042848587036133, 0.015981535911560058, 0.01611356735229492, 0.01647270393371582, 0.016091360092163085, 0.01658060836791992, 0.016080352783203126, 0.015945504188537598, 0.016480352401733397, 0.01613465690612793, 0.01625699234008789, 0.016217792510986328, 0.016103391647338868, 0.015979007720947267, 0.01606150436401367, 0.016064992904663088, 0.016390655517578127, 0.016193056106567384, 0.0160435848236084, 0.01599894428253174, 0.016083616256713867, 0.015996512413024903, 0.01599510383605957, 0.016154752731323243, 0.016066240310668944, 0.016067232131958008, 0.016072959899902345, 0.016059455871582032, 0.016081695556640626, 0.016094783782958984, 0.01615292739868164, 0.01606991958618164, 0.01613667106628418, 0.016516416549682618, 0.016079807281494142, 0.016062463760375977, 0.016006816864013673, 0.016071008682250976, 0.016037248611450197, 0.016169471740722655, 0.016032127380371092, 0.016526208877563477, 0.016056480407714843, 0.016060415267944335, 0.016034400939941407, 0.01602681541442871, 0.015991168022155763, 0.016032320022583008, 0.016041759490966798, 0.016922624588012695, 0.016263263702392578, 0.016252096176147462, 0.016171968460083008, 0.016101152420043945, 0.016080896377563478, 0.01601535987854004, 0.016117023468017577, 0.016157407760620118, 0.016240287780761718, 0.016215583801269532, 0.016138175964355468, 0.016098272323608397, 0.016127904891967772, 0.016205951690673827, 0.016066400527954102, 0.016046239852905275, 0.016184799194335936, 0.016140352249145506, 0.016052064895629884, 0.01607923126220703, 0.016080608367919923, 0.016009632110595702, 0.016082944869995116, 0.016021535873413085, 0.01606012725830078, 0.016039392471313477, 0.016082815170288086, 0.01597436809539795, 0.015971296310424803, 0.01621603202819824, 0.01606982421875, 0.01597862434387207, 0.016072864532470702, 0.016101728439331053, 0.016029888153076172, 0.01600271987915039, 0.016030111312866212, 0.015992799758911134, 0.01603171157836914, 0.01605219268798828, 0.016048160552978516, 0.016035743713378906, 
0.016054367065429686, 0.016035839080810545, 0.016031679153442384, 0.016090368270874022, 0.016057088851928712, 0.016135488510131836, 0.01605407905578613, 0.01616582489013672, 0.016152576446533205, 0.01614156723022461, 0.016167680740356447, 0.016078847885131836, 0.01611372756958008, 0.016057695388793945, 0.016097888946533204, 0.016480415344238282, 0.01606809616088867, 0.016118112564086913, 0.0164270076751709, 0.016452991485595703, 0.01707491111755371, 0.016250112533569335, 0.016304128646850585, 0.016196447372436522, 0.016086944580078123, 0.016671903610229494, 0.016128255844116212, 0.016083551406860352, 0.01617433547973633, 0.016173824310302735, 0.01602764892578125, 0.016131807327270507, 0.016013599395751952, 0.01659641647338867, 0.01618195152282715, 0.01622208023071289, 0.01617305564880371, 0.01605843162536621, 0.016035999298095703, 0.016078624725341797, 0.01614028739929199, 0.016225568771362303, 0.016841440200805663, 0.016091136932373046, 0.016080896377563478, 0.0160830078125, 0.016125888824462892, 0.016074623107910156, 0.016068735122680665, 0.016251136779785156, 0.016163871765136718, 0.01611849594116211, 0.016180992126464844, 0.016086719512939454, 0.016177728652954103, 0.016154144287109377, 0.01611110305786133, 0.01609641647338867, 0.01603923225402832, 0.016044544219970702, 0.015980159759521485, 0.016251264572143556, 0.016158527374267578, 0.01597049617767334, 0.0160166072845459, 0.01597929573059082, 0.016091136932373046, 0.016151840209960938, 0.016006975173950194, 0.016040288925170898, 0.01607539176940918, 0.0161648006439209, 0.016046079635620117, 0.016130048751831053, 0.01610918426513672, 0.016083328247070313, 0.016051424026489257, 0.016048927307128907, 0.016064512252807618, 0.016183296203613282, 0.016150527954101563, 0.016074592590332032, 0.016042144775390624, 0.016293888092041017, 0.016223520278930665, 0.016181375503540037, 0.015997119903564453, 0.016013280868530273, 0.016181440353393556, 0.016162431716918946, 0.016029727935791015, 0.015956735610961913, 0.01595580768585205, 0.015945247650146484, 0.016087295532226563, 0.015922528266906738, 0.015937952041625975, 0.015987168312072753, 0.017133567810058595, 0.015994879722595216, 0.01592255973815918, 0.015943936347961426, 0.016086719512939454, 0.01613510322570801, 0.01593247985839844, 0.016046688079833983, 0.016144479751586914, 0.015965439796447753, 0.01644620704650879, 0.015980480194091796, 0.016011167526245117, 0.016001184463500975, 0.016063488006591797, 0.01597920036315918, 0.016068927764892577, 0.016026912689208986, 0.01597923183441162, 0.016013471603393555, 0.016012544631958007, 0.01611836814880371, 0.01605411148071289, 0.01613430404663086, 0.01607689666748047, 0.01700035285949707, 0.016080352783203126, 0.016083488464355467, 0.0162193603515625, 0.017146656036376953, 0.016594751358032227, 0.016175167083740234, 0.016111295700073244, 0.016219648361206054, 0.01600979232788086, 0.01603331184387207, 0.016167776107788086, 0.015978464126586912, 0.016103456497192383, 0.01625497627258301, 0.01602761650085449, 0.016085023880004882, 0.01618502426147461, 0.015963775634765625, 0.016121952056884766, 0.015989407539367676, 0.016213119506835936, 0.01597644805908203, 0.01609657669067383, 0.01608710479736328, 0.0160830078125, 0.016107488632202148, 0.016056224822998046, 0.016025407791137695, 0.016001920700073242, 0.01603993606567383, 0.01596332836151123, 0.016249664306640627, 0.015965727806091307, 0.01602403259277344, 0.015966208457946777, 0.015968255996704102, 0.01683660888671875, 0.018105791091918944, 0.016561983108520507, 0.016124671936035156, 
0.016035360336303713, 0.0161343994140625, 0.016105024337768555, 0.016032415390014647, 0.016078847885131836, 0.016134143829345703, 0.015980544090270995, 0.01603171157836914, 0.016230144500732423, 0.01612953567504883, 0.01602742385864258, 0.015988767623901366, 0.01595888042449951, 0.0160250244140625, 0.016159616470336913, 0.015992256164550783, 0.016074527740478517, 0.015923168182373045, 0.01601215934753418, 0.016031423568725587, 0.01594598388671875, 0.015928992271423338, 0.01604150390625, 0.016069248199462892, 0.01597267246246338, 0.016178367614746093, 0.015944191932678223, 0.016082176208496092, 0.015989503860473632, 0.01602355194091797, 0.016011423110961914, 0.01606230354309082, 0.015990943908691407, 0.016074592590332032, 0.015927295684814453, 0.01601257514953613, 0.015946463584899904, 0.016010944366455077, 0.01597439956665039, 0.015976767539978027, 0.016033567428588868, 0.015962335586547853, 0.016010976791381835, 0.016048223495483398, 0.016138399124145508, 0.016448127746582032, 0.016027551651000976, 0.0160231990814209, 0.01608038330078125, 0.01611657524108887, 0.015958016395568847, 0.016067808151245117, 0.016096128463745116, 0.016057823181152345, 0.01621865653991699, 0.016064416885375975, 0.016138240814208983, 0.01602560043334961, 0.01599398422241211, 0.01595241641998291, 0.015976223945617676, 0.015970879554748536, 0.016012479782104492, 0.016024320602416993, 0.01604854393005371, 0.016053279876708983, 0.015999584197998046, 0.016042015075683595, 0.015991904258728026, 0.0165262393951416, 0.018064992904663086, 0.016973920822143555, 0.016142656326293945, 0.016066272735595702, 0.016228639602661132, 0.016160768508911134, 0.016084575653076173, 0.016049823760986327, 0.01603046417236328, 0.015987775802612306, 0.01631692886352539, 0.016067007064819335, 0.01600271987915039, 0.016079200744628906, 0.016037887573242187, 0.015968255996704102, 0.016029407501220703, 0.01604217529296875, 0.016032928466796874, 0.01600534439086914, 0.01601011276245117, 0.0160930233001709, 0.016065919876098633, 0.016869056701660157, 0.0189736328125, 0.016196863174438476, 0.016163583755493163, 0.01605251121520996, 0.016124895095825195, 0.016188224792480468, 0.01611731147766113, 0.01599270439147949, 0.016107263565063475, 0.01597721576690674, 0.016029247283935545, 0.01619296073913574, 0.016088064193725587, 0.01618115234375]",tokens/s,62.00609511231661,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.947392,9637.39648,0.0,9242.148864,8603.568128,s,1,7.6634521484375,7.6634521484375,0.0,7.6634521484375,7.6634521484375,7.6634521484375,7.6634521484375,[7.6634521484375],,kWh,1.2463342704139297e-05,1.3674601049854078e-06,5.724449023997158e-06,1.9555251833121862e-05,,MB,1140.477952,9886.957568,0.0,9481.224192,8972.090368,s,10,7.041542114257813,0.7041542114257812,0.00480887993823181,0.7044410705566406,0.7089775390624999,0.7092788391113282,0.7095198791503906,"[0.6915390625, 0.7037074584960937, 0.7027904663085938, 
0.7025294799804688, 0.707328369140625, 0.7038873291015625, 0.7049948120117188, 0.7062744140625, 0.7095801391601563, 0.7089105834960937]",tokens/s,363.5567264188446,kWh,2.0551944174999337e-05,2.266518104224068e-06,1.3657510926000834e-05,3.647597320522424e-05,tokens/kWh,7018318.5671200855,MB,1162.006528,9891.151872,0.0,9485.418496,8972.092928,s,10,25.263025390625,2.5263025390625002,0.014150736211522822,2.5282164306640627,2.541820458984375,2.5455982177734375,2.5486204248046875,"[2.50933154296875, 2.5493759765625, 2.54098095703125, 2.530982421875, 2.53126611328125, 2.530928466796875, 2.52550439453125, 2.523835205078125, 2.524316650390625, 2.496503662109375]",tokens/s,24.937630796737047,kWh,7.337199067916725e-05,8.093241882545913e-06,4.856909441079924e-05,0.0001300343269725124,tokens/kWh,484487.45394219947,,s,630,25.25983639907835,0.0400949784112355,0.0007732978727018159,0.03999001693725586,0.04052755584716797,0.041276592826843254,0.04312175277709963,"[0.04249353790283203, 0.043307422637939456, 0.04041104125976563, 0.04082649612426758, 0.039516448974609375, 0.04048089599609375, 0.03943430328369141, 0.03960422515869141, 0.03983273696899414, 0.03978736114501953, 0.040048641204833986, 0.03961222457885742, 0.039841983795166014, 0.039616512298583983, 0.0398315200805664, 0.03960160064697266, 0.039370529174804686, 0.039526657104492186, 0.039404094696044924, 0.039636993408203126, 0.0396492805480957, 0.0395489273071289, 0.03973104095458985, 0.03973699188232422, 0.03969279861450195, 0.03982950210571289, 0.0396124153137207, 0.039546463012695314, 0.039659393310546874, 0.039483936309814456, 0.03946700668334961, 0.03945676803588867, 0.039907329559326174, 0.039407646179199216, 0.03943753433227539, 0.03988966369628906, 0.03963401412963867, 0.0395846061706543, 0.039653438568115235, 0.03948249435424805, 0.039720897674560544, 0.03951440048217773, 0.03964144134521484, 0.03988883209228516, 0.039820735931396484, 0.03991151809692383, 0.039537567138671875, 0.03972438430786133, 0.04002799987792969, 0.0399183349609375, 0.0400445442199707, 0.039894943237304685, 0.040130657196044923, 0.03968819046020508, 0.039223297119140625, 0.03971072006225586, 0.03956041717529297, 0.03962140655517578, 0.0394728012084961, 0.039738945007324215, 0.03996652984619141, 0.03994704055786133, 0.0397468147277832, 0.04230822372436523, 0.04061753463745117, 0.04019039916992188, 0.04031488037109375, 0.040381568908691406, 0.04056358337402344, 0.04083017730712891, 0.04029232025146484, 0.04026860809326172, 0.04024115371704102, 0.04027801513671875, 0.040235008239746094, 0.040065025329589846, 0.04008972930908203, 0.04031475067138672, 0.03991551971435547, 0.040182785034179686, 0.04011872100830078, 0.04032979202270508, 0.040613887786865234, 0.04000521469116211, 0.04030636978149414, 0.04048355102539063, 0.04027391815185547, 0.04000342559814453, 0.04035190582275391, 0.04004249572753906, 0.04007843017578125, 0.04011529541015625, 0.04025939178466797, 0.04039475250244141, 0.04056883239746094, 0.04006092834472656, 0.040030208587646485, 0.04024729537963867, 0.03990323257446289, 0.04020822525024414, 0.040249504089355466, 0.040359935760498046, 0.04177094268798828, 0.05037062454223633, 0.040304641723632816, 0.03995606231689453, 0.04008182525634765, 0.040570911407470704, 0.04038803100585937, 0.04024076843261719, 0.040403968811035154, 0.040560256958007815, 0.03999116897583008, 0.04015913772583008, 0.040331775665283204, 0.04028211212158203, 0.04038614273071289, 0.03992556762695312, 0.039981056213378906, 0.04011193466186523, 0.04023580932617187, 0.040226814270019534, 
0.04014284896850586, 0.04012236785888672, 0.04028742218017578, 0.040119102478027344, 0.042237953186035154, 0.040529918670654294, 0.040189056396484374, 0.040264575958251954, 0.03995600128173828, 0.04003839874267578, 0.03977059173583984, 0.03991689682006836, 0.04018652725219726, 0.040130561828613284, 0.03990528106689453, 0.04024115371704102, 0.03989299011230469, 0.04024140930175781, 0.04001475143432617, 0.04149129486083984, 0.04015513610839844, 0.0400148811340332, 0.04007974243164063, 0.04015478515625, 0.04041366577148438, 0.0402619514465332, 0.04006108856201172, 0.04007491302490234, 0.04033980941772461, 0.040793441772460935, 0.043618976593017576, 0.04017356872558594, 0.03986022567749024, 0.040005630493164065, 0.040182880401611325, 0.040197025299072264, 0.040136703491210936, 0.04324726486206055, 0.040528160095214844, 0.04041494369506836, 0.040080928802490236, 0.04008348846435547, 0.04017641448974609, 0.03976380920410156, 0.04017942428588867, 0.04032761764526367, 0.04001587295532227, 0.040271198272705075, 0.04011804962158203, 0.039943038940429686, 0.039852001190185546, 0.03999337768554687, 0.04029849624633789, 0.0404029426574707, 0.04031401443481445, 0.04038896179199219, 0.04026371383666992, 0.041116416931152346, 0.04016505432128906, 0.040022048950195316, 0.040136703491210936, 0.039890113830566405, 0.040360767364501955, 0.040118270874023435, 0.04021657562255859, 0.03993804931640625, 0.04049903869628906, 0.04219910430908203, 0.040132190704345705, 0.040548126220703126, 0.040122112274169924, 0.0401212158203125, 0.040339710235595704, 0.03994598388671875, 0.040343551635742186, 0.04240588760375977, 0.040908798217773434, 0.04048889541625977, 0.04038246536254883, 0.04024639892578125, 0.04031155014038086, 0.040116416931152345, 0.03985168075561524, 0.03987900924682617, 0.03994009780883789, 0.039725055694580076, 0.039882080078125, 0.03977072143554688, 0.04063852691650391, 0.040030208587646485, 0.04000358581542969, 0.04009369659423828, 0.039929855346679685, 0.03970364761352539, 0.03980527877807617, 0.03950249481201172, 0.03989807891845703, 0.04387321472167969, 0.04003190231323242, 0.04065299224853516, 0.03973068618774414, 0.0397823371887207, 0.04006943893432617, 0.040132545471191404, 0.03981727981567383, 0.03960211181640625, 0.03975743865966797, 0.039737407684326174, 0.03945568084716797, 0.040290145874023436, 0.04001792144775391, 0.04005401611328125, 0.03995929718017578, 0.03975167846679688, 0.039782398223876955, 0.039728416442871096, 0.0397790412902832, 0.03991686248779297, 0.040065216064453124, 0.04014883041381836, 0.03991619110107422, 0.03998886489868164, 0.04026816177368164, 0.04030025482177734, 0.04028995132446289, 0.039895103454589846, 0.03997753524780273, 0.04017935943603516, 0.03992995071411133, 0.040478046417236326, 0.04252345657348633, 0.04176688003540039, 0.03992374420166016, 0.040097793579101565, 0.03978035354614258, 0.04008038330078125, 0.03974403381347656, 0.03961459350585937, 0.0397633285522461, 0.03987760162353516, 0.039792640686035156, 0.04057510375976563, 0.04004441452026367, 0.0400261116027832, 0.04024428939819336, 0.039893184661865234, 0.04004735946655273, 0.03968000030517578, 0.03990323257446289, 0.040164958953857424, 0.040137054443359375, 0.04082284927368164, 0.0402655029296875, 0.04041046524047852, 0.04049359893798828, 0.04001177597045898, 0.041587039947509764, 0.03987875366210938, 0.040081024169921875, 0.039936286926269535, 0.039667713165283204, 0.04018918228149414, 0.03981593704223633, 0.03968729782104492, 0.04035820770263672, 0.04001811218261719, 0.04026124954223633, 
0.03986412811279297, 0.039830463409423825, 0.0398636474609375, 0.03971343994140625, 0.039937343597412106, 0.039529151916503906, 0.039663646697998045, 0.0399318733215332, 0.04036198425292969, 0.041538719177246095, 0.04276924896240234, 0.041893470764160154, 0.039936416625976565, 0.03991926574707031, 0.04089478302001953, 0.03980233764648437, 0.039567039489746096, 0.03998348617553711, 0.039784286499023436, 0.04051545715332031, 0.039713569641113285, 0.04026313781738281, 0.03988124847412109, 0.0395489273071289, 0.03957756805419922, 0.03954691314697266, 0.042626911163330075, 0.03976003265380859, 0.03952409744262695, 0.03968022537231446, 0.03951004791259766, 0.03968819046020508, 0.039609760284423826, 0.03963875198364258, 0.039723743438720704, 0.03981123352050781, 0.03977948760986328, 0.0396317138671875, 0.03970457458496094, 0.039626750946044925, 0.03955507278442383, 0.039599777221679684, 0.03958204650878906, 0.039752960205078125, 0.03978931045532227, 0.04245913696289062, 0.03993804931640625, 0.04020412826538086, 0.04121567916870117, 0.0398135986328125, 0.039796382904052734, 0.039817184448242185, 0.03974959945678711, 0.0399117431640625, 0.039822654724121095, 0.03967055892944336, 0.03969023895263672, 0.0395994873046875, 0.04105484771728515, 0.04093337631225586, 0.039919296264648435, 0.04002848052978516, 0.04005462265014648, 0.04026339340209961, 0.040126625061035155, 0.04013699340820313, 0.040226814270019534, 0.04046847915649414, 0.040269824981689455, 0.040271873474121096, 0.040267200469970704, 0.04038304138183594, 0.0405852165222168, 0.04038643264770508, 0.03994432067871094, 0.04101116943359375, 0.04034694290161133, 0.040089920043945314, 0.040272289276123044, 0.040458240509033204, 0.04013449478149414, 0.040185791015625, 0.04039680099487305, 0.04019836807250977, 0.04041475296020508, 0.04062019348144531, 0.04253523254394531, 0.040132766723632814, 0.040224609375, 0.0425750732421875, 0.04048934555053711, 0.04036662292480469, 0.04028598403930664, 0.04020230484008789, 0.040374271392822264, 0.040390655517578124, 0.04035500717163086, 0.04003923034667969, 0.04014617538452148, 0.04024396896362305, 0.0400992317199707, 0.040127071380615234, 0.04062822341918945, 0.040447681427001954, 0.04046675109863281, 0.041326431274414065, 0.04020598220825195, 0.04014745712280274, 0.04004191970825195, 0.04042195129394531, 0.040180862426757814, 0.04027891159057617, 0.04054425430297852, 0.04032067108154297, 0.04011452865600586, 0.04089785766601563, 0.03979945755004883, 0.03999542236328125, 0.039649185180664064, 0.0397694091796875, 0.03962073516845703, 0.04077017593383789, 0.04016035079956055, 0.03974854278564453, 0.039739166259765625, 0.039728862762451175, 0.04039120101928711, 0.03972911834716797, 0.03956307220458984, 0.03968819046020508, 0.03958784103393555, 0.04004188919067383, 0.040230846405029295, 0.03981939315795899, 0.039656158447265624, 0.0395546875, 0.039639423370361325, 0.03953823852539062, 0.039483329772949216, 0.039559680938720705, 0.039943294525146486, 0.04048166275024414, 0.039798782348632815, 0.03970172882080078, 0.03984054565429687, 0.039683391571044925, 0.03960022354125976, 0.03969836807250977, 0.03976668930053711, 0.03972476959228516, 0.039532257080078126, 0.04019878387451172, 0.042686431884765626, 0.040072574615478515, 0.04028684616088867, 0.040050048828125, 0.0437254409790039, 0.03998454284667969, 0.04017139053344727, 0.039885025024414066, 0.039629310607910154, 0.04011008071899414, 0.039631935119628904, 0.039328704833984374, 0.040013343811035156, 0.04003644943237305, 0.03962099075317383, 0.03975481414794922, 
0.0398570556640625, 0.03965254211425781, 0.039871326446533205, 0.03969177627563476, 0.0398770866394043, 0.03965760040283203, 0.04024899291992187, 0.039974143981933594, 0.03990771102905273, 0.03967552185058594, 0.03973392105102539, 0.03979504013061524, 0.039462913513183595, 0.03963651275634766, 0.03969887924194336, 0.03991145706176758, 0.04006707382202149, 0.040174976348876956, 0.03981990432739258, 0.039886081695556644, 0.039639774322509765, 0.03974313735961914, 0.040053119659423826, 0.03975987243652344, 0.03970230484008789, 0.039491134643554686, 0.03988528060913086, 0.04009328079223633, 0.03975228881835938, 0.03974553680419922, 0.04007052612304687, 0.0397523193359375, 0.03972844696044922, 0.03989369583129883, 0.03988479995727539, 0.03985123062133789, 0.0399529914855957, 0.04143328094482422, 0.03999334335327148, 0.039800830841064457, 0.04371839904785156, 0.039946495056152345, 0.03993190383911133, 0.040308734893798825, 0.0398131217956543, 0.039817054748535155, 0.040161441802978516, 0.04222566223144531, 0.03999692916870117, 0.03972876739501953, 0.04028710556030273, 0.039657024383544924, 0.039728641510009766, 0.039791038513183594, 0.03967356872558594, 0.03973308944702148, 0.03974854278564453, 0.03970790481567383, 0.03975040054321289, 0.03960627365112305, 0.03949747085571289, 0.040175167083740235, 0.03974828720092773, 0.03984384155273438, 0.0396124153137207, 0.039929855346679685, 0.03963900756835938, 0.04009539031982422, 0.03987494277954102, 0.03967583847045898, 0.03975600051879883, 0.039875839233398436, 0.040221473693847654, 0.03997257614135742, 0.039954784393310544, 0.040189697265625, 0.04098787307739258, 0.039983905792236325, 0.03960780715942383, 0.03994265747070312, 0.039782398223876955, 0.03944412612915039, 0.04019235229492187, 0.03976396942138672, 0.03990323257446289, 0.040030208587646485, 0.03968511962890625, 0.0428144645690918, 0.040089599609375, 0.04002816009521484, 0.03951747131347656, 0.03972784042358399, 0.040898399353027345, 0.040099647521972655, 0.04009603118896484, 0.04015929412841797, 0.040086849212646485, 0.04044796752929687, 0.04009664154052734, 0.04006076812744141, 0.04142710494995117, 0.04020198440551758, 0.040527488708496096, 0.0401638069152832, 0.04038860702514648, 0.04010598373413086, 0.039847934722900394, 0.04001315307617188, 0.040106655120849606, 0.04006467056274414, 0.042194847106933595, 0.03951193618774414, 0.03925651168823242, 0.03937737655639648, 0.039175552368164064, 0.03977484893798828, 0.039327743530273435, 0.03949977493286133, 0.03945798492431641, 0.03958249664306641, 0.03936259078979492, 0.03938825607299805, 0.03962563323974609, 0.039908607482910155, 0.03928128051757813, 0.0394013442993164, 0.03953241729736328, 0.039489601135253904, 0.039338302612304685, 0.03938508987426758, 0.03924972915649414, 0.03937094497680664, 0.04028982543945313, 0.04028464126586914, 0.04197785568237305, 0.039395328521728515, 0.03915724945068359, 0.03924579238891602, 0.03899756622314453, 0.03931235122680664, 0.039117855072021486, 0.03911164855957031, 0.039075328826904294, 0.03918502426147461, 0.03954390335083008, 0.039448673248291016, 0.03922118377685547, 0.039185150146484375, 0.03907516860961914, 0.03897139358520508, 0.03913897705078125, 0.03926323318481445, 0.03935980987548828, 0.03906351852416992, 0.039336673736572264, 0.03940966415405273, 0.039340000152587894, 0.039489566802978514, 0.03951520156860352, 0.039642047882080075, 0.03990937423706055, 0.040341503143310545, 0.040232158660888674, 0.040098686218261716, 0.04002374267578125, 0.04030691146850586, 0.03999884796142578, 
0.040100479125976564, 0.04018175888061523, 0.04001094436645508, 0.04002899169921875, 0.0400711669921875, 0.040223873138427735]",tokens/s,24.940779110627425,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,809.92256,14639.104,0.0,14243.856384,14221.3376,s,1,7.50015673828125,7.50015673828125,0.0,7.50015673828125,7.50015673828125,7.50015673828125,7.50015673828125,[7.50015673828125],,kWh,1.5394994562500605e-05,1.6904588861759922e-06,7.184450191999921e-06,2.426990364067652e-05,,MB,1110.228992,14735.572992,0.0,14329.839616,14290.688,s,10,14.017546020507812,1.4017546020507814,0.005064467607014813,1.4028267822265625,1.4066118896484376,1.4069143432617188,1.4071563061523438,"[1.3934443359375, 1.3924166259765625, 1.399595947265625, 1.4004007568359376, 1.4030111083984376, 1.4026424560546875, 1.4064407958984375, 1.407216796875, 1.40583251953125, 1.406544677734375]",tokens/s,182.6282572038425,kWh,4.101319376291641e-05,4.523301419646922e-06,2.717318840519999e-05,7.270968358776332e-05,tokens/kWh,3520851.520293007,MB,1138.958336,14750.253056,0.0,14344.51968,14290.69056,s,10,39.382056884765625,3.9382056884765624,0.0028361852012258122,3.9377154541015624,3.9428391357421875,3.942949108886719,3.943037087402344,"[3.93264404296875, 3.937390380859375, 3.936491943359375, 3.9376533203125, 3.937684814453125, 3.93774609375, 3.938677978515625, 3.93789453125, 3.94305908203125, 3.942814697265625]",tokens/s,15.997132954315202,kWh,0.00011518598807333356,1.2704394243449077e-05,7.642061669200003e-05,0.00020431099900878268,tokens/kWh,308353.4430630033,,s,630,39.37825381469723,0.06250516478523374,0.00023357617037945695,0.06250548934936523,0.06278532638549804,0.06288896923065185,0.06312925102233886,"[0.06311766433715821, 0.06226121520996094, 0.06225932693481445, 0.06189859390258789, 0.06193881607055664, 0.06222883224487305, 0.062042686462402345, 0.06216089630126953, 0.06200115203857422, 0.06224281692504883, 0.062414207458496095, 0.06231216049194336, 0.06223750305175781, 0.06258502578735352, 0.06216080093383789, 0.062195838928222655, 0.06219948959350586, 0.06223212814331055, 0.062155391693115236, 0.061967552185058596, 0.06203680038452149, 0.06238412857055664, 0.062453758239746096, 0.06251529693603515, 0.062299297332763674, 0.06239932632446289, 0.06256835174560547, 0.06234688186645508, 0.06235583877563477, 0.0625022087097168, 0.06236435317993164, 0.06235955047607422, 0.06239231872558594, 0.062486526489257815, 0.0626104965209961, 0.062387134552001955, 0.062183425903320315, 0.06274252700805664, 0.06236774444580078, 0.06257020950317382, 0.062295360565185545, 0.06242812728881836, 0.06235100936889648, 0.06262566375732422, 0.06256089782714844, 0.06289523315429688, 0.06241558456420898, 0.062365280151367185, 0.06243158340454102, 0.0625992317199707, 0.06236108779907226, 0.06244403076171875, 0.06293017578125, 0.0627119369506836, 0.06256089782714844, 0.062718017578125, 0.06256991958618165, 0.06255836868286133, 0.06274492645263671, 0.06263919830322266, 0.062446495056152344, 0.06305791854858399, 
0.06260940933227539, 0.06337152099609375, 0.06236617660522461, 0.062189697265625, 0.0622059211730957, 0.06199219131469726, 0.06248444747924805, 0.0626480941772461, 0.0623089599609375, 0.062179550170898434, 0.06225324630737305, 0.06225100708007812, 0.06245321655273438, 0.06236959838867188, 0.062443359375, 0.06221836853027344, 0.06223244857788086, 0.06235635375976562, 0.06242083358764648, 0.06240480041503906, 0.0625781135559082, 0.06203776168823242, 0.06258774566650391, 0.06259241485595703, 0.062396064758300784, 0.06261407852172851, 0.06264172744750976, 0.06238899230957031, 0.06265856170654296, 0.06253936004638672, 0.06258870315551758, 0.062401153564453124, 0.06234316635131836, 0.06234223937988281, 0.06262614440917968, 0.06252953720092773, 0.06270214462280274, 0.06232451248168945, 0.06226908874511719, 0.062341697692871095, 0.06239846420288086, 0.06252105712890625, 0.06245404815673828, 0.062389633178710935, 0.06264281463623046, 0.06261920166015625, 0.0628813133239746, 0.06250588989257813, 0.062437374114990236, 0.0626480941772461, 0.06256371307373047, 0.06261398315429688, 0.06275305557250976, 0.06250102233886719, 0.06256019210815429, 0.062453407287597656, 0.0627305908203125, 0.0628223991394043, 0.06261491012573242, 0.06262643051147461, 0.06271155166625976, 0.06254111862182617, 0.06269382476806641, 0.0625805778503418, 0.06323279953002929, 0.06244480133056641, 0.06195462417602539, 0.06197174453735352, 0.06202057647705078, 0.06210355377197266, 0.062117889404296876, 0.062156097412109375, 0.062370494842529295, 0.06252544021606446, 0.062461952209472656, 0.06241888046264649, 0.06259267044067383, 0.06221993637084961, 0.06219209671020508, 0.06256262588500977, 0.06238361740112305, 0.06256662368774414, 0.06222463989257813, 0.062279678344726565, 0.06249241638183594, 0.0623372802734375, 0.062230270385742185, 0.06227788925170898, 0.06233472061157227, 0.0628364486694336, 0.06253366470336914, 0.062470657348632816, 0.06265001678466797, 0.06252579116821289, 0.06264012908935547, 0.06253948974609375, 0.062384449005126956, 0.06255379104614257, 0.062331199645996094, 0.06238934326171875, 0.062281856536865236, 0.06225382232666016, 0.06267903900146485, 0.0625541114807129, 0.06258895874023437, 0.06283039855957032, 0.06266073608398437, 0.06249065780639648, 0.06242899322509766, 0.06250310516357421, 0.0625450553894043, 0.062402721405029296, 0.06244579315185547, 0.06283929443359375, 0.06248566436767578, 0.06252767944335938, 0.06247283172607422, 0.06247219085693359, 0.06278511810302734, 0.0625316162109375, 0.06254767990112305, 0.06276079940795898, 0.06269209671020508, 0.06261142349243164, 0.06286959838867187, 0.06285472106933594, 0.06267744064331054, 0.06311296081542969, 0.063023681640625, 0.06239980697631836, 0.062185665130615235, 0.062368255615234375, 0.06248646545410156, 0.062292030334472656, 0.06258009719848633, 0.06228649520874024, 0.06237334442138672, 0.06241888046264649, 0.062293952941894534, 0.06242367935180664, 0.06255001449584961, 0.06257664108276367, 0.06236569595336914, 0.062296062469482424, 0.062438560485839845, 0.06245379257202149, 0.06234195327758789, 0.06222438430786133, 0.062273536682128906, 0.06208512115478516, 0.06271721649169922, 0.06257056045532226, 0.06263792037963867, 0.06243411254882812, 0.06236502456665039, 0.06232950210571289, 0.062381790161132815, 0.06241923141479492, 0.062394367218017575, 0.062210079193115234, 0.06262688064575195, 0.06285609436035157, 0.06252953720092773, 0.06239641571044922, 0.06255001449584961, 0.06234255981445312, 0.06252934265136718, 0.06247504043579102, 0.06254182434082031, 
0.06264236831665039, 0.06281609725952149, 0.06253065490722656, 0.06274288177490234, 0.06250960159301758, 0.06239401626586914, 0.06245830535888672, 0.0625516471862793, 0.062439743041992186, 0.06271139144897461, 0.06240431976318359, 0.06270022583007813, 0.0625041618347168, 0.06275913619995117, 0.06271446228027344, 0.06268937683105469, 0.06248819351196289, 0.06251264190673828, 0.06257535934448243, 0.0625334701538086, 0.06250486373901368, 0.06326476669311523, 0.06257171249389648, 0.062003265380859374, 0.06205094528198242, 0.061884449005126956, 0.062217662811279294, 0.062077598571777345, 0.06223052978515625, 0.06232252883911133, 0.06264233779907226, 0.06252463912963867, 0.06259519958496093, 0.062415519714355466, 0.06256006240844726, 0.0626157455444336, 0.06241487884521484, 0.06250508880615234, 0.06264815902709961, 0.06249881744384766, 0.0624824333190918, 0.06230019378662109, 0.0625561294555664, 0.06217113494873047, 0.06223801422119141, 0.062281566619873045, 0.06251708984375, 0.06240972900390625, 0.06280825424194336, 0.0623675537109375, 0.06285702514648438, 0.06253587341308593, 0.06262579345703125, 0.06268713760375977, 0.06254982376098633, 0.06233116912841797, 0.06246809768676758, 0.06227084732055664, 0.0624251823425293, 0.06218191909790039, 0.06240179061889648, 0.062400447845458985, 0.06284735870361328, 0.06265647888183594, 0.062519775390625, 0.06236569595336914, 0.06251043319702149, 0.06238684844970703, 0.06243139266967773, 0.062449504852294925, 0.06248857498168945, 0.06268915176391601, 0.0627256965637207, 0.06258723068237304, 0.06278720092773438, 0.06251100921630859, 0.062462337493896486, 0.06264438247680663, 0.06260559844970703, 0.06244486236572266, 0.06257030487060547, 0.0629502067565918, 0.06282969665527344, 0.0629349136352539, 0.06313398361206055, 0.062306304931640626, 0.061986175537109375, 0.06216870498657227, 0.06230876922607422, 0.062469982147216795, 0.062294784545898436, 0.06219980621337891, 0.06226947021484375, 0.06256022262573242, 0.06264131164550782, 0.06254806518554687, 0.062470943450927734, 0.06226953506469726, 0.06219555282592774, 0.06238332748413086, 0.06248860931396484, 0.06250783920288086, 0.062375358581542965, 0.062324703216552736, 0.06210390472412109, 0.06235891342163086, 0.062196575164794925, 0.0625398063659668, 0.06229913711547851, 0.06261417770385742, 0.0629865608215332, 0.0625316162109375, 0.06247011184692383, 0.062484127044677734, 0.0624268798828125, 0.0625011215209961, 0.062443294525146485, 0.06257846450805664, 0.06254230499267578, 0.062403999328613284, 0.062364574432373046, 0.06262287902832031, 0.06239718246459961, 0.06246342468261719, 0.06266947174072265, 0.06256972885131835, 0.0625456657409668, 0.06253676986694336, 0.06256768035888671, 0.06252819061279297, 0.06265964889526367, 0.062491134643554686, 0.06285734558105469, 0.06281849670410156, 0.06255801773071289, 0.062488895416259765, 0.06267084884643555, 0.06258393478393555, 0.06242598342895508, 0.06245785522460937, 0.06271385574340821, 0.06294937515258789, 0.06274867248535156, 0.06264435195922852, 0.06258838272094727, 0.06258131027221679, 0.06251897430419921, 0.06338969421386718, 0.06230764770507812, 0.06221667098999024, 0.06216847991943359, 0.0619958381652832, 0.06206399917602539, 0.06206531143188477, 0.0622562255859375, 0.06222476959228516, 0.06220982360839844, 0.06274329757690429, 0.06271392059326172, 0.0625266227722168, 0.06247663879394531, 0.06232310485839844, 0.062362945556640625, 0.06251187133789063, 0.06235504150390625, 0.062306655883789065, 0.06257664108276367, 0.06219776153564453, 0.062321727752685546, 
0.062446529388427735, 0.062281726837158206, 0.06234883117675781, 0.0623985595703125, 0.06262211227416992, 0.06258256149291992, 0.06240604782104492, 0.06253647994995117, 0.06243081665039062, 0.06266694259643554, 0.06258505630493164, 0.06255820846557616, 0.06245158386230469, 0.06241225433349609, 0.06237820816040039, 0.0623724479675293, 0.062370849609375, 0.06228255844116211, 0.062434879302978516, 0.06264214324951171, 0.06268473434448242, 0.06282332611083985, 0.06271356964111328, 0.06252691268920899, 0.06268012619018555, 0.06284265518188477, 0.06255372619628906, 0.06265894317626954, 0.062475936889648434, 0.06269987106323242, 0.06254169464111328, 0.06267712020874024, 0.06287155151367188, 0.06269337463378906, 0.06264124679565429, 0.06267919921875, 0.0626879997253418, 0.0628809928894043, 0.06277920150756836, 0.06287263870239258, 0.06273993682861329, 0.06307872009277343, 0.06253891372680664, 0.062354270935058596, 0.06221014404296875, 0.062033790588378906, 0.062272670745849606, 0.06225913619995117, 0.06230518341064453, 0.06236972808837891, 0.06242867279052734, 0.06242707061767578, 0.06253760147094727, 0.062314369201660155, 0.06243779373168945, 0.06252297592163086, 0.06257548904418946, 0.06270326232910156, 0.06265420913696289, 0.06231228637695312, 0.06224972915649414, 0.06216022491455078, 0.06218112182617187, 0.062192543029785156, 0.06210697555541992, 0.062169761657714845, 0.06227475357055664, 0.06244160079956055, 0.0626572151184082, 0.06275459289550782, 0.06264585494995117, 0.06250969696044922, 0.06244966506958008, 0.06251830291748046, 0.0626115837097168, 0.06230223846435547, 0.06219232177734375, 0.062425247192382814, 0.0626003189086914, 0.06239113616943359, 0.062461952209472656, 0.0623135986328125, 0.06242755126953125, 0.06236620712280273, 0.06269705581665039, 0.06277772903442383, 0.06264371109008789, 0.06244809722900391, 0.06266681671142578, 0.06259913635253907, 0.06306204986572266, 0.06258070373535156, 0.06246793746948242, 0.06249283218383789, 0.06253148651123047, 0.06256204986572265, 0.0623985595703125, 0.062445217132568356, 0.06278358459472656, 0.06269321441650391, 0.0630951042175293, 0.06310960006713867, 0.06295849609375, 0.06276559829711914, 0.06363750457763671, 0.062493854522705075, 0.062117855072021486, 0.06217001724243164, 0.062152671813964847, 0.0625541114807129, 0.062363296508789065, 0.06243977737426758, 0.062228511810302735, 0.06235340881347656, 0.06246192169189453, 0.06279894256591798, 0.06251359939575195, 0.062454238891601566, 0.06237913513183594, 0.06274665451049805, 0.06229414367675781, 0.062333343505859375, 0.062303550720214845, 0.062235649108886716, 0.0620637435913086, 0.06254681777954102, 0.06274867248535156, 0.06261920166015625, 0.06242758560180664, 0.06263603210449219, 0.06255363082885743, 0.06261193466186524, 0.062441375732421874, 0.06254409790039063, 0.06260015869140625, 0.0626902084350586, 0.06256991958618165, 0.06255465698242188, 0.06277059173583985, 0.06254451370239258, 0.062304256439208984, 0.06237712097167969, 0.06239068984985351, 0.06246591949462891, 0.06242569732666016, 0.06299440002441406, 0.06295347213745117, 0.06295670318603516, 0.0626328010559082, 0.06287974548339843, 0.06260534286499024, 0.06251830291748046, 0.06257145690917969, 0.06277436828613281, 0.0627680320739746, 0.06278144073486328, 0.06265174484252929, 0.06286403274536133, 0.06257846450805664, 0.06270793533325195, 0.06261964797973633, 0.0626729278564453, 0.0627199363708496, 0.06285654449462891, 0.06297833633422852, 0.06302560043334961, 0.06266876983642578, 0.06329708862304688, 0.062414302825927734, 
0.062134815216064454, 0.062142688751220705, 0.06235161590576172, 0.062339038848876954, 0.06250451278686524, 0.062354942321777344, 0.062443649291992184, 0.06258319854736329, 0.0622022705078125, 0.06255408096313476, 0.0625992317199707, 0.06256995010375976, 0.062335487365722655, 0.06255615997314454, 0.06250499343872071, 0.0625458869934082, 0.0623185920715332, 0.062217601776123045, 0.062173534393310546, 0.06231606292724609, 0.06236630249023437, 0.062457408905029294, 0.062487136840820315, 0.06252544021606446, 0.06254796981811524, 0.06251472091674805, 0.06262179183959961, 0.06260089492797852, 0.06244217681884766, 0.06255136108398437, 0.0625835189819336, 0.06261270523071288, 0.0625334701538086, 0.06253456115722657, 0.062408737182617184, 0.06248239898681641, 0.0625576629638672, 0.06263407897949219, 0.06251359939575195, 0.06258073425292969, 0.06271088027954101, 0.06274863815307617, 0.06261446380615235, 0.06283059310913086, 0.06292214584350586, 0.06275337600708007, 0.06265804672241211, 0.06294681549072266, 0.06256313705444336, 0.06280774307250976, 0.06289596939086914, 0.06260147094726562, 0.06283283233642578, 0.06293116760253906, 0.06290537643432617, 0.06275993728637695, 0.06263804626464843, 0.06274665451049805, 0.06286844635009765, 0.06285209655761718, 0.06282649612426758]",tokens/s,15.998677924231961,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,883.073024,6232.604672,0.0,5830.08256,5627.341824,s,1,7.65989453125,7.65989453125,0.0,7.65989453125,7.65989453125,7.65989453125,7.65989453125,[7.65989453125],,kWh,6.886582499987526e-06,7.522430979055894e-07,4.035836561985806e-06,1.167466215987892e-05,,MB,1214.574592,6314.3936,0.0,5901.385728,5767.735296,s,10,4.836625213623047,0.4836625213623047,0.0036975833019111284,0.4839998474121094,0.4870138122558594,0.48811321411132813,0.4889927355957031,"[0.47577590942382814, 0.4814521179199219, 0.4800661315917969, 0.48259161376953125, 0.48378704833984376, 0.48648907470703123, 0.484212646484375, 0.4862685546875, 0.48676950073242187, 0.4892126159667969]",tokens/s,529.2946810907312,kWh,1.4107119045833215e-05,1.555766168776927e-06,9.305734957808695e-06,2.4968620172418833e-05,tokens/kWh,10252869.3308726,MB,1240.096768,6377.30816,0.0,5964.300288,5767.737856,s,10,18.01112841796875,1.801112841796875,0.0054001968840739515,1.8000936889648438,1.8069676879882812,1.8102692443847657,1.8129104895019532,"[1.8010057373046875, 1.7958402099609374, 1.79633984375, 1.8062340087890625, 1.8037437744140625, 1.799181640625, 1.81357080078125, 1.7972933349609375, 1.802501220703125, 1.7954178466796875]",tokens/s,34.97837477919942,kWh,5.338711193374692e-05,5.887036031077323e-06,3.545977307679258e-05,9.473392104161681e-05,tokens/kWh,665020.5048762202,,s,630,18.008201988220215,0.028584447600349547,0.00040857817448015276,0.028495648384094237,0.028852032470703125,0.029147196102142333,0.0303755574798584,"[0.030068416595458985, 0.02877676773071289, 
0.028407808303833007, 0.028161951065063476, 0.02823504066467285, 0.02827712059020996, 0.028268159866333006, 0.028519231796264647, 0.0282860164642334, 0.028515167236328125, 0.028444160461425783, 0.028424415588378906, 0.028387712478637694, 0.028319711685180663, 0.028290271759033203, 0.028801855087280274, 0.02846080017089844, 0.02930508804321289, 0.03101408004760742, 0.028377471923828126, 0.02860076713562012, 0.028399232864379884, 0.02837334442138672, 0.028346399307250976, 0.028454912185668944, 0.02844998359680176, 0.028326719284057618, 0.02850761604309082, 0.028482080459594727, 0.028761184692382813, 0.029035327911376953, 0.02877859115600586, 0.028507776260375976, 0.028619136810302735, 0.028647424697875977, 0.028707935333251954, 0.028451744079589843, 0.028637184143066406, 0.02838528060913086, 0.028611648559570314, 0.028436832427978516, 0.028497760772705077, 0.02851683235168457, 0.0284552001953125, 0.028495872497558594, 0.028428096771240235, 0.028592319488525392, 0.028442623138427735, 0.02835251235961914, 0.028628992080688476, 0.028491775512695314, 0.02884111976623535, 0.02854924774169922, 0.028580575942993163, 0.028612224578857422, 0.02850649642944336, 0.028499967575073244, 0.02854092788696289, 0.028452863693237306, 0.028402816772460936, 0.028777023315429688, 0.028684608459472655, 0.028520448684692383, 0.029904895782470704, 0.028770303726196288, 0.02850771141052246, 0.028420543670654295, 0.028432384490966797, 0.02880512046813965, 0.028509376525878906, 0.028224319458007813, 0.028269983291625975, 0.02828758430480957, 0.02838041687011719, 0.02822742462158203, 0.028314016342163087, 0.02866364860534668, 0.02853715133666992, 0.02861440086364746, 0.028381248474121094, 0.028362911224365236, 0.028414335250854492, 0.02839347267150879, 0.02830745506286621, 0.028282304763793946, 0.028348255157470702, 0.028736223220825197, 0.02829484748840332, 0.028455232620239256, 0.02832758331298828, 0.028416255950927734, 0.028464927673339843, 0.028434560775756835, 0.028466943740844727, 0.028660160064697265, 0.028391136169433593, 0.02843062400817871, 0.02840985679626465, 0.028671167373657228, 0.028564191818237303, 0.028340320587158203, 0.02842527961730957, 0.028308128356933592, 0.02846544075012207, 0.028528640747070313, 0.028425344467163084, 0.02840025520324707, 0.028375295639038085, 0.028642528533935546, 0.02836355209350586, 0.028478752136230467, 0.028405855178833008, 0.028383264541625975, 0.02924553680419922, 0.028553728103637696, 0.02863279914855957, 0.028671295166015624, 0.028496864318847657, 0.028477312088012696, 0.02845913505554199, 0.028408863067626952, 0.028490591049194335, 0.028836160659790038, 0.028532543182373048, 0.028513824462890625, 0.028594655990600584, 0.03035136032104492, 0.029063167572021483, 0.028423328399658204, 0.028246271133422853, 0.028125696182250977, 0.02861680030822754, 0.028161279678344725, 0.02831216049194336, 0.02828483200073242, 0.028178688049316405, 0.02819174385070801, 0.028253183364868165, 0.0283768310546875, 0.028360063552856446, 0.02827872085571289, 0.02839033508300781, 0.028562591552734374, 0.028254623413085937, 0.029496896743774415, 0.02913983917236328, 0.028360639572143555, 0.028526208877563478, 0.028270912170410157, 0.02828848075866699, 0.02833667182922363, 0.02828892707824707, 0.02821881675720215, 0.028144416809082032, 0.02819398307800293, 0.02848236846923828, 0.028546112060546875, 0.028511167526245117, 0.028350271224975587, 0.028456607818603517, 0.02842428779602051, 0.028758464813232423, 0.028415456771850586, 0.028359199523925783, 0.028359872817993164, 0.028434560775756835, 
0.02859283256530762, 0.02850201606750488, 0.028530080795288085, 0.028506719589233398, 0.028422048568725586, 0.028485727310180665, 0.028432384490966797, 0.028504064559936523, 0.028252159118652344, 0.028520448684692383, 0.02852249526977539, 0.028727296829223634, 0.03019100761413574, 0.028607072830200194, 0.028470880508422853, 0.02847990417480469, 0.028483583450317384, 0.028468608856201172, 0.02845350456237793, 0.02856083106994629, 0.028528608322143555, 0.02854153633117676, 0.028471296310424804, 0.03034169578552246, 0.029023839950561525, 0.028494016647338867, 0.028389951705932618, 0.028200960159301756, 0.028831520080566407, 0.028327903747558593, 0.028887296676635744, 0.028532352447509766, 0.029153215408325196, 0.02835478401184082, 0.028463327407836914, 0.028499967575073244, 0.028290239334106446, 0.028408639907836913, 0.028441919326782226, 0.028460960388183593, 0.028363168716430662, 0.028423904418945312, 0.028671680450439455, 0.029760095596313478, 0.028379520416259765, 0.028339296340942382, 0.029043615341186522, 0.02858188819885254, 0.028904544830322267, 0.02858448028564453, 0.028610944747924805, 0.028289024353027343, 0.028248064041137694, 0.028420095443725587, 0.028721151351928712, 0.02854092788696289, 0.02855526351928711, 0.02863033676147461, 0.029606592178344725, 0.028512063980102538, 0.02855062484741211, 0.02846384048461914, 0.029531391143798828, 0.030986080169677733, 0.028410783767700197, 0.028495040893554688, 0.028725311279296874, 0.028670112609863282, 0.028618560791015626, 0.028742176055908203, 0.028459264755249025, 0.028419103622436524, 0.028574687957763672, 0.0285347843170166, 0.028786272048950196, 0.028608383178710936, 0.02865001678466797, 0.02841804885864258, 0.0287457275390625, 0.02833612823486328, 0.028347616195678712, 0.02839836883544922, 0.02856345558166504, 0.02856345558166504, 0.02856262397766113, 0.028473535537719728, 0.030283647537231444, 0.029018463134765624, 0.028653568267822265, 0.02854310417175293, 0.02841993522644043, 0.02835193634033203, 0.02850444793701172, 0.028334367752075196, 0.02831590461730957, 0.029734560012817383, 0.02984934425354004, 0.028506719589233398, 0.028374303817749025, 0.02837071990966797, 0.028347232818603515, 0.02860969543457031, 0.028343231201171874, 0.028300544738769532, 0.028211839675903322, 0.028409984588623045, 0.028291168212890624, 0.028485536575317383, 0.02867532730102539, 0.03104230308532715, 0.02832793617248535, 0.028494911193847658, 0.028479999542236328, 0.028377376556396484, 0.02850934410095215, 0.028488576889038084, 0.02844179153442383, 0.028263359069824218, 0.028438112258911134, 0.02881171226501465, 0.028899295806884766, 0.02890297508239746, 0.028839584350585937, 0.028825599670410155, 0.028887775421142577, 0.028495935440063475, 0.028553216934204102, 0.02853843116760254, 0.028515968322753906, 0.028547903060913087, 0.028493375778198243, 0.028745216369628908, 0.028591039657592774, 0.028506111145019532, 0.028440576553344726, 0.028484703063964844, 0.028470176696777344, 0.028444671630859376, 0.028452863693237306, 0.028436384201049804, 0.028723295211791993, 0.028466432571411134, 0.028517120361328124, 0.028461055755615236, 0.028497695922851562, 0.028540767669677735, 0.02853059196472168, 0.02860233688354492, 0.028443136215209962, 0.030078176498413087, 0.028746688842773437, 0.02845644760131836, 0.028450336456298828, 0.028294015884399414, 0.0285710391998291, 0.028315488815307616, 0.028373056411743165, 0.028377887725830078, 0.028338176727294922, 0.028379135131835938, 0.028763967514038084, 0.028438720703125, 0.028630239486694336, 
0.028384031295776366, 0.0285467529296875, 0.028502336502075197, 0.028302879333496095, 0.028360511779785155, 0.028281503677368165, 0.02838937568664551, 0.028516351699829103, 0.028382879257202148, 0.028354911804199218, 0.028528640747070313, 0.028329504013061522, 0.028455135345458984, 0.028440832138061523, 0.02855116844177246, 0.02854092788696289, 0.02840575981140137, 0.028378175735473632, 0.02867910385131836, 0.028634336471557616, 0.02856220817565918, 0.028823551177978517, 0.02886444854736328, 0.028741695404052733, 0.028647424697875977, 0.028542400360107422, 0.028430431365966798, 0.028467647552490233, 0.02847542381286621, 0.028450624465942383, 0.028532928466796875, 0.028579839706420897, 0.028854047775268555, 0.028661983489990234, 0.028499967575073244, 0.028487680435180664, 0.028587295532226564, 0.02853878402709961, 0.02860038375854492, 0.028619520187377928, 0.028566623687744142, 0.028707168579101563, 0.028664384841918945, 0.028770303726196288, 0.02863443183898926, 0.02860102462768555, 0.028559232711791994, 0.028766336441040038, 0.028477439880371092, 0.030385440826416015, 0.02912518310546875, 0.028711072921752928, 0.028667903900146483, 0.028675424575805665, 0.028664480209350585, 0.028663711547851564, 0.02832598304748535, 0.02857574462890625, 0.028553216934204102, 0.02857881546020508, 0.028844095230102538, 0.02885523223876953, 0.028717056274414062, 0.02897920036315918, 0.028861984252929688, 0.02895510482788086, 0.028665855407714845, 0.028663488388061525, 0.028636991500854494, 0.02876844787597656, 0.029042272567749022, 0.029008607864379882, 0.02882294464111328, 0.028947040557861327, 0.029216768264770508, 0.02885180854797363, 0.028696992874145507, 0.02873139190673828, 0.028786687850952147, 0.02879897689819336, 0.028651071548461915, 0.02881376075744629, 0.029062271118164062, 0.028889856338500976, 0.028882207870483397, 0.02866169548034668, 0.02852659225463867, 0.02886672019958496, 0.02856012725830078, 0.028493312835693358, 0.028591936111450195, 0.0292379207611084, 0.028320831298828127, 0.028419040679931642, 0.028343648910522462, 0.028491455078125, 0.028422304153442383, 0.028353343963623046, 0.03172352027893066, 0.028571327209472655, 0.028833887100219727, 0.02849542427062988, 0.02853340721130371, 0.02911408042907715, 0.028555551528930665, 0.028384511947631835, 0.02849660873413086, 0.028616735458374024, 0.02857574462890625, 0.028724479675292968, 0.028662527084350586, 0.028610559463500978, 0.030277599334716798, 0.029984031677246094, 0.02859663963317871, 0.028315967559814453, 0.028256256103515624, 0.02832793617248535, 0.02839916801452637, 0.02823740768432617, 0.028325952529907227, 0.02814847946166992, 0.028182048797607422, 0.028033536911010744, 0.028427936553955077, 0.028407968521118165, 0.02862099266052246, 0.02894438362121582, 0.02839756774902344, 0.028329952239990235, 0.028198944091796876, 0.028275808334350585, 0.02828585624694824, 0.028407167434692383, 0.028393152236938477, 0.028494272232055664, 0.028323392868041992, 0.028394208908081055, 0.028450368881225586, 0.028495967864990233, 0.028250688552856444, 0.028338176727294922, 0.028489248275756836, 0.028598751068115234, 0.028408992767333986, 0.02845372772216797, 0.028490943908691405, 0.028595008850097657, 0.028667903900146483, 0.028578880310058594, 0.028528671264648437, 0.028365503311157225, 0.028444896697998046, 0.028825599670410155, 0.028563135147094725, 0.02834668731689453, 0.028446720123291015, 0.028404960632324217, 0.028411903381347657, 0.0287825927734375, 0.02861939239501953, 0.028563199996948244, 0.028505792617797853, 0.028582624435424805, 
0.028583391189575195, 0.028565343856811524, 0.02848134422302246, 0.02900163269042969, 0.028664255142211915, 0.028693023681640624, 0.028618751525878908, 0.028432384490966797, 0.028520448684692383, 0.02877644729614258, 0.02847648048400879, 0.03045452880859375, 0.029097312927246093, 0.028356512069702147, 0.028416767120361328, 0.028696447372436523, 0.028797056198120115, 0.028434431076049805, 0.028466751098632812, 0.028433984756469726, 0.028361600875854494, 0.028341503143310548, 0.02839423942565918, 0.02836070442199707, 0.028351903915405274, 0.028506336212158204, 0.02826835250854492, 0.028442975997924804, 0.028293344497680666, 0.028360319137573243, 0.028334463119506836, 0.028259424209594725, 0.02846732711791992, 0.02819145584106445, 0.02827884864807129, 0.028229248046875, 0.028157983779907227, 0.02813372802734375, 0.028448768615722656, 0.028450815200805665, 0.028575584411621092, 0.028397727966308593, 0.02864668846130371, 0.02844495964050293, 0.028611007690429686, 0.02878873634338379, 0.028639232635498047, 0.028431488037109376, 0.028606687545776367, 0.028496543884277345, 0.029810272216796874, 0.029413791656494142, 0.02855526351928711, 0.02857164764404297, 0.02851584053039551, 0.02842428779602051, 0.028692895889282227, 0.02837238311767578, 0.02837779235839844, 0.02823539161682129, 0.028534719467163086, 0.02848134422302246, 0.028910112380981446, 0.029243392944335936, 0.03180944061279297, 0.028631135940551757, 0.028529888153076173, 0.028442527770996092, 0.0284640007019043, 0.02842620849609375, 0.02852252769470215, 0.028505664825439453, 0.028594623565673827, 0.02869862365722656, 0.02997987174987793, 0.028799711227416994, 0.028524608612060548, 0.028467199325561524, 0.028180255889892578, 0.028149984359741212, 0.02931648063659668, 0.028700799942016603, 0.028545183181762697, 0.028298656463623048, 0.028395551681518555, 0.028281503677368165, 0.028245855331420898, 0.028281248092651368, 0.028378271102905275, 0.028341087341308593, 0.02836070442199707, 0.02824163246154785, 0.028367136001586916, 0.028583391189575195, 0.028490272521972657, 0.02835251235961914, 0.028304512023925782, 0.02837548828125, 0.028309375762939452, 0.028271167755126954, 0.028444671630859376, 0.028437599182128907, 0.028457056045532225, 0.02831795120239258, 0.02849590492248535, 0.028403488159179688, 0.028377504348754884, 0.028534751892089844, 0.028882463455200194, 0.028511072158813478, 0.028446720123291015, 0.028579584121704103, 0.02849203109741211, 0.02840575981140137, 0.02857164764404297, 0.028602367401123048, 0.028579839706420897, 0.028589248657226562, 0.028478271484375, 0.028532255172729493, 0.02852092742919922, 0.028325248718261718, 0.028422784805297852, 0.028434431076049805, 0.02855526351928711, 0.02844220733642578, 0.02845302391052246, 0.028440351486206054, 0.02851068878173828, 0.028624576568603517, 0.02872723197937012, 0.0284716796875, 0.0285614070892334, 0.028403711318969727, 0.028606464385986328, 0.02857369613647461, 0.028375040054321288]",tokens/s,34.98405895336496,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 353, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 153153 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,784.0768,6162.415616,0.0,5767.168,5561.701376,s,1,7.27330419921875,7.27330419921875,0.0,7.27330419921875,7.27330419921875,7.27330419921875,7.27330419921875,[7.27330419921875],,kWh,4.423376970855013e-06,4.805878847703495e-07,1.4000011199916118e-06,6.303965975616974e-06,,MB,1110.417408,6172.901376,0.0,5767.168,5440.258048,s,10,4.984247161865235,0.4984247161865235,0.0032084074497081786,0.4983302459716797,0.5019024810791015,0.5026877975463867,0.5033160507202149,"[0.49047341918945314, 0.4979249572753906, 0.497743896484375, 0.49801724243164064, 0.49864324951171873, 0.5034731140136719, 0.5017279663085937, 0.4996493225097656, 0.49895217895507815, 0.4976418151855469]",tokens/s,513.6181888383684,kWh,1.448518486944421e-05,1.5974670973678187e-06,9.581885972381016e-06,2.5664537939193045e-05,tokens/kWh,9974853.262760485,MB,1135.939584,6174.998528,0.0,5769.265152,5523.463168,s,10,18.293435791015625,1.8293435791015624,0.003121007848996947,1.8282823486328126,1.8341563354492187,1.8346082580566405,1.8349697961425782,"[1.8311048583984375, 1.8350601806640625, 1.827931396484375, 1.8247835693359375, 1.82711572265625, 1.8304154052734376, 1.834055908203125, 1.826645263671875, 1.827690185546875, 1.82863330078125]",tokens/s,34.43858262587333,kWh,5.3543071277223245e-05,5.905837187575707e-06,3.5573594596419904e-05,9.502250306121886e-05,tokens/kWh,663000.8468563688,,s,630,18.290563566207897,0.02903264058128236,0.00021874437743233727,0.029019136428833008,0.029228883743286135,0.029365083980560303,0.029853543834686278,"[0.029493919372558595, 0.02923014450073242, 0.02893305587768555, 0.028976512908935548, 0.028829311370849608, 0.028816383361816408, 0.028851295471191408, 0.0287425594329834, 0.02881033515930176, 0.028782783508300783, 0.028768287658691407, 0.028854560852050782, 0.028818975448608397, 0.02878963279724121, 0.028829696655273438, 0.02877644729614258, 0.028891103744506836, 0.028975135803222658, 0.028838911056518556, 0.028721311569213866, 0.028959199905395507, 0.028925376892089842, 0.028842208862304687, 0.02884272003173828, 0.0292161922454834, 0.02918662452697754, 0.029124607086181642, 0.029107999801635743, 0.029051103591918946, 0.02902016067504883, 0.02901718330383301, 0.02896784019470215, 0.029022207260131837, 0.028904544830322267, 0.029229055404663085, 0.029608287811279298, 0.029907680511474608, 0.029040159225463866, 0.029331775665283204, 0.029017919540405272, 0.029100223541259764, 0.029152704238891602, 0.029253952026367186, 0.029024511337280273, 0.02906723213195801, 0.02914102363586426, 0.029128223419189452, 0.02909132766723633, 
0.029004768371582033, 0.02911782455444336, 0.029112960815429686, 0.02913689613342285, 0.029456384658813478, 0.029107967376708985, 0.028993568420410155, 0.028979391098022462, 0.02908367919921875, 0.02933260726928711, 0.029719072341918944, 0.02917340850830078, 0.02912326431274414, 0.02918604850769043, 0.029286207199096678, 0.029671424865722655, 0.02931427192687988, 0.029045536041259767, 0.02888630485534668, 0.02899836730957031, 0.028839647293090822, 0.028772640228271484, 0.028856319427490236, 0.028944095611572265, 0.028768064498901368, 0.028758495330810548, 0.028763200759887694, 0.02884281539916992, 0.02899286460876465, 0.028893983840942383, 0.028915199279785156, 0.0288602237701416, 0.029218624114990235, 0.0290350399017334, 0.029170015335083007, 0.029068384170532226, 0.029084064483642577, 0.02913564872741699, 0.029161184310913087, 0.029321216583251954, 0.03021414375305176, 0.02913088035583496, 0.02913267135620117, 0.02911027145385742, 0.029077503204345705, 0.029016063690185546, 0.02886444854736328, 0.028934207916259766, 0.029069120407104493, 0.029108480453491212, 0.029177791595458986, 0.029124607086181642, 0.02916316795349121, 0.02909542465209961, 0.029145952224731445, 0.029083648681640626, 0.029087135314941406, 0.02906991958618164, 0.02915328025817871, 0.02899964714050293, 0.029085727691650392, 0.029117919921875, 0.029311519622802734, 0.029843456268310548, 0.02924896049499512, 0.029106143951416016, 0.029459039688110353, 0.029147136688232423, 0.029103967666625978, 0.028928159713745117, 0.028993535995483398, 0.02914656066894531, 0.029030656814575194, 0.029110591888427736, 0.029132575988769532, 0.02917740821838379, 0.02936016082763672, 0.030347904205322265, 0.029787456512451172, 0.029506431579589844, 0.029179904937744142, 0.028985343933105468, 0.02895257568359375, 0.02886419105529785, 0.028846080780029298, 0.028741920471191406, 0.028893024444580077, 0.028743520736694336, 0.028778175354003906, 0.029153472900390626, 0.028944000244140625, 0.028795743942260744, 0.028889087677001952, 0.028874176025390625, 0.028903999328613282, 0.028740800857543946, 0.02887353515625, 0.028844160079956056, 0.029081472396850584, 0.028849983215332033, 0.02879088020324707, 0.02878678321838379, 0.02896895980834961, 0.029009920120239258, 0.02904051208496094, 0.02897011184692383, 0.029074432373046875, 0.028998783111572266, 0.028883840560913084, 0.02889094352722168, 0.02902035140991211, 0.0290795841217041, 0.029104032516479493, 0.02905206489562988, 0.02916035270690918, 0.02906422424316406, 0.029096927642822266, 0.029001407623291016, 0.028926271438598633, 0.02889727973937988, 0.02897100830078125, 0.029024255752563476, 0.029112319946289062, 0.029111520767211914, 0.029065919876098634, 0.028939903259277342, 0.028939775466918945, 0.028939231872558594, 0.029005823135375978, 0.029087743759155273, 0.029157375335693358, 0.029236991882324218, 0.02911836814880371, 0.029069664001464844, 0.029142528533935546, 0.028967424392700194, 0.02912156867980957, 0.02917180824279785, 0.029139680862426756, 0.02912188720703125, 0.029170495986938477, 0.029755392074584962, 0.02931635284423828, 0.029012416839599608, 0.028788543701171874, 0.02880460739135742, 0.028767232894897462, 0.028890623092651366, 0.028827295303344727, 0.02875040054321289, 0.02880745506286621, 0.02886444854736328, 0.028806463241577148, 0.028754592895507813, 0.02879033660888672, 0.028735456466674806, 0.02883635139465332, 0.028755136489868164, 0.028827775955200197, 0.028825504302978516, 0.028746591567993165, 0.028820735931396484, 0.028857088088989256, 0.028823551177978517, 
0.028831743240356447, 0.028814847946166993, 0.02888902473449707, 0.028856895446777345, 0.02892185592651367, 0.028775936126708986, 0.0288035831451416, 0.029042688369750977, 0.02904854393005371, 0.028936479568481447, 0.028863679885864257, 0.029032896041870117, 0.029126047134399414, 0.02900271987915039, 0.029165567398071288, 0.029130367279052733, 0.029143135070800782, 0.029122848510742188, 0.029090879440307617, 0.029180864334106445, 0.029112319946289062, 0.029033632278442384, 0.028984159469604493, 0.029030399322509767, 0.02902195167541504, 0.02900105667114258, 0.02892892837524414, 0.02890310478210449, 0.02899795150756836, 0.028994911193847655, 0.029101951599121094, 0.029081600189208984, 0.029008672714233397, 0.029022207260131837, 0.02897305679321289, 0.028964864730834962, 0.028985343933105468, 0.02910313606262207, 0.029178815841674803, 0.029143072128295897, 0.02969267272949219, 0.029351936340332032, 0.029144832611083984, 0.02889084815979004, 0.028858911514282225, 0.028799232482910157, 0.028759807586669923, 0.028671392440795897, 0.028844064712524414, 0.02883865547180176, 0.028699615478515624, 0.028875455856323243, 0.028817472457885743, 0.028805215835571288, 0.029059072494506837, 0.028835840225219726, 0.02895462417602539, 0.028704767227172853, 0.028994848251342773, 0.028906208038330078, 0.028882335662841797, 0.028838783264160155, 0.02888604736328125, 0.02908585548400879, 0.028891103744506836, 0.028912191390991212, 0.028909568786621095, 0.028857696533203126, 0.028861183166503906, 0.028864416122436523, 0.028910655975341797, 0.029014976501464843, 0.029089536666870117, 0.02917196846008301, 0.029042591094970704, 0.028950624465942383, 0.029048831939697265, 0.029009920120239258, 0.02884383964538574, 0.028960607528686524, 0.029036991119384764, 0.02910812759399414, 0.029070751190185547, 0.029086143493652343, 0.028951839447021486, 0.028954559326171875, 0.02886342430114746, 0.029114048004150392, 0.029135168075561522, 0.02911027145385742, 0.029071359634399413, 0.029149183273315428, 0.029043935775756837, 0.029076255798339844, 0.02910380744934082, 0.029090112686157226, 0.02916761589050293, 0.02919628715515137, 0.02918943977355957, 0.02908639907836914, 0.02939228820800781, 0.029194175720214845, 0.029102752685546875, 0.029838720321655274, 0.030132768630981445, 0.029251615524291993, 0.02888096046447754, 0.02879692840576172, 0.02876006317138672, 0.028824607849121095, 0.028921920776367186, 0.028797855377197267, 0.028831872940063476, 0.028702272415161132, 0.028780256271362305, 0.0287523193359375, 0.028774431228637695, 0.028884511947631836, 0.028987007141113283, 0.029023199081420897, 0.029081279754638673, 0.028879167556762696, 0.02877440071105957, 0.0289401912689209, 0.02898543930053711, 0.02891366386413574, 0.028770303726196288, 0.028847200393676758, 0.028778816223144533, 0.02871766471862793, 0.028956512451171874, 0.028886655807495117, 0.028885536193847657, 0.028955968856811523, 0.02891436767578125, 0.02916761589050293, 0.029095935821533202, 0.02915328025817871, 0.029124607086181642, 0.02909756851196289, 0.029219167709350586, 0.029106239318847656, 0.029123968124389648, 0.02905766487121582, 0.02897305679321289, 0.029138944625854493, 0.0289072322845459, 0.02903273582458496, 0.02894985580444336, 0.028938911437988282, 0.02902355194091797, 0.029780672073364257, 0.030064640045166017, 0.029091840744018556, 0.029106176376342774, 0.029122560501098634, 0.029185760498046876, 0.02918796730041504, 0.029290912628173828, 0.029278207778930664, 0.02922700881958008, 0.02915328025817871, 0.029007648468017577, 0.029075679779052736, 
0.029115776062011718, 0.02908812713623047, 0.02990015983581543, 0.02932569694519043, 0.029077695846557616, 0.02898067283630371, 0.02894099235534668, 0.029857664108276366, 0.02898099136352539, 0.02896732711791992, 0.029054943084716796, 0.028907520294189453, 0.02891948890686035, 0.0289036808013916, 0.028763967514038084, 0.029310400009155274, 0.028996416091918945, 0.029042400360107423, 0.02890166473388672, 0.029031871795654297, 0.028822080612182617, 0.028842144012451172, 0.028954463958740233, 0.028988927841186524, 0.029158975601196287, 0.028808128356933593, 0.028852127075195313, 0.02894857597351074, 0.029059072494506837, 0.029017440795898436, 0.02896473693847656, 0.02889616012573242, 0.029093023300170898, 0.02912719917297363, 0.02915551948547363, 0.029290496826171877, 0.029315071105957033, 0.029743104934692382, 0.0291975040435791, 0.0290251522064209, 0.028962751388549805, 0.029072895050048828, 0.02921513557434082, 0.029093984603881837, 0.029112319946289062, 0.028987648010253907, 0.02917296028137207, 0.029114912033081055, 0.029112319946289062, 0.029063167572021483, 0.02927155113220215, 0.02903910446166992, 0.029152864456176757, 0.029106592178344725, 0.029218719482421874, 0.029154592514038086, 0.02903327941894531, 0.029140127182006835, 0.029135103225708007, 0.028985952377319334, 0.029244831085205078, 0.02939913558959961, 0.02932371139526367, 0.029175872802734374, 0.029335552215576172, 0.029573152542114258, 0.02922083282470703, 0.028986463546752928, 0.02880780792236328, 0.028684576034545897, 0.028790143966674803, 0.02876684761047363, 0.028818912506103515, 0.028898080825805663, 0.02885196876525879, 0.028753919601440428, 0.028804351806640625, 0.028711103439331056, 0.02920694351196289, 0.028950687408447265, 0.028839935302734376, 0.028846080780029298, 0.02883558464050293, 0.028823808670043947, 0.02885577583312988, 0.028899135589599608, 0.029093759536743164, 0.029055263519287108, 0.029049407958984374, 0.029119552612304686, 0.028967872619628906, 0.028964864730834962, 0.028837888717651368, 0.028853792190551758, 0.028862943649291994, 0.028850048065185548, 0.028860544204711912, 0.028909568786621095, 0.029114368438720704, 0.029281375885009765, 0.02910915184020996, 0.029138368606567384, 0.029043104171752928, 0.029070911407470704, 0.029003904342651366, 0.029083776473999023, 0.02894063949584961, 0.028811264038085937, 0.028808191299438478, 0.029035520553588868, 0.029017887115478515, 0.02889289665222168, 0.028883071899414064, 0.028995168685913085, 0.029240095138549804, 0.028987199783325195, 0.02906540870666504, 0.029105152130126953, 0.02916454315185547, 0.0291409912109375, 0.029167167663574217, 0.029110687255859375, 0.02948508834838867, 0.029087135314941406, 0.029122560501098634, 0.028979808807373046, 0.028970815658569335, 0.02915760040283203, 0.029523359298706055, 0.029139328002929687, 0.028970560073852538, 0.029030527114868164, 0.028924543380737303, 0.02877008056640625, 0.028821407318115236, 0.02876367950439453, 0.028709888458251953, 0.02878441619873047, 0.028672224044799806, 0.028870527267456054, 0.028838016510009765, 0.02877337646484375, 0.02882252883911133, 0.028769439697265625, 0.02886124801635742, 0.028880640029907225, 0.028767616271972656, 0.028785696029663087, 0.028931968688964842, 0.028815071105957032, 0.028714527130126954, 0.029078271865844725, 0.02892799949645996, 0.02896076774597168, 0.028903423309326173, 0.02894438362121582, 0.029032447814941405, 0.028960224151611327, 0.028862815856933594, 0.028819232940673827, 0.02924176025390625, 0.029159423828125, 0.02915305519104004, 0.02912892723083496, 
0.029089792251586914, 0.029091583251953126, 0.029018112182617187, 0.029143455505371094, 0.029095264434814454, 0.029097888946533205, 0.029364063262939454, 0.029164287567138673, 0.028901376724243165, 0.02895680046081543, 0.029065088272094728, 0.029286367416381836, 0.029003807067871094, 0.029065216064453125, 0.029155328750610353, 0.029228864669799806, 0.029130943298339845, 0.029147136688232423, 0.029097984313964844, 0.029079744338989258, 0.028983104705810548, 0.029109920501708984, 0.02917206382751465, 0.029149183273315428, 0.02936591911315918, 0.0292127685546875, 0.02911414337158203, 0.029585439682006835, 0.029360128402709962, 0.0290317440032959, 0.028840639114379882, 0.02878607940673828, 0.028741920471191406, 0.028700672149658202, 0.028709184646606444, 0.028735488891601563, 0.028729280471801757, 0.028768320083618164, 0.02873958396911621, 0.029011743545532227, 0.028839424133300783, 0.028760799407958983, 0.028839935302734376, 0.028837888717651368, 0.028844032287597656, 0.028841983795166014, 0.028917247772216798, 0.028868127822875976, 0.02886070442199707, 0.02893484878540039, 0.028899328231811523, 0.029386175155639647, 0.02962441635131836, 0.02909379196166992, 0.028977727890014647, 0.028841983795166014, 0.0290546875, 0.029050304412841798, 0.02952176094055176, 0.029180927276611326, 0.02919219207763672, 0.02897100830078125, 0.029097600936889647, 0.02896067237854004, 0.028998111724853514, 0.02898124885559082, 0.029068607330322266, 0.02911846351623535, 0.02893894386291504, 0.028908607482910156, 0.02886284828186035, 0.02895110321044922, 0.029173759460449217, 0.02914303970336914, 0.029106176376342774, 0.029140928268432616, 0.02900383949279785, 0.029147136688232423, 0.029120512008666992, 0.02914406394958496, 0.029159423828125, 0.029145599365234375, 0.029131263732910157, 0.02913430404663086, 0.02913539123535156, 0.029217920303344726, 0.02936716842651367, 0.029095935821533202, 0.029086719512939452, 0.028946815490722658]",tokens/s,34.443990624976436,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,882.335744,3447.586816,0.0,3045.064704,2842.846208,s,1,7.7210048828125,7.7210048828125,0.0,7.7210048828125,7.7210048828125,7.7210048828125,7.7210048828125,[7.7210048828125],,kWh,5.835663399996823e-06,6.329522974417794e-07,2.0383349639968573e-06,8.50695066143546e-06,,MB,1209.663488,3642.621952,0.0,3229.61408,2982.452736,s,10,2.442856903076172,0.2442856903076172,0.0024501983762480924,0.24484102630615234,0.2467932830810547,0.24753977661132812,0.2481369714355469,"[0.2449255676269531, 0.23918403625488283, 0.2454659881591797, 0.24227999877929687, 0.2466273956298828, 0.24292214965820313, 0.24268789672851562, 0.24475648498535157, 0.24828627014160157, 
0.24572111511230468]",tokens/s,1047.9533192371257,kWh,7.380895959791283e-06,8.13830150021564e-07,4.916371988649901e-06,1.3111098098462747e-05,tokens/kWh,19525443.10762312,MB,1241.391104,3642.621952,0.0,3229.61408,2982.455296,s,10,14.60658544921875,1.460658544921875,0.004847186554955456,1.4616015014648438,1.4673707397460938,1.4678042053222655,1.4681509777832031,"[1.4555418701171876, 1.4633021240234374, 1.462778564453125, 1.4574183349609375, 1.4682376708984375, 1.4628365478515626, 1.4554061279296875, 1.4672744140625, 1.4533653564453124, 1.4604244384765626]",tokens/s,43.13123023791274,kWh,4.226237690646296e-05,4.6614494844228874e-06,2.5920680458750916e-05,7.284450684963677e-05,tokens/kWh,864855.8789757822,,s,630,14.602551849365232,0.023178653729151167,0.00037493229851685606,0.02311244773864746,0.0234231803894043,0.023610700607299806,0.02455951375961304,"[0.0233908805847168, 0.02282048034667969, 0.023239295959472658, 0.023003135681152344, 0.023117824554443358, 0.022928447723388673, 0.022897439956665037, 0.022919328689575195, 0.023003007888793944, 0.02308723258972168, 0.022888320922851563, 0.022816896438598633, 0.02304614448547363, 0.023044095993041993, 0.023264448165893556, 0.02303433609008789, 0.02310383987426758, 0.02318934440612793, 0.023086624145507814, 0.023035551071166994, 0.023088287353515625, 0.023052095413208008, 0.023082719802856446, 0.02299523162841797, 0.023044095993041993, 0.02307459259033203, 0.023380191802978515, 0.02309097671508789, 0.023076128005981446, 0.023030496597290038, 0.022921440124511718, 0.022958080291748048, 0.023114912033081053, 0.023046655654907225, 0.023090784072875976, 0.02319366455078125, 0.023185920715332032, 0.023054527282714843, 0.02311347198486328, 0.02382259178161621, 0.02323628807067871, 0.02292527961730957, 0.023000831604003905, 0.022973024368286132, 0.022947839736938477, 0.022956031799316406, 0.02306876754760742, 0.02303164863586426, 0.02319705581665039, 0.023390911102294923, 0.023725311279296876, 0.02330454444885254, 0.023130527496337892, 0.02303081512451172, 0.023089984893798828, 0.022891807556152343, 0.022879104614257812, 0.0233503360748291, 0.023100351333618162, 0.02304204750061035, 0.023145599365234373, 0.023292800903320313, 0.02307244873046875, 0.023453792572021483, 0.02293168067932129, 0.023067968368530273, 0.022917695999145508, 0.022870208740234373, 0.023115711212158205, 0.022853631973266602, 0.02307891273498535, 0.02312118339538574, 0.023289663314819336, 0.023193632125854492, 0.023169343948364257, 0.023142047882080078, 0.02309622383117676, 0.02757360076904297, 0.026079904556274413, 0.02317283248901367, 0.023189151763916015, 0.02312256050109863, 0.02354380798339844, 0.022957183837890624, 0.02330099105834961, 0.023340320587158202, 0.023149280548095702, 0.02310163116455078, 0.022976320266723634, 0.023436511993408203, 0.02306537628173828, 0.023070911407470703, 0.02308198356628418, 0.023163711547851563, 0.02309529685974121, 0.02329804801940918, 0.023214080810546874, 0.023088672637939452, 0.023077056884765624, 0.02295631980895996, 0.022999040603637694, 0.02355392074584961, 0.023040128707885743, 0.023184831619262696, 0.02296380805969238, 0.02303299140930176, 0.023008768081665038, 0.023044416427612305, 0.022996992111206056, 0.02285740852355957, 0.02290105628967285, 0.023160255432128907, 0.0231081600189209, 0.02346598434448242, 0.023044095993041993, 0.02312723159790039, 0.022986976623535157, 0.023011072158813477, 0.02290768051147461, 0.022917184829711914, 0.02304310417175293, 0.023030752182006835, 0.022984352111816406, 0.02309974479675293, 
0.023023616790771483, 0.023060480117797853, 0.023719839096069336, 0.023162879943847657, 0.02333657646179199, 0.02323699188232422, 0.02337353515625, 0.023108991622924804, 0.02327225685119629, 0.023193599700927735, 0.023295360565185545, 0.023175167083740233, 0.023118560791015624, 0.023867263793945312, 0.023315872192382812, 0.023118560791015624, 0.02324857521057129, 0.02305788803100586, 0.023126367568969727, 0.023071231842041014, 0.0231014404296875, 0.023257087707519532, 0.023487871170043945, 0.023269855499267578, 0.023267488479614257, 0.023341056823730468, 0.023244800567626952, 0.02314035224914551, 0.02305023956298828, 0.023171072006225587, 0.023076416015625, 0.02300480079650879, 0.02297494316101074, 0.023152992248535155, 0.023244800567626952, 0.02313363265991211, 0.02323311996459961, 0.02358678436279297, 0.023121919631958008, 0.023183359146118163, 0.023218175888061524, 0.024130624771118166, 0.023178176879882814, 0.02289036750793457, 0.023048320770263673, 0.02304819107055664, 0.02291836738586426, 0.023214879989624022, 0.023244287490844725, 0.02307308769226074, 0.02316921615600586, 0.023146240234375, 0.023214271545410156, 0.023025728225708007, 0.023119968414306642, 0.023009183883666993, 0.023859199523925782, 0.023031808853149413, 0.02332262420654297, 0.02311292839050293, 0.023180063247680665, 0.023158655166625977, 0.022978591918945312, 0.023105152130126955, 0.023114208221435548, 0.023684288024902345, 0.0229752311706543, 0.023254783630371093, 0.02314271926879883, 0.022972415924072266, 0.023244800567626952, 0.023005184173583985, 0.023152639389038086, 0.022958080291748048, 0.02320342445373535, 0.023103904724121094, 0.02310553550720215, 0.02313167953491211, 0.023003616333007813, 0.022983871459960937, 0.02308348846435547, 0.022981056213378905, 0.022994848251342775, 0.022964223861694336, 0.022964223861694336, 0.023580671310424805, 0.02552182388305664, 0.02302992057800293, 0.02299513626098633, 0.022996671676635744, 0.02295225524902344, 0.02297382354736328, 0.022931808471679686, 0.02279248046875, 0.02305183982849121, 0.0228417911529541, 0.022898687362670898, 0.022999040603637694, 0.023111328125, 0.023439231872558593, 0.023122400283813477, 0.023147680282592772, 0.022947711944580076, 0.0230020809173584, 0.02312396812438965, 0.023257087707519532, 0.02306217575073242, 0.02306902313232422, 0.02302566337585449, 0.023066848754882813, 0.023152128219604492, 0.023238847732543946, 0.02310153579711914, 0.023418880462646483, 0.02309734344482422, 0.02315987205505371, 0.02299728012084961, 0.023241504669189453, 0.02310313606262207, 0.023027936935424806, 0.02302566337585449, 0.023136255264282226, 0.02304614448547363, 0.023150592803955077, 0.023093248367309572, 0.022971935272216797, 0.023166879653930664, 0.02304185676574707, 0.023386112213134767, 0.023133855819702148, 0.023283231735229493, 0.02304252815246582, 0.023574880599975586, 0.02310348892211914, 0.02318262481689453, 0.023096031188964843, 0.024392959594726562, 0.023538431167602538, 0.023562240600585937, 0.023353343963623048, 0.023169183731079103, 0.023045984268188477, 0.022975744247436522, 0.023460256576538087, 0.02324515151977539, 0.023069696426391603, 0.023077184677124024, 0.022993600845336915, 0.022939136505126953, 0.02312633514404297, 0.02314054489135742, 0.023085056304931642, 0.024408063888549804, 0.025577472686767577, 0.02327552032470703, 0.023286848068237304, 0.02327238464355469, 0.02317299270629883, 0.02319955253601074, 0.023200063705444335, 0.02321343994140625, 0.02316556739807129, 0.023199615478515626, 0.02324287986755371, 0.023085056304931642, 
0.022966304779052735, 0.023375743865966796, 0.023084159851074218, 0.02308732795715332, 0.02328153610229492, 0.023262079238891602, 0.023226367950439454, 0.02332262420654297, 0.02327756881713867, 0.023181312561035155, 0.023425024032592775, 0.023422975540161133, 0.023375871658325196, 0.023366943359375, 0.02331062316894531, 0.02334355163574219, 0.023113727569580078, 0.02330828857421875, 0.023385215759277343, 0.023312639236450196, 0.02312460708618164, 0.023114912033081053, 0.02319820785522461, 0.023451135635375975, 0.02316543960571289, 0.023089504241943358, 0.02348640060424805, 0.02325119972229004, 0.023093248367309572, 0.02307788848876953, 0.023065088272094726, 0.02311359977722168, 0.02314703941345215, 0.02321609687805176, 0.023084543228149415, 0.023195968627929688, 0.022958175659179687, 0.023115455627441408, 0.02304800033569336, 0.02304819107055664, 0.02324742317199707, 0.02299068832397461, 0.02331180763244629, 0.023571327209472658, 0.023834623336791993, 0.023887392044067382, 0.023509471893310548, 0.02307481575012207, 0.023174751281738282, 0.02332713508605957, 0.023093248367309572, 0.023193151473999023, 0.023144895553588868, 0.023211231231689455, 0.02337183952331543, 0.023271839141845704, 0.023410655975341796, 0.023019039154052734, 0.02310966491699219, 0.023171487808227538, 0.02318489646911621, 0.02311871910095215, 0.023027711868286133, 0.023188608169555664, 0.023159679412841798, 0.02313737678527832, 0.02293756866455078, 0.02298876762390137, 0.023156896591186523, 0.02320191955566406, 0.023147199630737306, 0.023170848846435547, 0.023764640808105468, 0.023232255935668945, 0.0235130558013916, 0.02349286460876465, 0.023207679748535156, 0.023228288650512696, 0.023089792251586912, 0.023170591354370117, 0.02321609687805176, 0.023165792465209962, 0.0231212158203125, 0.02305081558227539, 0.02310966491699219, 0.023044191360473632, 0.02306252861022949, 0.02358032035827637, 0.02310383987426758, 0.024459999084472658, 0.023638015747070314, 0.02320988845825195, 0.023820383071899414, 0.023168384552001955, 0.023723743438720704, 0.02309212875366211, 0.02315673637390137, 0.022994207382202148, 0.023115520477294923, 0.02308134460449219, 0.023110240936279298, 0.023160224914550782, 0.023128671646118162, 0.02362272071838379, 0.02302569580078125, 0.022944927215576172, 0.02291996765136719, 0.023386463165283203, 0.022925952911376953, 0.022976287841796873, 0.02300480079650879, 0.02302204895019531, 0.0227063045501709, 0.02278160095214844, 0.02272831916809082, 0.022798368453979492, 0.02284614372253418, 0.022810304641723633, 0.022624576568603515, 0.02273641586303711, 0.023345632553100584, 0.02304844856262207, 0.023039743423461913, 0.02307788848876953, 0.022969343185424804, 0.02309881591796875, 0.02291155242919922, 0.02299830436706543, 0.023050207138061524, 0.023214847564697265, 0.023193119049072265, 0.023310815811157227, 0.02323865509033203, 0.023076864242553712, 0.02298195266723633, 0.023171775817871092, 0.023242208480834962, 0.023271968841552734, 0.023040000915527343, 0.022964223861694336, 0.023019519805908203, 0.022988800048828126, 0.0230645751953125, 0.023158784866333007, 0.02288025665283203, 0.023019519805908203, 0.022982656478881838, 0.02275119972229004, 0.022958080291748048, 0.022979711532592772, 0.023070880889892578, 0.023173887252807616, 0.023638784408569338, 0.02325299263000488, 0.023044095993041993, 0.022980607986450196, 0.02376095962524414, 0.02571001625061035, 0.026135040283203126, 0.02329091262817383, 0.02317001533508301, 0.02292736053466797, 0.022941280364990234, 0.02330009651184082, 0.022888864517211914, 
0.02309836769104004, 0.02326144027709961, 0.023208703994750977, 0.023093248367309572, 0.02345724868774414, 0.02332521629333496, 0.022984703063964843, 0.02309065628051758, 0.023054880142211916, 0.022962175369262695, 0.02330931282043457, 0.02304627227783203, 0.02313478469848633, 0.023033727645874025, 0.023163328170776366, 0.023172447204589844, 0.02305295944213867, 0.023235679626464844, 0.02308803176879883, 0.02291734313964844, 0.023369184494018556, 0.023040416717529297, 0.02332972717285156, 0.024105247497558595, 0.023136959075927735, 0.023172224044799804, 0.023132608413696288, 0.023287872314453124, 0.02319545555114746, 0.023089727401733397, 0.023130111694335938, 0.023616863250732423, 0.023597728729248046, 0.02320969581604004, 0.023138559341430665, 0.023005216598510743, 0.023144447326660156, 0.023004480361938476, 0.02322265625, 0.02333247947692871, 0.023349088668823244, 0.02300195121765137, 0.024457120895385744, 0.023282848358154296, 0.023116735458374022, 0.022928768157958985, 0.0231409912109375, 0.02281062316894531, 0.02286591911315918, 0.02294278335571289, 0.023531455993652344, 0.0229683837890625, 0.0237587833404541, 0.023025440216064452, 0.023029823303222657, 0.0231507511138916, 0.022950016021728515, 0.02283296012878418, 0.02309312057495117, 0.022982847213745116, 0.022921215057373046, 0.02291916847229004, 0.022840511322021483, 0.023187711715698243, 0.023155359268188475, 0.023549407958984376, 0.02297648048400879, 0.023376352310180665, 0.023228479385375977, 0.022931392669677735, 0.02307072067260742, 0.023045312881469725, 0.02291289520263672, 0.02307961654663086, 0.02284060859680176, 0.023208927154541016, 0.022863872528076173, 0.022798336029052735, 0.02274508857727051, 0.023117824554443358, 0.0227959041595459, 0.02283955192565918, 0.022763168334960938, 0.022938079833984375, 0.022941408157348634, 0.02299728012084961, 0.023744512557983398, 0.02296803283691406, 0.023095392227172853, 0.02322150421142578, 0.02305734443664551, 0.02305843162536621, 0.022957536697387697, 0.022888992309570314, 0.02292697525024414, 0.022911264419555665, 0.023453279495239256, 0.022902912139892578, 0.022868255615234374, 0.023446752548217775, 0.02287705612182617, 0.02287606430053711, 0.02289801597595215, 0.023249664306640626, 0.02311369514465332, 0.022945247650146484, 0.023224607467651367, 0.02307046318054199, 0.02308764839172363, 0.023621055603027345, 0.02307321548461914, 0.02312771224975586, 0.0229803524017334, 0.023576543807983397, 0.023326656341552735, 0.02310598373413086, 0.02360316848754883, 0.023201311111450195, 0.023284191131591796, 0.023142400741577147, 0.02326019287109375, 0.023092191696166994, 0.023230464935302734, 0.023557727813720702, 0.024600160598754882, 0.0233767032623291, 0.023171072006225587, 0.02308915138244629, 0.023108640670776368, 0.022983295440673828, 0.0228621768951416, 0.02343231964111328, 0.022877248764038086, 0.02299171257019043, 0.0228832950592041, 0.023011327743530274, 0.023152639389038086, 0.023184383392333984, 0.023149568557739256, 0.02311564826965332, 0.023001216888427736, 0.02287171173095703, 0.023318880081176756, 0.023250944137573244, 0.022962175369262695, 0.023068159103393555, 0.023142688751220702, 0.023036127090454103, 0.023130111694335938, 0.023230464935302734, 0.023072063446044924, 0.023390911102294923, 0.02310704040527344, 0.023200128555297853, 0.022974624633789062, 0.02325503921508789, 0.022912864685058595, 0.023088895797729492, 0.02312575912475586, 0.023227039337158202, 0.023034944534301757, 0.02328233528137207, 0.02311196708679199, 0.023121023178100587, 0.023223167419433595, 
0.023043296813964845, 0.023061279296875, 0.02309529685974121, 0.02310371208190918, 0.023052064895629883, 0.023060480117797853, 0.02325299263000488, 0.02301046371459961, 0.02316783905029297, 0.023248895645141602, 0.02336790466308594]",tokens/s,43.143144191430196,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1004.232704,7156.465664,0.0,6761.218048,6730.975744,s,1,7.091578125,7.091578125,0.0,7.091578125,7.091578125,7.091578125,7.091578125,[7.091578125],,kWh,7.47206920834742e-06,8.131772279029663e-07,2.6544465679956275e-06,1.0939693004246014e-05,,MB,1350.61504,7190.020096,0.0,6784.28672,5879.090688,s,10,6.102068359375,0.6102068359375,0.0034555948911244496,0.6108451843261719,0.6128059692382812,0.614352294921875,0.61558935546875,"[0.60192724609375, 0.6122781982421875, 0.6089447631835937, 0.6087664184570313, 0.6105785522460937, 0.6085650024414062, 0.61111181640625, 0.6124623413085938, 0.6158986206054687, 0.611535400390625]",tokens/s,419.5298789248906,kWh,1.7794484573283517e-05,1.9624232949622367e-06,1.1749372144587646e-05,3.15062800128334e-05,tokens/kWh,8125364.20979322,MB,1388.425216,7192.117248,0.0,6784.28672,5879.093248,s,10,23.0571494140625,2.30571494140625,0.012005142879227621,2.3050274658203125,2.3150239501953127,2.3248480346679687,2.3327073022460936,"[2.334672119140625, 2.31217724609375, 2.308483642578125, 2.296367431640625, 2.296572998046875, 2.3066298828125, 2.303425048828125, 2.293894775390625, 2.29208544921875, 2.3128408203125]",tokens/s,27.323412304201167,kWh,6.846874325254622e-05,7.5523427275254046e-06,4.5330506852612535e-05,0.00012135159283268417,tokens/kWh,519152.64175281534,,s,630,23.055483802795376,0.03659600603618319,0.0005661317764007443,0.03645841598510742,0.037020870971679685,0.03727109375,0.038784446487426766,"[0.038057792663574216, 0.036905471801757815, 0.03668345642089844, 0.036488670349121094, 0.03668345642089844, 0.040451969146728516, 0.03691811370849609, 0.03684979248046875, 0.03659715270996094, 0.03651440048217774, 0.03667510223388672, 0.03657308959960937, 0.03655955123901367, 0.03668912124633789, 0.03886931228637695, 0.037033729553222657, 0.03700796890258789, 0.036907009124755856, 0.03712409591674805, 0.036894081115722656, 0.037198143005371095, 0.03705199813842774, 0.036584159851074216, 0.0365404167175293, 0.03669379043579102, 0.03692156982421875, 0.036773311614990235, 0.03676326370239258, 0.03671072006225586, 0.03741475296020508, 0.03729843139648437, 0.03726729583740234, 0.0368606071472168, 0.03718892669677734, 0.036825569152832034, 0.03666147232055664, 0.03700326538085937, 0.036711456298828125, 0.036805152893066406, 0.03674771118164062, 0.036808353424072265, 0.03692780685424805, 0.039954463958740236, 0.03708422470092773, 0.036559806823730466, 0.03750297546386719, 0.03710723114013672, 0.03696073532104492, 0.03687628936767578, 0.03713980865478515, 0.036923648834228516, 0.03698729705810547, 0.03708911895751953, 
0.03686006546020508, 0.03700060653686523, 0.03697484970092774, 0.03716540908813477, 0.03690198516845703, 0.03689081573486328, 0.03721289443969727, 0.03714815902709961, 0.03689932632446289, 0.03701958465576172, 0.03801939010620117, 0.03705670547485351, 0.03687763214111328, 0.03667407989501953, 0.036657150268554685, 0.03653612899780274, 0.036503902435302736, 0.037146625518798826, 0.03685289764404297, 0.036757537841796875, 0.03673171234130859, 0.03663052749633789, 0.036603134155273435, 0.036573951721191406, 0.036474815368652345, 0.03676268768310547, 0.03665999984741211, 0.03657923126220703, 0.03648748779296875, 0.03685171127319336, 0.03701145553588867, 0.03707686233520508, 0.03672281646728515, 0.03671244812011719, 0.03721807861328125, 0.03691772842407227, 0.03734092712402344, 0.036706302642822264, 0.03690086364746094, 0.036646976470947265, 0.03639900970458984, 0.03667731094360351, 0.03653814315795898, 0.03632799911499023, 0.03667967987060547, 0.036825088500976565, 0.03671161651611328, 0.03664070510864258, 0.036434814453125, 0.03646841430664063, 0.0366673583984375, 0.03640969467163086, 0.03659366226196289, 0.03644134521484375, 0.036279041290283205, 0.036483104705810544, 0.03641955184936523, 0.03664691162109375, 0.036826751708984376, 0.03651372909545898, 0.03659574508666992, 0.03652035140991211, 0.0363765754699707, 0.03659542465209961, 0.036499744415283204, 0.03642732620239258, 0.036733089447021486, 0.03632156753540039, 0.0366445426940918, 0.0370445442199707, 0.03644742584228516, 0.0374136962890625, 0.03671244812011719, 0.03967190551757813, 0.03775897598266602, 0.03655238342285156, 0.03646905517578125, 0.036413089752197265, 0.03644432067871094, 0.036491455078125, 0.036392192840576175, 0.03619305419921875, 0.036222721099853514, 0.03665238571166992, 0.03629555130004883, 0.03652947235107422, 0.03663897705078125, 0.036270145416259766, 0.0364189453125, 0.036389217376708985, 0.036363937377929687, 0.036807392120361326, 0.036262176513671876, 0.036337055206298825, 0.037034271240234375, 0.03613875198364258, 0.03642764663696289, 0.036305599212646485, 0.03670377731323242, 0.036420063018798826, 0.03674224090576172, 0.036335742950439454, 0.03637942504882812, 0.03623302459716797, 0.03666694259643555, 0.03623385620117187, 0.036224639892578125, 0.03634009552001953, 0.03676553726196289, 0.036538078308105466, 0.03674566268920899, 0.037814014434814455, 0.036880638122558596, 0.036724735260009765, 0.036450302124023434, 0.036345855712890625, 0.036677375793457034, 0.03654886245727539, 0.03697868728637695, 0.03660796737670898, 0.03645222473144531, 0.03653753662109375, 0.03668025588989258, 0.036581790924072266, 0.0367534065246582, 0.03682918548583984, 0.03675286483764648, 0.03675519943237305, 0.03678239822387695, 0.03647126388549805, 0.03661004638671875, 0.03626393508911133, 0.03794454574584961, 0.03666614532470703, 0.03676160049438477, 0.036618240356445314, 0.03733913421630859, 0.03684777450561524, 0.03659561538696289, 0.03648284912109375, 0.03657046508789062, 0.03640566253662109, 0.03637500762939453, 0.03638681411743164, 0.036635936737060545, 0.036388641357421876, 0.036192256927490236, 0.03652851104736328, 0.03727334213256836, 0.03665151977539063, 0.03653414535522461, 0.03644230270385742, 0.036520191192626957, 0.03638886260986328, 0.03622457504272461, 0.036190654754638674, 0.03656294250488281, 0.03616505432128906, 0.03626009750366211, 0.036261825561523436, 0.036346240997314455, 0.03637567901611328, 0.03636108779907227, 0.03665510559082031, 0.03647430419921875, 0.036364864349365235, 0.036370433807373044, 
0.03645430374145508, 0.03669820785522461, 0.03680460739135742, 0.036369632720947266, 0.03645868682861328, 0.03624406433105469, 0.03647103881835938, 0.03664870452880859, 0.03637247848510742, 0.036466270446777346, 0.03632304000854492, 0.0362064323425293, 0.03648912048339844, 0.036142017364501955, 0.036747295379638674, 0.03622079849243164, 0.036259456634521486, 0.036372318267822265, 0.03613785552978516, 0.03635993576049805, 0.036156959533691406, 0.03624998474121094, 0.03647027206420898, 0.03637212753295899, 0.03643283081054687, 0.03642367935180664, 0.0364251823425293, 0.03704207992553711, 0.03640790557861328, 0.0361835823059082, 0.03632726287841797, 0.03629471969604492, 0.037151840209960936, 0.03672067260742187, 0.03674771118164062, 0.03680883026123047, 0.036542144775390625, 0.03659542465209961, 0.03658844757080078, 0.036706302642822264, 0.037104705810546874, 0.03643622589111328, 0.03633427047729492, 0.036230369567871096, 0.0364304313659668, 0.03616787338256836, 0.036274177551269535, 0.03635929489135742, 0.036336128234863284, 0.0365715217590332, 0.036362049102783206, 0.03630713653564453, 0.03630489730834961, 0.036435966491699216, 0.03617792129516602, 0.0362762222290039, 0.03609366226196289, 0.0363458251953125, 0.03618220901489258, 0.03621696090698242, 0.03624070358276367, 0.03641356658935547, 0.03650822448730469, 0.036450302124023434, 0.036435966491699216, 0.03644416046142578, 0.036413185119628905, 0.037042430877685543, 0.036495361328125, 0.036519233703613284, 0.03638265609741211, 0.03641420745849609, 0.0365219841003418, 0.036723903656005856, 0.036571678161621095, 0.03727286529541016, 0.03645747375488281, 0.036650527954101564, 0.03640572738647461, 0.03631718444824219, 0.03621887969970703, 0.03632979202270508, 0.03616259384155274, 0.036230945587158205, 0.0362720947265625, 0.0362212142944336, 0.03634368133544922, 0.036254783630371094, 0.03627180862426758, 0.03662643051147461, 0.03634783935546875, 0.03646879959106445, 0.036267040252685546, 0.03666409683227539, 0.03624307250976563, 0.03728079986572266, 0.03648406219482422, 0.036511745452880856, 0.03649945449829101, 0.03632537460327148, 0.036392288208007814, 0.03645814514160156, 0.036357120513916014, 0.03661126327514649, 0.03669414520263672, 0.03643862533569336, 0.036487262725830076, 0.036636383056640624, 0.036659328460693356, 0.037550174713134765, 0.03933804702758789, 0.0369332160949707, 0.03656950378417969, 0.03681398391723633, 0.036387775421142576, 0.03661536026000976, 0.036347904205322266, 0.03626678466796875, 0.03658691024780274, 0.03647084808349609, 0.036469215393066405, 0.03638272094726563, 0.036279487609863284, 0.036428607940673825, 0.03649945449829101, 0.03639910507202149, 0.03651318359375, 0.03651440048217774, 0.03629795074462891, 0.036299518585205075, 0.04444384002685547, 0.03694790267944336, 0.03659670257568359, 0.03655084609985351, 0.03638143920898437, 0.03640028762817383, 0.03668201446533203, 0.036268062591552734, 0.036390495300292966, 0.03624604797363281, 0.03623772811889649, 0.03621683120727539, 0.03623680114746094, 0.036137470245361326, 0.036155391693115234, 0.03613622283935547, 0.03618684768676758, 0.03615129470825195, 0.03614310455322266, 0.03616153717041016, 0.03625529479980469, 0.03630124664306641, 0.03617587280273438, 0.03631020736694336, 0.03618899154663086, 0.03609190368652344, 0.03630867385864258, 0.036364608764648435, 0.03726892852783203, 0.036571102142333986, 0.036534881591796874, 0.036281665802001956, 0.036271999359130856, 0.03649209594726562, 0.03648128128051758, 0.03625462341308594, 0.03619430541992188, 
0.03620937728881836, 0.036321407318115236, 0.036395137786865234, 0.036405120849609375, 0.036253505706787106, 0.03634195327758789, 0.038384639739990234, 0.03662540817260742, 0.036408672332763674, 0.037776126861572265, 0.03665091323852539, 0.036757278442382815, 0.0364967041015625, 0.03709763336181641, 0.03827145767211914, 0.03673321533203125, 0.036939777374267575, 0.03629260635375976, 0.036361278533935545, 0.03633452987670899, 0.036317249298095704, 0.03641132736206055, 0.03644950485229492, 0.036307743072509766, 0.03627980804443359, 0.03703376007080078, 0.036289249420166016, 0.03618163299560547, 0.03689897537231445, 0.036503776550292966, 0.03649055862426758, 0.03643632125854492, 0.03640764617919922, 0.036264064788818356, 0.036544384002685545, 0.03617996978759765, 0.03666716766357422, 0.03709075164794922, 0.03734403228759765, 0.036634624481201174, 0.03635311889648438, 0.036350273132324216, 0.036534881591796874, 0.03644416046142578, 0.03686809539794922, 0.036478977203369144, 0.036279617309570314, 0.036270145416259766, 0.036241214752197264, 0.03620729446411133, 0.036329601287841795, 0.036396190643310546, 0.036112449645996095, 0.03626031875610351, 0.03724499130249023, 0.03651155090332031, 0.03624969482421875, 0.03635587310791016, 0.03642800140380859, 0.0363721923828125, 0.036432159423828124, 0.03632880020141602, 0.03639344024658203, 0.03645993423461914, 0.03623331069946289, 0.03619091033935547, 0.03630806350708008, 0.036262462615966794, 0.03641289520263672, 0.036289119720458986, 0.03627670288085937, 0.03640095901489258, 0.03834435272216797, 0.03664316940307617, 0.036329471588134765, 0.03673702239990234, 0.03672063827514648, 0.036705760955810546, 0.036373023986816404, 0.03628851318359375, 0.036341087341308594, 0.036399776458740235, 0.03618304061889648, 0.03621785736083984, 0.036378623962402344, 0.036716449737548826, 0.03645391845703125, 0.03631689453125, 0.03637299346923828, 0.03639046478271484, 0.03612089538574219, 0.036100574493408207, 0.03623526382446289, 0.036224510192871096, 0.0362889289855957, 0.036274272918701174, 0.03624739074707031, 0.036219039916992185, 0.036413440704345705, 0.03654860687255859, 0.03634726333618164, 0.036262527465820316, 0.036569087982177735, 0.0364031982421875, 0.03657318496704102, 0.03635599899291992, 0.036447776794433596, 0.03623788833618164, 0.03623030471801758, 0.036383583068847654, 0.03634518432617188, 0.03623798370361328, 0.03637247848510742, 0.03624959945678711, 0.03634902572631836, 0.03624028778076172, 0.03638272094726563, 0.03759894561767578, 0.03658422470092773, 0.03654975891113281, 0.03649219131469727, 0.03628012847900391, 0.03620297622680664, 0.03642339324951172, 0.0361673583984375, 0.0361802864074707, 0.036160736083984374, 0.03626710510253906, 0.03621795272827148, 0.03625225448608398, 0.036446208953857424, 0.036397056579589845, 0.03621852874755859, 0.03664726257324219, 0.03640729522705078, 0.03631513595581055, 0.03621683120727539, 0.03630403137207031, 0.03625398254394531, 0.036520511627197265, 0.036523136138916015, 0.0364450569152832, 0.03636627197265625, 0.036294113159179686, 0.03661248016357422, 0.03648329544067383, 0.03640054321289062, 0.03641609573364258, 0.036283615112304685, 0.036252159118652344, 0.03635795211791992, 0.0362828483581543, 0.03625721740722656, 0.036262462615966794, 0.0364400634765625, 0.0363392333984375, 0.03629923248291016, 0.0363372802734375, 0.03624179077148437, 0.036278270721435545, 0.03644646453857422, 0.03622614288330078, 0.03636275100708008, 0.03634118270874023, 0.03627648162841797, 0.0361879997253418, 0.03637094497680664, 
0.036317470550537106, 0.03664265441894531, 0.03641139221191406, 0.03638272094726563, 0.036218814849853516, 0.036463905334472656, 0.03653299331665039, 0.036517921447753905, 0.03645644760131836, 0.03637452697753906, 0.036445823669433594, 0.03643840026855469, 0.03623075103759766, 0.03767359924316406, 0.03704419326782227, 0.03676764678955078, 0.03663679885864258, 0.03668787384033203, 0.03670761489868164, 0.03662329483032226, 0.03667740631103516, 0.03643353652954102, 0.036485504150390625, 0.03628851318359375, 0.036757057189941406, 0.036439937591552736, 0.03635276794433594, 0.036398910522460935, 0.03653152084350586, 0.03644076919555664, 0.036800479888916014, 0.03651689529418945, 0.03655081558227539, 0.036515998840332034, 0.03647663879394531, 0.03639599990844727, 0.03666447830200195, 0.03643068695068359, 0.03676160049438477, 0.03689596939086914, 0.036907264709472656, 0.0385766716003418, 0.03682486343383789, 0.03661209487915039, 0.03635235214233398, 0.0363493766784668, 0.03648966217041016, 0.03727939224243164, 0.03635030364990234, 0.036408321380615234, 0.036381248474121095, 0.036383167266845706, 0.03641139221191406, 0.03669305419921875, 0.03634908676147461, 0.03629033660888672, 0.036241409301757815, 0.036354049682617184, 0.03720601654052735, 0.03641513442993164, 0.03635388946533203, 0.036433727264404296, 0.03629536056518555, 0.04035785675048828, 0.03676089477539062, 0.036584159851074216, 0.03655881500244141, 0.03637251281738281, 0.03666723251342773, 0.036526241302490235, 0.03847980880737305, 0.03672633743286133, 0.0365032958984375, 0.03665536117553711, 0.03703244781494141, 0.03653539276123047]",tokens/s,27.32538624600948,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,841.879552,12640.518144,0.0,12245.270528,12234.440192,s,1,7.38060107421875,7.38060107421875,0.0,7.38060107421875,7.38060107421875,7.38060107421875,7.38060107421875,[7.38060107421875],,kWh,1.417804526249521e-05,1.5536159205005573e-06,7.2327835639977855e-06,2.2964444746993552e-05,,MB,1094.017024,12923.633664,0.0,12517.900288,12440.744448,s,10,11.752084594726563,1.1752084594726564,0.005107282186621974,1.1762398071289062,1.1798683715820313,1.1802225158691406,1.1805058312988281,"[1.1633475341796875, 1.1689735107421875, 1.17531640625, 1.173837158203125, 1.1766422119140625, 1.178609375, 1.1791546630859375, 1.1797896728515624, 1.17583740234375, 1.18057666015625]",tokens/s,217.83369404511708,kWh,3.425772850499953e-05,3.778108879748517e-06,2.2671157025799803e-05,6.070699441054785e-05,tokens/kWh,4216977.013698441,MB,1109.85216,12986.548224,0.0,12580.814848,12543.681024,s,10,37.15011059570313,3.7150110595703127,0.0038259498988241136,3.7155910644531254,3.717879052734375,3.72025166015625,3.7221497460937503,"[3.707107421875, 3.715757568359375, 3.7126572265625, 3.712315185546875, 3.715424560546875, 3.71710791015625, 3.71376318359375, 3.717351806640625, 3.71600146484375, 
3.722624267578125]",tokens/s,16.958226769663167,kWh,0.00010866937428958418,1.1985650922047147e-05,7.236253011220009e-05,0.0001930175553238314,tokens/kWh,326395.18148648704,,s,630,37.14671617889402,0.05896304155380006,0.0003156827940853869,0.05896268844604492,0.05928697166442871,0.05940313873291016,0.060383857650756836,"[0.05999932861328125, 0.058727359771728514, 0.05837321472167969, 0.05853577423095703, 0.05843865585327149, 0.05848601531982422, 0.05831142425537109, 0.0584376335144043, 0.05838838577270508, 0.058574337005615235, 0.058396385192871096, 0.05852979278564453, 0.05912623977661133, 0.058511775970458986, 0.05869977569580078, 0.05861500930786133, 0.05880207824707031, 0.05899766540527344, 0.05897830581665039, 0.05879600143432617, 0.058531841278076174, 0.05841632080078125, 0.05851532745361328, 0.058525760650634764, 0.05855731201171875, 0.0586608657836914, 0.05935103988647461, 0.05890252685546875, 0.05878121566772461, 0.05884934234619141, 0.05882102584838867, 0.05874835205078125, 0.05885599899291992, 0.058933246612548826, 0.059031585693359374, 0.05919334411621094, 0.05912547302246094, 0.05888230514526367, 0.05868364715576172, 0.05875686264038086, 0.05893257522583008, 0.058915489196777346, 0.05899625778198242, 0.05898211288452149, 0.05889510345458984, 0.058889919281005856, 0.058732864379882815, 0.058738689422607425, 0.05881241607666016, 0.05911129760742188, 0.05926105499267578, 0.059041534423828125, 0.0590195198059082, 0.05901091384887695, 0.059146400451660155, 0.058916862487792966, 0.05890380859375, 0.05901324844360351, 0.05903424072265625, 0.05938995361328125, 0.05917612838745117, 0.05891372680664062, 0.05913177490234375, 0.06052249526977539, 0.05883084869384766, 0.05861171340942383, 0.05854412841796875, 0.058423297882080075, 0.05864857482910156, 0.058515296936035154, 0.058488094329833984, 0.05856752014160156, 0.05899814224243164, 0.0586267204284668, 0.05878579330444336, 0.05865382385253906, 0.05896268844604492, 0.058953697204589844, 0.058808353424072264, 0.058925182342529296, 0.0588043212890625, 0.05885737609863281, 0.05882195281982422, 0.05873487854003906, 0.0586346549987793, 0.058673152923583986, 0.05860966491699219, 0.058710014343261716, 0.058916160583496094, 0.058813121795654295, 0.05889596939086914, 0.05879235076904297, 0.05901091384887695, 0.059078815460205075, 0.05912128067016602, 0.05942499160766602, 0.059232574462890625, 0.05945737457275391, 0.05916649627685547, 0.05919356918334961, 0.05924863815307617, 0.059025409698486325, 0.05893939208984375, 0.058703872680664064, 0.058866943359375, 0.05896268844604492, 0.058840320587158206, 0.05895423889160156, 0.05923455810546875, 0.05900294494628906, 0.059319393157958984, 0.059335296630859374, 0.05948819351196289, 0.05927350234985351, 0.05917055892944336, 0.059074817657470705, 0.059172863006591796, 0.05908070373535156, 0.05954150390625, 0.059089984893798825, 0.05897312164306641, 0.0589571533203125, 0.059084449768066406, 0.05915075302124023, 0.0589543342590332, 0.05915852737426758, 0.060614654541015625, 0.05891900634765625, 0.05849827194213867, 0.058653278350830076, 0.05840300750732422, 0.058479713439941405, 0.058436511993408206, 0.058570751190185545, 0.05862771224975586, 0.05852812957763672, 0.05885257720947266, 0.05851567840576172, 0.058576702117919925, 0.058906688690185546, 0.05884998321533203, 0.05869583892822266, 0.05945942306518555, 0.059150337219238285, 0.058910591125488285, 0.05881622314453125, 0.058589599609375, 0.05854719924926758, 0.058618881225585937, 0.05868342590332031, 0.058755039215087894, 0.05885974502563476, 
0.05876508712768555, 0.058759166717529294, 0.05881961441040039, 0.05871020889282227, 0.05907904052734375, 0.05906790542602539, 0.059105663299560546, 0.05914064025878906, 0.05915238571166992, 0.05899776077270508, 0.05904793548583984, 0.058893310546875, 0.058950847625732425, 0.058988574981689454, 0.058854175567626954, 0.058963966369628903, 0.05892095947265625, 0.05890614318847656, 0.05889686584472656, 0.05892902374267578, 0.059039169311523435, 0.05892323303222656, 0.05900511932373047, 0.05905155181884766, 0.05939878463745117, 0.05923443222045898, 0.05934044647216797, 0.05924508666992188, 0.05915590286254883, 0.05906265640258789, 0.05895491027832031, 0.059093791961669924, 0.05922617721557617, 0.05904723358154297, 0.05897507095336914, 0.05900886535644531, 0.05911072158813477, 0.06039766311645508, 0.05884249496459961, 0.05860611343383789, 0.058449920654296876, 0.058342910766601565, 0.05838681411743164, 0.05829235076904297, 0.05855136108398438, 0.05854832077026367, 0.05859209442138672, 0.05850243377685547, 0.058581729888916016, 0.058492416381835936, 0.05890646362304688, 0.05859804916381836, 0.05860761642456055, 0.058947582244873044, 0.059099136352539064, 0.05869772720336914, 0.05866495895385742, 0.0585904312133789, 0.05875996780395508, 0.05871615982055664, 0.058654720306396485, 0.058834720611572265, 0.058895614624023436, 0.05874784088134766, 0.05896527862548828, 0.05874560165405274, 0.05895161437988281, 0.05891897583007812, 0.05894553756713867, 0.05907046508789063, 0.05906556701660156, 0.059284351348876954, 0.05925183868408203, 0.05929040145874023, 0.05918467330932617, 0.058964000701904294, 0.05883964920043945, 0.058724193572998046, 0.05890662384033203, 0.05888204956054687, 0.05890252685546875, 0.05897216033935547, 0.05907046508789063, 0.05916617584228516, 0.05912793731689453, 0.05901558303833008, 0.05918467330932617, 0.05950716781616211, 0.05940825653076172, 0.05910134506225586, 0.059031105041503905, 0.05905039978027344, 0.05907455825805664, 0.05907660675048828, 0.0591278076171875, 0.059084800720214846, 0.05939302444458008, 0.059020286560058595, 0.05916672134399414, 0.05920134353637695, 0.0606530876159668, 0.058823200225830076, 0.058709087371826174, 0.058522464752197266, 0.05850896072387695, 0.05868988800048828, 0.05851264190673828, 0.05855104064941406, 0.05850124740600586, 0.05857238388061523, 0.05884700775146484, 0.05874431991577148, 0.05870265579223633, 0.05878406524658203, 0.05881948852539062, 0.05868233489990234, 0.05884108734130859, 0.05897750473022461, 0.05888694381713867, 0.05900697708129883, 0.05915238571166992, 0.05862400054931641, 0.05874470520019531, 0.058721630096435544, 0.0586391372680664, 0.05880569458007812, 0.05894406509399414, 0.059041793823242185, 0.058931102752685545, 0.05909097671508789, 0.059004222869873044, 0.05897702407836914, 0.05887171173095703, 0.059041278839111325, 0.05937622451782226, 0.05941862487792969, 0.05895539093017578, 0.05897452926635742, 0.05880223846435547, 0.0590643196105957, 0.05903974533081055, 0.058964126586914065, 0.058910560607910153, 0.058969825744628904, 0.05904412841796875, 0.059145790100097656, 0.05919996643066406, 0.05919247817993164, 0.05913888168334961, 0.05918124771118164, 0.05942252731323242, 0.05936304092407227, 0.05921820831298828, 0.05916204833984375, 0.05918572616577149, 0.05895270538330078, 0.05884415817260742, 0.05904592132568359, 0.059027423858642576, 0.05907455825805664, 0.059084800720214846, 0.059084800720214846, 0.05931792068481445, 0.06030950546264648, 0.05882470321655273, 0.05849702453613281, 0.05855417633056641, 
0.058599006652832034, 0.05862051010131836, 0.05859328079223633, 0.05885504150390625, 0.05869417572021484, 0.05868288040161133, 0.05870627212524414, 0.058552223205566405, 0.05859542465209961, 0.058867263793945315, 0.05890220642089844, 0.058727169036865236, 0.059305633544921875, 0.05950860977172852, 0.05886614227294922, 0.05884438323974609, 0.058815265655517576, 0.0591071662902832, 0.058643936157226566, 0.058663616180419924, 0.05893280029296875, 0.05895212936401367, 0.05898438262939453, 0.05884460830688477, 0.058887809753417966, 0.05897727966308594, 0.058961376190185544, 0.058837535858154294, 0.05906985473632813, 0.059127521514892575, 0.059114368438720706, 0.05911286544799805, 0.05910179138183594, 0.05921791839599609, 0.05891401672363281, 0.059018016815185544, 0.05902054214477539, 0.05905075073242187, 0.059041793823242185, 0.058977886199951174, 0.05892275238037109, 0.05909324645996094, 0.05914870452880859, 0.059022815704345706, 0.058951423645019534, 0.05896271896362305, 0.05929081726074219, 0.05931500625610352, 0.05932457733154297, 0.05928537750244141, 0.059444862365722655, 0.0590912971496582, 0.059035873413085936, 0.05917059326171875, 0.059150337219238285, 0.059186656951904296, 0.059087390899658206, 0.05949033737182617, 0.059286590576171874, 0.0603873291015625, 0.05882032012939453, 0.05850809478759766, 0.058455615997314456, 0.05847833633422852, 0.05841561508178711, 0.05851359939575195, 0.05853152084350586, 0.05855059051513672, 0.05873481750488281, 0.05867507171630859, 0.05864233779907226, 0.05863401412963867, 0.058646751403808595, 0.05874892807006836, 0.05875299072265625, 0.05881043243408203, 0.05888988876342773, 0.058813953399658205, 0.058772480010986325, 0.05858915328979492, 0.05866684722900391, 0.05880217742919922, 0.05890572738647461, 0.05871731185913086, 0.058746238708496094, 0.058880287170410155, 0.05901116943359375, 0.058851329803466794, 0.058963966369628903, 0.05899468612670898, 0.05923395156860352, 0.05903721618652344, 0.058966846466064454, 0.05894553756713867, 0.05912985610961914, 0.05921692657470703, 0.05906940841674805, 0.05890457534790039, 0.05937561416625976, 0.058910720825195315, 0.05897340774536133, 0.058937793731689454, 0.0590250244140625, 0.05904662322998047, 0.05904572677612305, 0.058931358337402345, 0.05895100784301758, 0.05917379379272461, 0.05944895935058594, 0.05917241668701172, 0.05885923385620117, 0.05895999908447266, 0.059173473358154295, 0.05949248123168945, 0.05969305419921875, 0.05897571182250977, 0.05902582550048828, 0.05908230209350586, 0.059011646270751957, 0.05916057586669922, 0.05917302322387695, 0.05940806579589844, 0.0604653434753418, 0.05899248123168945, 0.0585230712890625, 0.058399105072021486, 0.05863004684448242, 0.058402305603027345, 0.058577312469482425, 0.05851299285888672, 0.058552959442138675, 0.058638526916503904, 0.05881241607666016, 0.05895782470703125, 0.05872956848144531, 0.058839969635009766, 0.05877110290527344, 0.05877590560913086, 0.05916057586669922, 0.05910502243041992, 0.05878195190429687, 0.058670143127441406, 0.05908486557006836, 0.05890089416503906, 0.058861793518066405, 0.05884259033203125, 0.058753631591796876, 0.05890848159790039, 0.058849662780761716, 0.05899017715454102, 0.0589881591796875, 0.05894595336914062, 0.05898073577880859, 0.0591234245300293, 0.059138336181640626, 0.05913977432250977, 0.059144512176513675, 0.05912566375732422, 0.05905215835571289, 0.059076576232910155, 0.05907660675048828, 0.059006175994873046, 0.05908153533935547, 0.05903926467895508, 0.058971969604492185, 0.05924025726318359, 0.05894636917114258, 
0.059025409698486325, 0.05919049453735352, 0.05906438446044922, 0.05905481719970703, 0.05940224075317383, 0.05937062454223633, 0.059267967224121094, 0.059230335235595705, 0.059275009155273437, 0.059410430908203124, 0.059312255859375, 0.05907436752319336, 0.05913417434692383, 0.05901689529418945, 0.05919772720336914, 0.05901276779174805, 0.059216224670410156, 0.05917625427246094, 0.06037535858154297, 0.0588590087890625, 0.05867366409301758, 0.0584169921875, 0.05848489761352539, 0.05843379211425781, 0.05856972885131836, 0.058538753509521486, 0.05853567886352539, 0.058566177368164066, 0.05868207931518555, 0.058789249420166015, 0.05885948944091797, 0.05870249557495117, 0.05881856155395508, 0.05890457534790039, 0.059229248046875, 0.05913491058349609, 0.05901039886474609, 0.058872032165527347, 0.05869612884521484, 0.058842910766601565, 0.05880416107177734, 0.05881679916381836, 0.05888332748413086, 0.05888691329956055, 0.058915969848632815, 0.059035968780517575, 0.05883142471313477, 0.05897206497192383, 0.05910537719726563, 0.05902748870849609, 0.05908067321777344, 0.0590909423828125, 0.05910528182983398, 0.05920153427124023, 0.05922633743286133, 0.05908662414550781, 0.05886777496337891, 0.05884921646118164, 0.05886361694335938, 0.058893600463867185, 0.05892499160766602, 0.05933747100830078, 0.05896809768676758, 0.059066497802734375, 0.05911337661743164, 0.05917283248901367, 0.05921177673339844, 0.059391551971435544, 0.05934038543701172, 0.059275360107421876, 0.05929616165161133, 0.05922649765014648, 0.059231296539306644, 0.059274143218994144, 0.058981472015380856, 0.05895414352416992, 0.05913782501220703, 0.059120384216308594, 0.05887382507324219, 0.058992641448974606, 0.05922812652587891, 0.0604139518737793, 0.05894710540771484, 0.05870230484008789, 0.05864572906494141, 0.058583839416503906, 0.05867833709716797, 0.05860857772827149, 0.058738689422607425, 0.058761215209960936, 0.05891481781005859, 0.05876041412353516, 0.05862684631347656, 0.05875487899780273, 0.05874710464477539, 0.05874070358276367, 0.05892252731323242, 0.05909142303466797, 0.05925273513793945, 0.059383167266845706, 0.05929638290405274, 0.059057697296142575, 0.05873916625976563, 0.05876435089111328, 0.058886592864990234, 0.05922870254516602, 0.05901836776733398, 0.05902016067504883, 0.058919967651367186, 0.05904838562011719, 0.0590300178527832, 0.05894569778442383, 0.05902320098876953, 0.05931008148193359, 0.05937753677368164, 0.05957235336303711, 0.05927084732055664, 0.058992641448974606, 0.05901708984375, 0.059003326416015626, 0.059030975341796875, 0.05907638549804688, 0.0591798095703125, 0.0590643196105957, 0.059039295196533205, 0.05907500839233398, 0.05910732650756836, 0.05925628662109375, 0.05908124923706055, 0.05931622314453125, 0.05945257568359375, 0.05925084686279297, 0.059219905853271484, 0.05917567825317383, 0.05940150451660156, 0.05947055816650391, 0.059061504364013674, 0.05922278213500977, 0.05940387344360352, 0.05949276733398438, 0.05931008148193359, 0.059138046264648435, 0.05922332763671875, 0.05939683151245117]",tokens/s,16.95977638954671,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,894.365696,3900.571648,0.0,3498.049536,3295.335424,s,1,7.64323046875,7.64323046875,0.0,7.64323046875,7.64323046875,7.64323046875,7.64323046875,[7.64323046875],,kWh,5.87347620418465e-06,6.406084998500892e-07,1.91000152799381e-06,8.42408623202855e-06,,MB,1243.848704,4064.149504,0.0,3651.141632,3408.337408,s,10,2.6035013122558595,0.2603501312255859,0.002045786645764332,0.25966404724121095,0.26323404846191406,0.2638071640014648,0.26426565643310546,"[0.2643802795410156, 0.2602549133300781, 0.257736083984375, 0.25848175048828126, 0.263106689453125, 0.2589918212890625, 0.26055450439453126, 0.25902288818359376, 0.25907318115234373, 0.26189920043945314]",tokens/s,983.2912270675345,kWh,7.911197350787775e-06,8.72456495988852e-07,5.232586768648969e-06,1.4016240615425597e-05,tokens/kWh,18264526.631931446,MB,1269.039104,4066.246656,0.0,3653.238784,3408.339968,s,10,14.494636962890624,1.4494636962890626,0.011962762476419222,1.4495819702148438,1.4636225219726564,1.468571697998047,1.4725310388183595,"[1.454275634765625, 1.451805908203125, 1.4735208740234376, 1.462522705078125, 1.4444423828125, 1.452262939453125, 1.4362078857421876, 1.4473580322265625, 1.42964501953125, 1.4425955810546875]",tokens/s,43.464351788384555,kWh,4.187949765920756e-05,4.618439451797141e-06,2.6323105142551368e-05,7.282104225355607e-05,tokens/kWh,865134.5552105651,,s,630,14.49192337989807,0.023003052983965194,0.0004842300845926659,0.022926831245422365,0.023383289909362795,0.023480123138427737,0.024775126705169678,"[0.024006975173950194, 0.023187456130981447, 0.023406272888183595, 0.023359424591064454, 0.023271808624267577, 0.023347200393676756, 0.02332854461669922, 0.023088415145874022, 0.022835552215576174, 0.023220832824707032, 0.02330828857421875, 0.022974464416503908, 0.023049856185913088, 0.02308294486999512, 0.023086591720581053, 0.023301055908203125, 0.023031808853149413, 0.02289606475830078, 0.023151168823242186, 0.02330828857421875, 0.02304204750061035, 0.022902431488037108, 0.02304240036010742, 0.023051904678344726, 0.02317964744567871, 0.023230464935302734, 0.023463935852050782, 0.02326527976989746, 0.02366054344177246, 0.023154687881469727, 0.02344940757751465, 0.02320198440551758, 0.02347327995300293, 0.023243648529052734, 0.022937599182128905, 0.022882303237915038, 0.023334911346435547, 0.023084672927856445, 0.022982784271240234, 0.02304844856262207, 0.02314963150024414, 0.02294620704650879, 0.02296681594848633, 0.022992895126342772, 0.02296575927734375, 0.02287455940246582, 0.022814783096313476, 0.02275225639343262, 0.022803455352783202, 0.023005504608154297, 0.02306220817565918, 0.022784000396728517, 0.02307872009277344, 0.02257734489440918, 0.02272377586364746, 0.022784832000732422, 0.022840511322021483, 0.02272329521179199, 0.02308924865722656, 0.022742080688476562, 0.02277676773071289, 0.022837120056152342, 0.02285171127319336, 0.023794431686401368, 0.02316080093383789, 0.02309084892272949, 0.02281884765625, 0.02289699172973633, 0.02287001609802246, 0.022923263549804687, 0.022834623336791992, 0.022810943603515627, 0.023104896545410158, 0.0229036808013916, 0.02284339141845703, 0.02310348892211914, 0.022763296127319334, 0.022973983764648438, 0.022792224884033204, 0.023120256423950197, 0.022937759399414063, 0.02309542465209961, 0.023146303176879882, 0.023021760940551757, 0.023237695693969728, 
0.022905536651611328, 0.022970624923706055, 0.023061567306518555, 0.022893503189086915, 0.02308268737792969, 0.022970687866210936, 0.02342911911010742, 0.02288217544555664, 0.022853343963623048, 0.02302367973327637, 0.02329408073425293, 0.023010976791381837, 0.022741472244262696, 0.022810720443725587, 0.02289398384094238, 0.02293404769897461, 0.023121248245239256, 0.022989536285400392, 0.023007232666015624, 0.022896575927734374, 0.022896703720092774, 0.022859264373779296, 0.022991455078125, 0.02311363220214844, 0.02289459228515625, 0.022955743789672852, 0.022890911102294922, 0.023457664489746094, 0.023100479125976563, 0.02284623908996582, 0.02321651268005371, 0.022929183959960936, 0.023222272872924804, 0.02302566337585449, 0.023395328521728515, 0.023202367782592773, 0.023185760498046874, 0.023785568237304686, 0.02319721603393555, 0.023045631408691408, 0.02329251289367676, 0.023560192108154295, 0.023138303756713868, 0.023108640670776368, 0.02290377616882324, 0.02308095932006836, 0.023040000915527343, 0.023126016616821288, 0.023201471328735353, 0.023019840240478515, 0.023199424743652344, 0.023072416305541993, 0.023005855560302733, 0.023412736892700195, 0.02302566337585449, 0.022962175369262695, 0.023363584518432616, 0.023074207305908204, 0.023234527587890626, 0.023473791122436524, 0.024771263122558593, 0.023308000564575194, 0.02318191909790039, 0.023381952285766602, 0.02341075134277344, 0.023463615417480467, 0.02422528076171875, 0.023132991790771485, 0.023412736892700195, 0.023400287628173828, 0.023363744735717774, 0.023410432815551756, 0.023539968490600586, 0.02326313591003418, 0.02297660827636719, 0.023668352127075194, 0.023349632263183595, 0.022999040603637694, 0.02312931251525879, 0.022829856872558594, 0.023526784896850585, 0.023232992172241212, 0.023120031356811524, 0.023001087188720702, 0.022984832763671876, 0.023307775497436522, 0.02313007926940918, 0.023277215957641602, 0.027462272644042968, 0.023330944061279297, 0.023339008331298827, 0.023152639389038086, 0.02300713539123535, 0.02335958480834961, 0.023448575973510744, 0.023221248626708983, 0.02336307144165039, 0.02324118423461914, 0.023441312789916992, 0.02346406364440918, 0.023349248886108398, 0.023347200393676756, 0.02409881591796875, 0.02477670478820801, 0.023947391510009765, 0.023247295379638672, 0.023201791763305665, 0.023434751510620116, 0.023491071701049804, 0.02366054344177246, 0.02308710479736328, 0.023398399353027344, 0.023203840255737306, 0.023245855331420897, 0.023478239059448243, 0.024359296798706055, 0.023423616409301757, 0.02292291259765625, 0.022982336044311522, 0.022990528106689452, 0.022825408935546875, 0.02301910400390625, 0.023177663803100587, 0.02297088050842285, 0.022984512329101564, 0.02298486328125, 0.022978591918945312, 0.022871103286743164, 0.02300819206237793, 0.022879392623901366, 0.022894847869873048, 0.02284604835510254, 0.022721824645996095, 0.022903520584106444, 0.02290483283996582, 0.023035903930664063, 0.022951936721801756, 0.022979743957519533, 0.02327347183227539, 0.023001087188720702, 0.0234003849029541, 0.02304057693481445, 0.023163232803344726, 0.02331648063659668, 0.023431039810180663, 0.023281791687011718, 0.023229856491088868, 0.02321878433227539, 0.023227424621582032, 0.023233312606811524, 0.0231077766418457, 0.023023616790771483, 0.02305843162536621, 0.02292323112487793, 0.023288864135742188, 0.022948863983154297, 0.022972543716430663, 0.023977855682373046, 0.02347417640686035, 0.023152639389038086, 0.023228416442871092, 0.022923263549804687, 0.0228024959564209, 0.022961503982543947, 
0.02661027145385742, 0.022983936309814452, 0.022977184295654297, 0.023862016677856444, 0.02315817642211914, 0.023059040069580077, 0.022822912216186524, 0.022951936721801756, 0.022986751556396484, 0.022564863204956053, 0.022781951904296875, 0.02305561637878418, 0.022809343338012697, 0.022631872177124025, 0.022972511291503905, 0.02265750312805176, 0.022820863723754883, 0.02287740707397461, 0.022595615386962892, 0.02307967948913574, 0.025076896667480468, 0.022765823364257812, 0.022826847076416017, 0.02288102340698242, 0.023248895645141602, 0.022800384521484376, 0.02269388771057129, 0.022740415573120117, 0.024983360290527345, 0.02310220718383789, 0.022658624649047852, 0.022904447555541992, 0.023050783157348632, 0.022927839279174803, 0.02282476806640625, 0.02250268745422363, 0.022829696655273436, 0.022859872817993163, 0.02278131294250488, 0.022922975540161133, 0.022688671112060545, 0.022697984695434572, 0.022734848022460938, 0.022794240951538085, 0.02268083190917969, 0.0227007999420166, 0.022707199096679686, 0.022701055526733398, 0.022820735931396486, 0.02271039962768555, 0.022804256439208984, 0.022669536590576172, 0.022841344833374022, 0.02286969566345215, 0.023002944946289062, 0.023050752639770508, 0.022738943099975584, 0.023013376235961915, 0.022915071487426757, 0.022716415405273437, 0.022958080291748048, 0.022831071853637697, 0.022853055953979493, 0.022696544647216797, 0.02284339141845703, 0.02305843162536621, 0.023456480026245115, 0.022933408737182616, 0.022769760131835938, 0.022808576583862306, 0.023702527999877928, 0.02332569694519043, 0.023481664657592775, 0.022987455368041993, 0.023021568298339845, 0.022889984130859374, 0.022925823211669923, 0.023146495819091797, 0.022834463119506834, 0.022667999267578124, 0.022585344314575196, 0.022865280151367188, 0.022819456100463868, 0.02265088081359863, 0.022697504043579102, 0.022589920043945312, 0.023027679443359376, 0.02514691162109375, 0.023381696701049805, 0.02289462471008301, 0.02304489517211914, 0.022831104278564454, 0.02290678405761719, 0.023186752319335938, 0.022844192504882812, 0.02286591911315918, 0.02292531204223633, 0.022769664764404295, 0.022986112594604494, 0.023065216064453126, 0.02292300796508789, 0.022952159881591796, 0.02325302314758301, 0.02308095932006836, 0.022939647674560547, 0.02284873580932617, 0.022921056747436525, 0.02278700828552246, 0.022734655380249023, 0.023023103713989256, 0.022973152160644533, 0.02273891258239746, 0.02315251159667969, 0.023517311096191405, 0.023164928436279295, 0.023439296722412108, 0.02308006477355957, 0.023239328384399415, 0.02342531204223633, 0.02327142333984375, 0.023451648712158202, 0.023195648193359376, 0.023134208679199218, 0.023070112228393554, 0.023166688919067382, 0.022870912551879882, 0.02277903938293457, 0.022885568618774416, 0.022951583862304687, 0.02328153610229492, 0.0228023681640625, 0.022771936416625976, 0.022755264282226562, 0.02271449661254883, 0.022917791366577147, 0.022916191101074217, 0.022987295150756835, 0.02261849594116211, 0.022632448196411133, 0.02265907287597656, 0.022849056243896486, 0.022888927459716796, 0.02294495964050293, 0.022739776611328123, 0.022665216445922853, 0.022749183654785156, 0.02285772705078125, 0.022900703430175782, 0.022911008834838868, 0.02293270492553711, 0.022969120025634764, 0.022777856826782225, 0.022824384689331054, 0.022964704513549806, 0.022622304916381834, 0.022556671142578123, 0.022480735778808592, 0.022640159606933594, 0.022804256439208984, 0.022684511184692384, 0.022974464416503908, 0.022630271911621095, 0.022578880310058592, 
0.02247715187072754, 0.022631647109985352, 0.022698528289794923, 0.022763872146606447, 0.022648704528808593, 0.022814367294311525, 0.022712799072265626, 0.022792192459106447, 0.0229354248046875, 0.023395999908447266, 0.022753248214721678, 0.022907392501831055, 0.02271820831298828, 0.0228723201751709, 0.022595584869384764, 0.022688831329345703, 0.02259654426574707, 0.022814720153808594, 0.02269545555114746, 0.0231200008392334, 0.02304150390625, 0.02272550392150879, 0.022734848022460938, 0.02276460838317871, 0.022781951904296875, 0.022772672653198243, 0.022708223342895507, 0.022871807098388673, 0.022913280487060546, 0.023613279342651367, 0.02296278381347656, 0.022790143966674805, 0.02277971267700195, 0.022993087768554688, 0.02279529571533203, 0.02290355110168457, 0.02301888084411621, 0.023085439682006836, 0.022927839279174803, 0.02316713523864746, 0.02296611213684082, 0.022988800048828126, 0.022846975326538087, 0.023486719131469727, 0.023001344680786132, 0.022837247848510742, 0.022732032775878906, 0.022829280853271485, 0.02294428825378418, 0.02288435173034668, 0.022904447555541992, 0.02294425582885742, 0.023342079162597656, 0.022937952041625978, 0.023065120697021484, 0.023121919631958008, 0.02324470329284668, 0.022888383865356444, 0.02278416061401367, 0.022808576583862306, 0.022717727661132812, 0.022735584259033204, 0.022779424667358397, 0.02282361602783203, 0.022779680252075194, 0.023127904891967775, 0.02319539260864258, 0.02318396759033203, 0.022886207580566406, 0.022814720153808594, 0.022826847076416017, 0.022939807891845704, 0.023010656356811522, 0.02286249542236328, 0.02293350410461426, 0.02280966377258301, 0.022727615356445314, 0.02281056022644043, 0.022779840469360352, 0.023008800506591796, 0.023024063110351562, 0.022947423934936522, 0.022859743118286133, 0.023208032608032225, 0.022910623550415038, 0.023063392639160157, 0.022998111724853516, 0.02307164764404297, 0.023230464935302734, 0.02310300827026367, 0.023169023513793945, 0.023149023056030272, 0.024000543594360352, 0.0230743350982666, 0.022936031341552733, 0.022700031280517577, 0.022662879943847657, 0.022722496032714843, 0.02262841606140137, 0.022638879776000976, 0.022638463973999025, 0.02272787284851074, 0.02251372718811035, 0.02257302474975586, 0.022831296920776366, 0.02271878433227539, 0.022685216903686523, 0.022708192825317382, 0.022694591522216798, 0.02272892761230469, 0.02267955207824707, 0.02265497589111328, 0.022740800857543944, 0.02274643135070801, 0.022571168899536132, 0.022882272720336914, 0.02252467155456543, 0.02258950424194336, 0.022511455535888673, 0.022386304855346678, 0.022476608276367188, 0.022542144775390623, 0.022592351913452147, 0.022696191787719727, 0.022537984848022462, 0.02272870445251465, 0.022654592514038088, 0.022673791885375976, 0.022683647155761717, 0.022629440307617186, 0.022664127349853517, 0.02280243110656738, 0.02278144073486328, 0.023120128631591796, 0.022675712585449218, 0.022754335403442384, 0.022713151931762696, 0.022644287109375, 0.0226977596282959, 0.022700031280517577, 0.02261894416809082, 0.022598720550537108, 0.02277676773071289, 0.022918304443359374, 0.022477664947509766, 0.022564863204956053, 0.02260153579711914, 0.02256915283203125, 0.02265292739868164, 0.022683488845825196, 0.022562976837158202, 0.022618112564086915, 0.022450176239013672, 0.02247065544128418, 0.022583232879638673, 0.023400640487670897, 0.024155296325683594, 0.022614688873291017, 0.02277142333984375, 0.02288863945007324, 0.023021087646484376, 0.022792768478393555, 0.022686975479125977, 0.022882911682128908, 
0.022907039642333985, 0.02308095932006836, 0.02278144073486328, 0.022684160232543944, 0.022623519897460937, 0.023435359954833986, 0.022602144241333007, 0.022610143661499025, 0.022537471771240235, 0.022478912353515627, 0.02296403121948242, 0.02245910453796387, 0.022472864151000978, 0.022343040466308594, 0.022555200576782227, 0.02272163200378418, 0.022692256927490235, 0.02266694450378418, 0.022766464233398436, 0.022730752944946288, 0.022539424896240234, 0.022718336105346678, 0.022663936614990235, 0.02265519905090332, 0.02280441665649414, 0.02280624008178711, 0.022483327865600585, 0.022560543060302734, 0.02284713554382324, 0.022612512588500975, 0.022549728393554687, 0.02273276710510254, 0.022991680145263673, 0.022757375717163086, 0.022548479080200197, 0.02250886344909668, 0.022555135726928712, 0.02263612747192383, 0.02251408004760742, 0.022544576644897462, 0.022626304626464845, 0.02281622314453125, 0.022589984893798827, 0.02934982490539551, 0.024397855758666993, 0.023095327377319334, 0.02284339141845703, 0.022931455612182617, 0.02303081512451172, 0.022857887268066406, 0.022960960388183595, 0.022738943099975584, 0.022759424209594727, 0.022971519470214842]",tokens/s,43.4724903992993,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,888.373248,6266.159104,0.0,5863.636992,5744.700416,s,1,7.03466845703125,7.03466845703125,0.0,7.03466845703125,7.03466845703125,7.03466845703125,7.03466845703125,[7.03466845703125],,kWh,6.274118433293551e-06,6.844636622749819e-07,2.2855573839908327e-06,9.244139479559367e-06,,MB,1219.477504,6494.748672,0.0,6081.7408,6021.145088,s,10,4.899756317138672,0.48997563171386715,0.003788598740680617,0.4915230102539062,0.49275341796875,0.4929202728271484,0.49305375671386714,"[0.48052694702148435, 0.4901853942871094, 0.49171466064453123, 0.49238015747070313, 0.4852935791015625, 0.49050091552734376, 0.49133135986328125, 0.4927163391113281, 0.49201983642578123, 0.49308712768554686]",tokens/s,522.4749628967206,kWh,1.4312915218053808e-05,1.5784732264566353e-06,9.496145163047298e-06,2.5387533607557743e-05,tokens/kWh,10083689.26092884,MB,1249.816576,6557.663232,0.0,6144.65536,6123.871232,s,10,20.521436645507816,2.052143664550781,0.003896263801029672,2.0517861328125,2.05771884765625,2.0580939208984375,2.0583939794921875,"[2.0464171142578125, 2.049420654296875, 2.048068359375, 2.049048828125, 2.052938720703125, 2.058468994140625, 2.050633544921875, 2.05337109375, 2.055433837890625, 2.057635498046875]",tokens/s,30.699605046311824,kWh,6.07140338594455e-05,6.69682917050661e-06,4.029495022535169e-05,0.0001077058132553038,tokens/kWh,584926.6450517949,,s,630,20.51871072006227,0.03256938209533691,0.0002903501983759094,0.03253863906860352,0.03285252685546875,0.03303322963714599,0.03378777610778809,"[0.03350697708129883, 0.032739871978759764, 0.032416095733642576, 0.03230310440063477, 0.03222118377685547, 0.03215564727783203, 
0.032083518981933595, 0.03210079956054687, 0.03358902359008789, 0.03220646286010742, 0.03257814407348633, 0.03234815979003906, 0.03220684814453125, 0.03219635009765625, 0.03216432189941406, 0.032221248626708984, 0.03231510543823242, 0.0322092170715332, 0.032247486114501955, 0.03232166290283203, 0.032292736053466796, 0.032460479736328124, 0.032667583465576175, 0.032317825317382816, 0.032366817474365234, 0.03229228973388672, 0.032354846954345706, 0.032354110717773436, 0.032409664154052734, 0.03252336120605469, 0.032594783782958985, 0.03258163070678711, 0.03248659133911133, 0.03247705459594727, 0.03249862289428711, 0.03249151992797852, 0.03238857650756836, 0.032417633056640624, 0.032381313323974606, 0.03240787124633789, 0.03244236755371094, 0.03253452682495117, 0.03259187316894531, 0.03250790405273438, 0.03243967819213867, 0.03244723129272461, 0.03246041488647461, 0.03244086456298828, 0.03244591903686524, 0.03246675109863281, 0.03257324981689453, 0.03260070419311523, 0.03263692855834961, 0.03253657531738281, 0.03256659317016602, 0.032553985595703126, 0.03262636947631836, 0.03266563034057617, 0.032655326843261716, 0.032719936370849606, 0.03279967880249023, 0.03280879974365234, 0.032739391326904295, 0.03381657409667969, 0.032860160827636715, 0.03241139221191406, 0.03223577499389649, 0.032197761535644534, 0.032350078582763675, 0.032199169158935545, 0.032172542572021484, 0.03218454360961914, 0.032236801147460935, 0.03218806457519531, 0.03223791885375977, 0.032204673767089846, 0.03224399948120117, 0.03226166534423828, 0.03227734375, 0.03218345642089844, 0.03220566558837891, 0.03237055969238281, 0.03217772674560547, 0.03219004821777344, 0.03229305648803711, 0.03230595016479492, 0.03233280181884766, 0.03237174224853516, 0.03227849578857422, 0.03234147262573242, 0.03237104034423828, 0.03256063842773437, 0.03253728103637695, 0.03265715026855469, 0.03263657760620117, 0.032619361877441404, 0.03249331283569336, 0.03258687973022461, 0.033073665618896485, 0.033212799072265625, 0.03264716720581055, 0.03253622436523437, 0.032533889770507814, 0.03268198394775391, 0.0324986572265625, 0.03247027206420899, 0.032494335174560546, 0.032555007934570314, 0.03320124816894531, 0.03255532836914062, 0.032844383239746096, 0.03261347198486328, 0.033012641906738284, 0.032524288177490236, 0.032557056427001956, 0.03250691223144531, 0.03269481658935547, 0.032431999206542966, 0.032535457611083986, 0.03266550445556641, 0.032704254150390626, 0.032851966857910156, 0.03279363250732422, 0.03283248138427734, 0.03279888153076172, 0.032730976104736326, 0.03367343902587891, 0.03297123336791992, 0.0324257926940918, 0.03232563018798828, 0.0323133430480957, 0.032110591888427735, 0.032185470581054684, 0.032184928894042966, 0.03221123123168945, 0.03220275115966797, 0.03218739318847656, 0.032244735717773435, 0.03222726440429687, 0.032266529083251956, 0.032214977264404296, 0.03224755096435547, 0.032261600494384764, 0.03232236862182617, 0.03224966430664063, 0.03231254577636719, 0.03229721450805664, 0.03244086456298828, 0.03252396774291992, 0.03254508972167969, 0.03239670562744141, 0.03233468627929687, 0.03236332702636719, 0.032392127990722656, 0.032459808349609376, 0.0327567024230957, 0.0327342414855957, 0.03259913635253906, 0.03266988754272461, 0.032507648468017576, 0.032562976837158204, 0.03255219268798828, 0.03254486465454102, 0.03252083206176758, 0.032519744873046874, 0.03255564880371094, 0.03246080017089844, 0.03248134231567383, 0.03251747131347656, 0.03253308868408203, 0.032522239685058595, 0.03254288101196289, 0.03255472183227539, 
0.03252191925048828, 0.03243667221069336, 0.032513473510742186, 0.032576255798339844, 0.032628734588623046, 0.032548927307128904, 0.03250915145874023, 0.032677886962890625, 0.03271295928955078, 0.03268771362304688, 0.03271750259399414, 0.03282534408569336, 0.032970848083496096, 0.032876449584960936, 0.032790687561035155, 0.03277190399169922, 0.03374208068847656, 0.03291417694091797, 0.03256934356689453, 0.03233161544799805, 0.03228873443603516, 0.03228688049316406, 0.032290847778320315, 0.0322468147277832, 0.03243312072753906, 0.03225600051879883, 0.03226419067382812, 0.032530433654785154, 0.03234966278076172, 0.032266719818115235, 0.032278591156005856, 0.03243017578125, 0.032610206604003905, 0.03231536102294922, 0.0322591667175293, 0.03238188934326172, 0.032317440032958986, 0.03233996963500976, 0.03239440155029297, 0.032312320709228515, 0.03243596649169922, 0.03242329788208008, 0.03252707290649414, 0.032679935455322266, 0.0327086067199707, 0.032700225830078124, 0.03253184127807617, 0.03264368057250976, 0.03243417739868164, 0.03262239837646484, 0.032487136840820316, 0.032481983184814454, 0.03245686340332031, 0.03243724822998047, 0.032406368255615235, 0.03256320190429687, 0.032571392059326174, 0.03244851303100586, 0.0324771842956543, 0.032487422943115234, 0.032546241760253905, 0.032543041229248046, 0.03264246368408203, 0.032562015533447265, 0.03247455978393555, 0.03253955078125, 0.03254137420654297, 0.03254985427856445, 0.032555007934570314, 0.03260825729370117, 0.03261824035644531, 0.03256140899658203, 0.03262195205688476, 0.03274966430664063, 0.032714942932128906, 0.0327490234375, 0.032782718658447264, 0.03277222442626953, 0.03272310256958008, 0.033783935546875, 0.03279244613647461, 0.03249923324584961, 0.03234864044189453, 0.033116127014160154, 0.03232748794555664, 0.03226630401611328, 0.032331905364990234, 0.032271839141845705, 0.03226844787597656, 0.03228303909301758, 0.03247430419921875, 0.03222544097900391, 0.03215849685668945, 0.03231049728393555, 0.03243280029296875, 0.03232972717285156, 0.032570560455322264, 0.03273795318603516, 0.03241571044921875, 0.032436416625976565, 0.03250320053100586, 0.03242659378051758, 0.03239731216430664, 0.03242598342895508, 0.03253452682495117, 0.03242803192138672, 0.03252019119262695, 0.03259801483154297, 0.03266527938842773, 0.032908737182617186, 0.03271667098999023, 0.03264614486694336, 0.03262278366088867, 0.032597278594970705, 0.0325432014465332, 0.0326387825012207, 0.032491870880126957, 0.032476577758789066, 0.03268239974975586, 0.03277423858642578, 0.03262809753417969, 0.032686721801757815, 0.03260992050170899, 0.03253696060180664, 0.03257078552246094, 0.03257404708862305, 0.032548095703125, 0.032512958526611326, 0.032517024993896484, 0.03262464141845703, 0.03259894561767578, 0.03258761596679687, 0.03258348846435547, 0.03263299179077148, 0.03261868667602539, 0.03268745422363281, 0.03280144119262695, 0.032769088745117185, 0.032826305389404294, 0.032925567626953124, 0.0330302734375, 0.032820350646972654, 0.03366899108886719, 0.033056896209716795, 0.03265039825439453, 0.03237974548339844, 0.03230633544921875, 0.032349025726318356, 0.03227852630615234, 0.03231129455566406, 0.03236038589477539, 0.032311359405517576, 0.03244003295898437, 0.03242121505737305, 0.032392127990722656, 0.03242943954467774, 0.03232044982910156, 0.03238675308227539, 0.03239731216430664, 0.03235398483276367, 0.03238079833984375, 0.032319934844970706, 0.032415744781494144, 0.032417633056640624, 0.03245632171630859, 0.03250640106201172, 0.03249356842041016, 0.03256320190429687, 
0.03279212951660156, 0.032551361083984376, 0.0326901741027832, 0.03277619171142578, 0.03323875045776367, 0.033035648345947265, 0.03269507217407226, 0.03256870269775391, 0.032576255798339844, 0.033146209716796875, 0.03246352005004883, 0.032540672302246096, 0.032578975677490234, 0.03255177688598633, 0.03259686279296875, 0.03259689712524414, 0.03332707214355469, 0.032734878540039064, 0.03281955337524414, 0.0326585922241211, 0.03264803314208985, 0.032849727630615236, 0.032677536010742185, 0.03257929611206055, 0.03258777618408203, 0.032657951354980466, 0.03380656051635742, 0.032784416198730466, 0.0327658576965332, 0.03281113433837891, 0.0334005126953125, 0.03277203369140625, 0.032747230529785155, 0.03298166275024414, 0.0330994873046875, 0.03295004653930664, 0.0327644157409668, 0.03378934478759766, 0.03290176010131836, 0.03244047927856445, 0.032429985046386715, 0.0324607048034668, 0.03249135971069336, 0.03232755279541016, 0.03224921417236328, 0.03232851028442383, 0.03244252777099609, 0.03227347183227539, 0.03237772750854492, 0.032299007415771484, 0.03230892944335938, 0.032282623291015625, 0.03227222442626953, 0.032215518951416014, 0.03230886459350586, 0.032498046875, 0.03243619155883789, 0.032413726806640626, 0.0323656005859375, 0.03236880111694336, 0.03241555023193359, 0.032355552673339845, 0.03242118453979492, 0.03234864044189453, 0.03264921569824219, 0.03269222259521484, 0.03257468795776367, 0.03267686462402344, 0.03269200134277344, 0.032595966339111326, 0.03254476928710937, 0.03253772735595703, 0.03256355285644531, 0.03242448043823242, 0.032427391052246096, 0.03249833679199219, 0.03252761459350586, 0.03262236785888672, 0.03253139114379883, 0.032552417755126954, 0.03261084747314453, 0.03244646453857422, 0.033020927429199216, 0.03253350448608398, 0.0325695686340332, 0.03271615982055664, 0.03271414566040039, 0.03252931213378906, 0.032573440551757815, 0.03252019119262695, 0.032530784606933594, 0.032912769317626954, 0.03247699356079101, 0.03253481674194336, 0.03254451370239258, 0.03275420761108398, 0.03284774398803711, 0.03298278427124023, 0.032911678314208985, 0.032706623077392576, 0.03344614410400391, 0.03288614273071289, 0.03243276977539063, 0.03227024078369141, 0.03218374252319336, 0.032129695892333984, 0.032263519287109375, 0.03225667190551758, 0.03218960189819336, 0.032275264739990234, 0.03226182556152344, 0.03256355285644531, 0.03230083084106445, 0.0323172492980957, 0.03236627197265625, 0.03224649429321289, 0.032357921600341795, 0.032422367095947265, 0.032309249877929686, 0.03237254333496094, 0.03257583999633789, 0.032358238220214844, 0.03245260620117187, 0.03234815979003906, 0.032380256652832035, 0.032320159912109375, 0.03239100646972656, 0.03243539047241211, 0.03265430450439453, 0.032697856903076174, 0.03282339096069336, 0.03268592071533203, 0.032764480590820315, 0.03259296035766602, 0.032549182891845704, 0.03258796691894531, 0.03255350494384766, 0.032745376586914066, 0.032595966339111326, 0.032645023345947266, 0.03269846343994141, 0.03285951995849609, 0.03271539306640625, 0.03263868713378906, 0.03266006469726562, 0.03281478500366211, 0.03274342346191406, 0.03279980850219726, 0.03276284790039063, 0.032659423828125, 0.0326901741027832, 0.032723262786865236, 0.03267142486572266, 0.03261260986328125, 0.032777984619140624, 0.032857791900634765, 0.032954559326171876, 0.03303631973266601, 0.03296265411376953, 0.032857566833496096, 0.03288937759399414, 0.03289651107788086, 0.03278694534301758, 0.03401318359375, 0.03295155334472656, 0.032592639923095704, 0.032368640899658206, 
0.03241606521606445, 0.03239516830444336, 0.032298782348632815, 0.032247806549072264, 0.03234201431274414, 0.0323480339050293, 0.03232985687255859, 0.03250115203857422, 0.03288076782226562, 0.03226697540283203, 0.03235964965820313, 0.03228521728515625, 0.03228815841674805, 0.032309825897216794, 0.032315425872802735, 0.03234611129760742, 0.03238860702514648, 0.032618526458740235, 0.032664031982421876, 0.03258703994750976, 0.03249635314941406, 0.0325076789855957, 0.032684097290039064, 0.03238723373413086, 0.03256524658203125, 0.03270041656494141, 0.032747135162353516, 0.03263225555419922, 0.032698814392089846, 0.03258153533935547, 0.03273820877075195, 0.032537567138671876, 0.0325145263671875, 0.032417888641357424, 0.03246614456176758, 0.03243222427368164, 0.032567745208740236, 0.032522369384765625, 0.03268787384033203, 0.0327704963684082, 0.03277219009399414, 0.03262003326416016, 0.03288729476928711, 0.03259743881225586, 0.03251593780517578, 0.03249430465698242, 0.03254662322998047, 0.032610496520996096, 0.032531871795654296, 0.032586368560791015, 0.03256060791015625, 0.03257299041748047, 0.03268499374389648, 0.03287276840209961, 0.03302342224121094, 0.03420739364624024, 0.03291737747192383, 0.03288137435913086, 0.03349910354614258, 0.03372639846801758, 0.033046592712402345, 0.03256934356689453, 0.03230310440063477, 0.032280384063720705, 0.03227587127685547, 0.03223427200317383, 0.03219180679321289, 0.03234051132202148, 0.032405662536621097, 0.032352256774902347, 0.032389022827148437, 0.03234598541259766, 0.03230505752563476, 0.03230752182006836, 0.03240345764160156, 0.03238092803955078, 0.032380702972412106, 0.032545280456542966, 0.03242339324951172, 0.03231584167480469, 0.03237868881225586, 0.03252396774291992, 0.03259427261352539, 0.03410531234741211, 0.03309737777709961, 0.03245910263061524, 0.03259737777709961, 0.032583999633789065, 0.032772415161132815, 0.032780223846435544, 0.03271609497070312, 0.03268479919433594, 0.03265331268310547, 0.03266521453857422, 0.032698814392089846, 0.033886272430419924, 0.03280403137207031, 0.032674591064453126, 0.03274127960205078, 0.0327720947265625, 0.032704288482666016, 0.033113697052001956, 0.03271324920654297, 0.03257699203491211, 0.032567935943603514, 0.032606208801269534, 0.03271680068969727, 0.03280486297607422, 0.03260614395141601, 0.03255916976928711, 0.032718849182128903, 0.032632801055908205, 0.0325263671875, 0.032540416717529295, 0.03263123321533203, 0.032664993286132815, 0.03270595169067383, 0.03267891311645508, 0.03322880172729492, 0.03281862258911133, 0.03275859069824219, 0.03276300811767578]",tokens/s,30.703683510875507,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", 
line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 169430 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 270, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 84051 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 905.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 72029 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,737.456128,804.192256,0.0,408.94464,387.119104,s,1,7.182080078125,7.182080078125,0.0,7.182080078125,7.182080078125,7.182080078125,7.182080078125,[7.182080078125],,kWh,6.235722179174748e-06,6.807830903083486e-07,2.0150016119997727e-06,8.931506881482869e-06,,MB,1039.253504,827.260928,0.0,421.527552,354.083328,s,17,0.44126082992553717,0.025956519407384537,0.0007704486004763689,0.025722015380859376,0.026036025619506836,0.02668463325500488,0.028530885696411133,"[0.028992448806762695, 0.025850559234619142, 0.02569664001464844, 0.02565315246582031, 0.025737695693969727, 0.025810623168945314, 0.025988256454467774, 0.02610767936706543, 0.025722015380859376, 0.02579302406311035, 0.02569443130493164, 0.025657024383544922, 0.0255994873046875, 0.02593222427368164, 0.02568422317504883, 0.025718751907348632, 0.02562259292602539]",tokens/s,9862.647452152962,kWh,8.819642737411628e-07,9.724077051836299e-08,5.812534022296101e-07,1.5604584464891359e-06,tokens/kWh,164054352.473129,MB,1065.844736,841.940992,0.0,436.207616,354.085888,s,17,10.024740356445314,0.5896906092026655,0.0030526179067493597,0.5890370483398437,0.593540087890625,0.5946925415039063,0.5948475415039063,"[0.5899246215820313, 
0.5886675415039062, 0.5948862915039063, 0.5928040771484375, 0.588941650390625, 0.58674462890625, 0.5922803344726563, 0.5868428955078125, 0.5838924560546875, 0.5850997924804687, 0.5876492919921875, 0.5888758544921875, 0.5890370483398437, 0.59021142578125, 0.5946441040039062, 0.592195556640625, 0.5920427856445313]",tokens/s,106.83568470792468,kWh,1.686057835910163e-05,1.8594751716190853e-06,7.630632252240906e-06,2.6350685782961627e-05,tokens/kWh,2390829.617069619,,s,1071,10.016516191482534,0.009352489441160173,0.00018683751353915182,0.00931488037109375,0.00946723175048828,0.00958407974243164,0.010190162849426268,"[0.009582559585571289, 0.009558079719543457, 0.009326144218444824, 0.00931488037109375, 0.009281375885009765, 0.00934665584564209, 0.009284159660339356, 0.009285344123840331, 0.00930777645111084, 0.009285280227661133, 0.009210240364074708, 0.009363807678222657, 0.009299391746520997, 0.009382816314697265, 0.009649696350097657, 0.00953593635559082, 0.009332480430603028, 0.009400511741638183, 0.009327520370483398, 0.009412575721740722, 0.009391103744506836, 0.009379615783691406, 0.009423423767089843, 0.009365280151367188, 0.009382111549377441, 0.009430463790893556, 0.009339103698730469, 0.009338175773620606, 0.009308863639831542, 0.009281760215759277, 0.009324607849121093, 0.009309247970581054, 0.009366368293762207, 0.009301823616027832, 0.009455615997314454, 0.009381695747375489, 0.009307904243469238, 0.009355711936950683, 0.009283424377441406, 0.009275424003601073, 0.009264800071716308, 0.009260607719421386, 0.00930457592010498, 0.00935974407196045, 0.009297951698303223, 0.00934227180480957, 0.009354240417480468, 0.009420479774475098, 0.009416383743286133, 0.009494912147521973, 0.009358752250671386, 0.009275039672851563, 0.00933568000793457, 0.009369152069091797, 0.009308544158935547, 0.009293888092041016, 0.009302016258239745, 0.009267200469970703, 0.009359071731567383, 0.009388319969177246, 0.009350751876831055, 0.009293503761291504, 0.009546784400939942, 0.009229887962341308, 0.009464256286621094, 0.009391807556152343, 0.009322815895080567, 0.0092871675491333, 0.009281951904296875, 0.009401760101318359, 0.009308863639831542, 0.009303263664245606, 0.009335007667541504, 0.009238176345825196, 0.00947702407836914, 0.009273216247558594, 0.009261183738708496, 0.009357312202453612, 0.00928508758544922, 0.009359487533569336, 0.00931388759613037, 0.009306943893432618, 0.009261055946350098, 0.009347423553466797, 0.009307680130004882, 0.009312479972839356, 0.009352640151977539, 0.009262784004211426, 0.009347871780395508, 0.009293824195861817, 0.0093023681640625, 0.009313952445983887, 0.009267200469970703, 0.009287712097167968, 0.009363007545471192, 0.00931436824798584, 0.009352864265441895, 0.009267040252685547, 0.009341152191162109, 0.009306752204895019, 0.009350527763366699, 0.009271871566772462, 0.009363743782043457, 0.009260543823242188, 0.00932271957397461, 0.009631903648376464, 0.009445088386535645, 0.009341119766235351, 0.00930406379699707, 0.009351167678833008, 0.009528575897216797, 0.009360128402709961, 0.009375743865966797, 0.009318400382995605, 0.009344799995422363, 0.00946723175048828, 0.009370431900024415, 0.009443391799926757, 0.00932044792175293, 0.009281760215759277, 0.009328224182128907, 0.009242815971374512, 0.009369279861450195, 0.0093221435546875, 0.009335455894470214, 0.00932863998413086, 0.009151904106140137, 0.009402976036071778, 0.009377887725830078, 0.009296031951904296, 0.00934217643737793, 0.009245408058166505, 0.009254719734191895, 0.009275744438171387, 
0.009334591865539551, 0.00929315185546875, 0.009347583770751953, 0.00936355209350586, 0.009344927787780762, 0.009375712394714356, 0.009305248260498047, 0.009325440406799317, 0.009369407653808594, 0.009300160408020019, 0.009312512397766114, 0.009222240447998046, 0.00925046443939209, 0.009249919891357422, 0.00932953643798828, 0.00932044792175293, 0.00925068759918213, 0.009352767944335937, 0.009396224021911622, 0.009417183876037597, 0.00983568000793457, 0.01030185604095459, 0.012427807807922363, 0.009649888038635254, 0.009404704093933106, 0.009422975540161133, 0.009332320213317872, 0.00932688045501709, 0.009476351737976074, 0.009602687835693359, 0.009342623710632323, 0.009554400444030761, 0.00932249641418457, 0.009361568450927735, 0.00965129566192627, 0.009378560066223144, 0.009418432235717773, 0.009400639533996582, 0.00941055965423584, 0.009401984214782715, 0.009322272300720216, 0.009316960334777831, 0.009431039810180664, 0.00940067195892334, 0.009299615859985351, 0.009313504219055175, 0.009947839736938477, 0.009334912300109863, 0.009376959800720215, 0.009346879959106446, 0.009292767524719239, 0.009319583892822265, 0.009216095924377441, 0.009306879997253418, 0.009346943855285644, 0.009099776268005372, 0.009375871658325196, 0.009298048019409179, 0.009340928077697755, 0.009361536026000976, 0.00932646369934082, 0.009373696327209472, 0.009654272079467773, 0.009332544326782226, 0.009395392417907714, 0.009556991577148437, 0.00935910415649414, 0.009406720161437988, 0.00928115177154541, 0.009389792442321777, 0.009453951835632324, 0.009525728225708009, 0.009358271598815918, 0.010694656372070312, 0.00983232021331787, 0.0095098237991333, 0.009506464004516602, 0.009488800048828124, 0.009463808059692384, 0.00937334442138672, 0.009331295967102051, 0.009312031745910644, 0.009614879608154298, 0.009484736442565918, 0.009418208122253418, 0.009451168060302734, 0.009443615913391113, 0.009316224098205567, 0.00940009593963623, 0.009788543701171874, 0.009307519912719726, 0.009364928245544434, 0.009255935668945312, 0.009406368255615234, 0.009400416374206542, 0.00928767967224121, 0.009222368240356445, 0.009295519828796386, 0.009244799613952637, 0.009234623908996582, 0.00927945613861084, 0.00929980754852295, 0.009388031959533692, 0.0093306884765625, 0.009385984420776367, 0.00949465560913086, 0.00952678394317627, 0.009320128440856933, 0.009306976318359375, 0.00930799961090088, 0.009289728164672852, 0.009250495910644531, 0.0092675199508667, 0.00928694438934326, 0.009321184158325195, 0.009313535690307618, 0.009277888298034668, 0.009314784049987793, 0.00906982421875, 0.009359968185424805, 0.009304415702819825, 0.009256768226623534, 0.009293824195861817, 0.009252927780151366, 0.009289088249206543, 0.009329216003417969, 0.009387071609497071, 0.009224960327148438, 0.00925715160369873, 0.00931430435180664, 0.009562272071838379, 0.009327615737915039, 0.009347935676574708, 0.009308416366577149, 0.009322015762329102, 0.009470175743103028, 0.00957875156402588, 0.009448800086975098, 0.009357536315917969, 0.009420096397399902, 0.009341823577880859, 0.009324543952941895, 0.009308159828186035, 0.00937168025970459, 0.00924668788909912, 0.009451519966125489, 0.009265248298645019, 0.009242527961730957, 0.009265151977539063, 0.00926959991455078, 0.0092642240524292, 0.009244319915771484, 0.009247648239135741, 0.009342687606811523, 0.009256383895874024, 0.009243552207946776, 0.009234399795532226, 0.009244480133056641, 0.009204928398132323, 0.009315648078918458, 0.009270943641662598, 0.009279071807861328, 0.009279328346252442, 
0.009320480346679687, 0.00926159954071045, 0.00941055965423584, 0.010364831924438477, 0.010149024009704589, 0.009276448249816894, 0.009248671531677246, 0.009282848358154298, 0.00920854377746582, 0.009211551666259766, 0.009228639602661133, 0.009354911804199218, 0.009247072219848633, 0.009311296463012696, 0.009252127647399902, 0.009608896255493164, 0.009420160293579102, 0.009310815811157227, 0.00909334373474121, 0.009426048278808594, 0.009326815605163574, 0.009212672233581543, 0.00938588809967041, 0.00931948757171631, 0.010384639739990235, 0.00937337589263916, 0.009319968223571777, 0.00918883228302002, 0.009166879653930664, 0.009378175735473632, 0.009242688179016113, 0.009269791603088379, 0.00920969581604004, 0.009250752449035645, 0.00924614429473877, 0.00924454402923584, 0.009275615692138672, 0.009290399551391602, 0.009238080024719238, 0.009220576286315918, 0.009341119766235351, 0.009290911674499512, 0.009302687644958497, 0.009265119552612305, 0.009315839767456055, 0.009231167793273926, 0.009231167793273926, 0.009227168083190919, 0.009326592445373535, 0.009236479759216308, 0.009264351844787598, 0.009355839729309081, 0.009318623542785644, 0.009252863883972168, 0.00919961643218994, 0.009207807540893554, 0.00919155216217041, 0.00931606388092041, 0.009405599594116212, 0.009313280105590821, 0.009308159828186035, 0.009265151977539063, 0.009363455772399902, 0.009298239707946777, 0.009211071968078613, 0.009244959831237793, 0.009258399963378907, 0.009238431930541992, 0.009318400382995605, 0.0092741117477417, 0.009277600288391114, 0.009654272079467773, 0.009286975860595703, 0.009239232063293457, 0.009236607551574707, 0.009185152053833008, 0.00931663990020752, 0.009518719673156739, 0.009375295639038085, 0.009372063636779785, 0.009367072105407714, 0.009246848106384277, 0.00948857593536377, 0.009408255577087403, 0.009277407646179198, 0.009625568389892578, 0.009420607566833496, 0.009289919853210449, 0.009356831550598145, 0.009369888305664063, 0.009299424171447754, 0.009392864227294922, 0.009289759635925293, 0.009330656051635742, 0.009374752044677734, 0.009351936340332032, 0.009333215713500976, 0.009350336074829102, 0.009298496246337891, 0.009273344039916993, 0.009283519744873047, 0.009346688270568847, 0.00929366397857666, 0.009285247802734375, 0.009324895858764648, 0.00926534366607666, 0.009318816184997558, 0.009306143760681153, 0.00933852767944336, 0.009578847885131836, 0.009422080039978027, 0.009304896354675293, 0.009281824111938477, 0.009369248390197754, 0.009293824195861817, 0.009319711685180664, 0.009269984245300292, 0.009285632133483887, 0.009611328125, 0.009494463920593261, 0.0093306884765625, 0.009375583648681641, 0.009339039802551269, 0.00928767967224121, 0.009339103698730469, 0.009454367637634277, 0.00925817584991455, 0.00930726432800293, 0.009274016380310059, 0.009363136291503905, 0.009355615615844727, 0.009283103942871095, 0.009296383857727051, 0.009316320419311524, 0.009545568466186524, 0.009543935775756835, 0.010135104179382324, 0.009476448059082031, 0.010182751655578613, 0.009299872398376464, 0.00939136028289795, 0.009285408020019531, 0.009450464248657227, 0.010119423866271973, 0.009173184394836427, 0.009430848121643066, 0.009290047645568848, 0.00930735969543457, 0.00931062412261963, 0.0093635196685791, 0.009310208320617675, 0.009398271560668945, 0.009271295547485351, 0.00937382411956787, 0.009297792434692382, 0.009215840339660645, 0.009279871940612793, 0.009381664276123048, 0.009347071647644043, 0.009403903961181641, 0.009288127899169923, 0.00922214412689209, 0.00928767967224121, 
0.009275168418884277, 0.009224512100219727, 0.009352704048156739, 0.009284064292907715, 0.009377792358398437, 0.00932863998413086, 0.009361408233642577, 0.009314240455627442, 0.009421088218688966, 0.009314080238342286, 0.009451519966125489, 0.009287872314453126, 0.009256768226623534, 0.009238752365112304, 0.009267104148864747, 0.00965824031829834, 0.009270400047302245, 0.009736703872680665, 0.009259391784667968, 0.009185279846191406, 0.009274656295776368, 0.009287520408630371, 0.009208703994750976, 0.009278464317321777, 0.009452383995056152, 0.009211711883544922, 0.00941500759124756, 0.009242624282836913, 0.009279423713684083, 0.009306464195251465, 0.009230048179626464, 0.009218048095703125, 0.009267200469970703, 0.009207424163818359, 0.009251199722290038, 0.009260160446166992, 0.009249152183532715, 0.009248479843139649, 0.009273119926452637, 0.009309184074401856, 0.00923033618927002, 0.009227583885192871, 0.009291999816894532, 0.009308735847473144, 0.00910540771484375, 0.009277440071105958, 0.009303839683532714, 0.009309503555297851, 0.009239456176757813, 0.009203295707702636, 0.009234880447387695, 0.009244768142700196, 0.009210975646972656, 0.009274144172668457, 0.009235679626464844, 0.00916915225982666, 0.009206303596496582, 0.009383968353271484, 0.009508831977844238, 0.009307840347290038, 0.009314144134521484, 0.009212160110473632, 0.009339327812194825, 0.009394047737121582, 0.00922374439239502, 0.009267552375793457, 0.009302271842956542, 0.009228032112121582, 0.009305088043212891, 0.00923750400543213, 0.009281824111938477, 0.009211615562438965, 0.009268544197082519, 0.009233023643493652, 0.009265215873718261, 0.009225600242614746, 0.009302304267883301, 0.009249119758605958, 0.00927948760986328, 0.009299615859985351, 0.009204352378845215, 0.009264384269714356, 0.009226847648620605, 0.009230208396911622, 0.009228287696838379, 0.009211168289184571, 0.009224672317504883, 0.009439776420593262, 0.009491904258728028, 0.009291999816894532, 0.00929798412322998, 0.009214240074157715, 0.009209600448608399, 0.009218015670776368, 0.009261216163635254, 0.009230175971984863, 0.009261055946350098, 0.009211935997009278, 0.009202783584594726, 0.009260992050170898, 0.009212863922119141, 0.009252448081970215, 0.009194016456604004, 0.009207679748535156, 0.009240511894226075, 0.009217663764953614, 0.009230655670166016, 0.00898204803466797, 0.00936188793182373, 0.009355327606201173, 0.009562047958374023, 0.009285728454589843, 0.009355263710021973, 0.009256863594055175, 0.009267200469970703, 0.009303680419921875, 0.009226271629333496, 0.009550368309020996, 0.009272640228271484, 0.009310144424438477, 0.009318943977355956, 0.009279520034790038, 0.009268223762512207, 0.009288703918457031, 0.009285056114196777, 0.009388383865356445, 0.00924079990386963, 0.009244671821594238, 0.009370688438415528, 0.009347552299499512, 0.00933683204650879, 0.009302207946777344, 0.0092511043548584, 0.009271455764770507, 0.00923737621307373, 0.009248064041137696, 0.009347968101501465, 0.009206560134887696, 0.009258591651916503, 0.009243040084838868, 0.009221343994140626, 0.00930076789855957, 0.00923356819152832, 0.009190560340881348, 0.009210944175720215, 0.009256832122802735, 0.00921670436859131, 0.009274623870849609, 0.009196352005004883, 0.009223648071289062, 0.009234175682067872, 0.009193440437316895, 0.009231167793273926, 0.009285632133483887, 0.009223872184753418, 0.009269696235656738, 0.00923635196685791, 0.009233407974243164, 0.009345215797424316, 0.009255743980407715, 0.009161727905273438, 0.009211999893188477, 
0.009285599708557128, 0.009230976104736327, 0.009527968406677247, 0.009436832427978516, 0.009246720314025878, 0.009275584220886231, 0.00927519989013672, 0.009316351890563965, 0.008990336418151855, 0.00931388759613037, 0.009284480094909668, 0.009310208320617675, 0.009287520408630371, 0.009326016426086426, 0.009269984245300292, 0.009320351600646972, 0.009328960418701172, 0.009236255645751952, 0.009233983993530273, 0.00934342384338379, 0.009218048095703125, 0.009275391578674316, 0.009269248008728028, 0.00929753589630127, 0.009259391784667968, 0.009266464233398438, 0.0093088960647583, 0.00930735969543457, 0.009229248046875, 0.009288607597351074, 0.009585599899291992, 0.009276896476745606, 0.009320992469787598, 0.009342464447021484, 0.009284095764160156, 0.009261055946350098, 0.009336095809936523, 0.009263232231140136, 0.009298879623413086, 0.009286815643310548, 0.009265664100646973, 0.00931388759613037, 0.009264863967895508, 0.009244671821594238, 0.009248479843139649, 0.009271679878234863, 0.00942563247680664, 0.00931827163696289, 0.009319744110107421, 0.009265055656433105, 0.009269951820373535, 0.00934102439880371, 0.009291775703430176, 0.009302016258239745, 0.009224191665649414, 0.009610560417175293, 0.009357312202453612, 0.00929043197631836, 0.009299967765808105, 0.009461440086364747, 0.009367487907409667, 0.009306495666503906, 0.009332736015319825, 0.009302047729492188, 0.009275263786315917, 0.00942908763885498, 0.00926534366607666, 0.009265215873718261, 0.009313823699951172, 0.0102074556350708, 0.009504768371582031, 0.009276800155639649, 0.0096428804397583, 0.010266624450683593, 0.010410176277160645, 0.009338687896728516, 0.009452896118164062, 0.009317312240600586, 0.009315775871276856, 0.009291999816894532, 0.009273407936096191, 0.009287839889526367, 0.009293824195861817, 0.009262432098388671, 0.009273920059204101, 0.009293408393859863, 0.009293343544006348, 0.009330880165100098, 0.009287903785705567, 0.00926966381072998, 0.00930947208404541, 0.009453311920166015, 0.009278047561645507, 0.009201472282409667, 0.009331263542175294, 0.009273344039916993, 0.009267200469970703, 0.009284735679626465, 0.009252896308898926, 0.009236448287963868, 0.009229184150695801, 0.009250816345214843, 0.009284832000732423, 0.009222944259643555, 0.009518943786621093, 0.009407936096191406, 0.009324607849121093, 0.00930457592010498, 0.009425056457519532, 0.009283583641052246, 0.009467904090881347, 0.009242752075195312, 0.009289631843566895, 0.00928547191619873, 0.009242752075195312, 0.009254912376403808, 0.009334336280822754, 0.009660832405090332, 0.009266464233398438, 0.009409279823303222, 0.00922163200378418, 0.009245183944702149, 0.009266912460327148, 0.00924505615234375, 0.009300928115844726, 0.00925385570526123, 0.009256735801696777, 0.00934502410888672, 0.009263039588928223, 0.009220704078674317, 0.009312288284301758, 0.009251935958862305, 0.009263680458068848, 0.009187328338623046, 0.00903433609008789, 0.009310144424438477, 0.009361184120178223, 0.009350879669189453, 0.009290207862854004, 0.009477439880371094, 0.009307200431823731, 0.009276479721069335, 0.00930406379699707, 0.009276000022888184, 0.009291328430175782, 0.009377535820007324, 0.009314944267272949, 0.009339136123657226, 0.009301983833312988, 0.009321632385253907, 0.00929043197631836, 0.009332736015319825, 0.009293600082397461, 0.009255328178405763, 0.009305919647216797, 0.009291232109069824, 0.009271200180053712, 0.009571167945861816, 0.009295647621154784, 0.00933471965789795, 0.009310272216796875, 0.009709792137145996, 0.009309696197509766, 
0.009388319969177246, 0.0094551362991333, 0.00937007999420166, 0.009434144020080566, 0.009367903709411622, 0.009301664352416993, 0.009374176025390625, 0.009345631599426269, 0.009271200180053712, 0.00930406379699707, 0.009631744384765625, 0.00935929584503174, 0.009351327896118164, 0.00927507209777832, 0.00927996826171875, 0.009363200187683106, 0.009275135993957519, 0.009322943687438965, 0.009436991691589356, 0.009347071647644043, 0.009305855751037598, 0.009371904373168946, 0.009276415824890137, 0.009339903831481934, 0.009406463623046875, 0.009357248306274414, 0.009307552337646484, 0.009370400428771972, 0.009398143768310547, 0.009359423637390136, 0.009347007751464844, 0.009267200469970703, 0.00931827163696289, 0.009377087593078613, 0.009158368110656739, 0.009416031837463378, 0.009450464248657227, 0.009379584312438965, 0.009367456436157226, 0.009423487663269043, 0.009392895698547364, 0.009302975654602051, 0.00932044792175293, 0.00932863998413086, 0.00974403190612793, 0.009664608001708984, 0.009421055793762206, 0.009391231536865234, 0.009415552139282227, 0.00939840030670166, 0.009391072273254395, 0.009423935890197754, 0.009293984413146973, 0.009279040336608887, 0.009312383651733399, 0.009363327980041504, 0.009557184219360351, 0.00951801586151123, 0.009336511611938476, 0.009390399932861329, 0.009302016258239745, 0.009353311538696289, 0.009319999694824218, 0.009361760139465332, 0.009403871536254884, 0.00933465576171875, 0.009347935676574708, 0.00935910415649414, 0.009283647537231446, 0.009309951782226562, 0.009257216453552247, 0.009302176475524903, 0.009273183822631837, 0.009367584228515625, 0.00937775993347168, 0.00936963176727295, 0.009315391540527344, 0.009363776206970214, 0.009361536026000976, 0.009418304443359376, 0.009331199645996094, 0.009363936424255372, 0.009273280143737793, 0.009315936088562012, 0.009310912132263184, 0.009324095726013183, 0.009287039756774902, 0.009337632179260254, 0.009395584106445313, 0.00935321617126465, 0.009310175895690918, 0.009345279693603516, 0.009323136329650879, 0.009289119720458984, 0.009332608222961426, 0.00931062412261963, 0.009304160118103028, 0.009390015602111817, 0.01005571174621582, 0.00972812843322754, 0.00993017578125, 0.00998243236541748, 0.00983580780029297, 0.009865471839904785, 0.009544159889221191, 0.00945257568359375, 0.009515392303466796, 0.009323103904724121, 0.009315808296203614, 0.009628191947937011, 0.009510687828063965, 0.00943286418914795, 0.009342752456665038, 0.009320927619934081, 0.009361056327819825, 0.009409055709838868, 0.009338879585266113, 0.009335840225219726, 0.009258272171020508, 0.009227423667907715, 0.009277440071105958, 0.009250559806823731, 0.009253664016723633, 0.009301664352416993, 0.009342623710632323, 0.009272000312805177, 0.009388031959533692, 0.009406496047973632, 0.00943286418914795, 0.009498815536499023, 0.009447456359863281, 0.009408479690551757, 0.009414239883422852, 0.00937820816040039, 0.009417823791503906, 0.009405344009399415, 0.009406047821044922, 0.009339296340942382, 0.009326687812805176, 0.00931011199951172, 0.00932863998413086, 0.009326592445373535, 0.009296159744262695, 0.009278176307678222, 0.009294848442077636, 0.009273344039916993, 0.009346240043640137, 0.00934342384338379, 0.009381312370300293, 0.009421759605407715, 0.009392127990722657, 0.00944870376586914, 0.009519488334655761, 0.009467583656311035, 0.009406559944152832, 0.009483967781066895, 0.009472224235534668, 0.009401023864746094, 0.009396032333374023, 0.00947219181060791, 0.009058303833007812, 0.009429280281066894, 0.009375455856323243, 
0.009388031959533692, 0.009302016258239745, 0.009302080154418945, 0.009297599792480469, 0.009392416000366211, 0.009243680000305175, 0.009372608184814453, 0.009371871948242188, 0.009387455940246581, 0.009383359909057616, 0.009420831680297852, 0.00939743995666504, 0.009375616073608399, 0.009361151695251466, 0.009373760223388672, 0.009269023895263672, 0.009298144340515136, 0.009275391578674316, 0.009240575790405273, 0.009262656211853027, 0.009314751625061036, 0.009300000190734863, 0.009223872184753418, 0.009277728080749512, 0.009318431854248048, 0.009277503967285156, 0.00926694393157959, 0.00937929630279541, 0.00927830410003662, 0.009463647842407226, 0.01049942398071289, 0.010584095954895019, 0.009760383605957032, 0.009409536361694336, 0.009306303977966309, 0.00923423957824707, 0.009381407737731934, 0.009287775993347168, 0.009288415908813477, 0.009655967712402343, 0.009798720359802246, 0.009536352157592774, 0.009316448211669923, 0.009263168334960938, 0.009664704322814941, 0.009436927795410156, 0.009302016258239745, 0.009363455772399902, 0.009362848281860351, 0.00927945613861084, 0.009253567695617675, 0.009689023971557617, 0.009287263870239258, 0.009290111541748048, 0.009318240165710449, 0.00927359962463379, 0.009348223686218262, 0.009276224136352538, 0.009318464279174805, 0.00923027229309082, 0.009006752014160156, 0.009349472045898437, 0.009334783554077148, 0.009344160079956055, 0.009336992263793946, 0.009339103698730469, 0.009207903861999512, 0.009306655883789063, 0.009336671829223632, 0.009260767936706544, 0.009298208236694337, 0.00929587173461914, 0.009564448356628417, 0.010582752227783203, 0.010077183723449706, 0.009422623634338378, 0.009394335746765136, 0.009386048316955567, 0.009317728042602539, 0.009282208442687989, 0.009294112205505372, 0.009288736343383789, 0.009263263702392579, 0.009316608428955079, 0.00932483196258545, 0.00933683204650879, 0.009328543663024902, 0.009317567825317383, 0.00934988784790039, 0.009373855590820313, 0.009349120140075684, 0.009439231872558594, 0.009383456230163574, 0.009354880332946777, 0.009323360443115235, 0.009363327980041504, 0.009371487617492676, 0.00935580825805664, 0.009596159934997558, 0.009355487823486329, 0.009414527893066406, 0.00935977554321289, 0.009340031623840332, 0.00941759967803955, 0.009453248023986817, 0.009453344345092773, 0.00946668815612793, 0.00952905559539795, 0.009493599891662598, 0.009460639953613282, 0.009455615997314454, 0.009390048027038574, 0.009468992233276368, 0.00931270408630371, 0.009323040008544921, 0.009329728126525879, 0.009382847785949708, 0.009388031959533692, 0.009437184333801269, 0.009324128150939942, 0.009252544403076171, 0.009305088043212891, 0.009282367706298828]",tokens/s,106.9234032597796,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1065.504768,2079.260672,0.0,1684.013056,1657.439232,s,1,7.2145654296875,7.2145654296875,0.0,7.2145654296875,7.2145654296875,7.2145654296875,7.2145654296875,[7.2145654296875],,kWh,3.7684097791763326e-06,4.085566477662718e-07,1.0952786540022186e-06,5.272245080944823e-06,,MB,1341.435904,2119.10656,0.0,1713.373184,1302.298112,s,10,1.011987174987793,0.10119871749877932,0.0028366070230200846,0.10055105590820312,0.10325200500488281,0.1061389305114746,0.10844847091674804,"[0.1090258560180664, 0.09853689575195312, 0.10062416076660156, 0.10158761596679687, 0.0996248016357422, 0.10057234954833984, 0.10052976226806641, 0.09919296264648438, 0.10261046600341797, 0.09968230438232421]",tokens/s,2529.676327203336,kWh,3.3570094340517515e-06,3.7021805883598037e-07,2.1758606168046864e-06,5.903088109692418e-06,tokens/kWh,43367131.78644032,MB,1366.355968,2165.243904,0.0,1759.510528,1302.300672,s,10,9.428148559570314,0.9428148559570314,0.0034621751030130873,0.9426940612792969,0.9475249206542969,0.9475557647705078,0.9475804400634765,"[0.9435919799804687, 0.94119482421875, 0.9369241333007813, 0.9395125732421875, 0.939375, 0.9475866088867188, 0.9459053344726562, 0.94751806640625, 0.941796142578125, 0.944743896484375]",tokens/s,66.82117873084428,kWh,2.7282876182614957e-05,3.008836718986481e-06,1.5394820106395427e-05,4.5686533007996866e-05,tokens/kWh,1378962.154755158,,s,630,9.4258660144806,0.014961692086477128,0.00027714551945568324,0.014900368213653564,0.015108636951446533,0.015313551902770995,0.01607775957107544,"[0.015352224349975586, 0.015136735916137695, 0.014893376350402832, 0.014906240463256835, 0.014957375526428222, 0.014939488410949707, 0.018080415725708007, 0.016070079803466798, 0.015026399612426758, 0.01488111972808838, 0.014872575759887695, 0.014792703628540039, 0.014876576423645019, 0.014796895980834961, 0.014934111595153808, 0.015548319816589355, 0.015044608116149903, 0.014843328475952149, 0.014919391632080078, 0.014852959632873535, 0.014910719871520995, 0.014924544334411621, 0.01482096004486084, 0.014870944023132325, 0.014802495956420898, 0.015010239601135254, 0.014884832382202149, 0.014985247611999512, 0.014882816314697265, 0.014840991973876953, 0.014947168350219727, 0.01490124797821045, 0.01485756778717041, 0.014783136367797852, 0.014888287544250488, 0.014848671913146972, 0.014833120346069336, 0.014715488433837891, 0.01481926441192627, 0.01477836799621582, 0.01472332763671875, 0.01487388801574707, 0.014911840438842774, 0.01489094352722168, 0.014870719909667969, 0.014812191963195801, 0.014826463699340821, 0.01480294418334961, 0.01489305591583252, 0.0148602876663208, 0.01491532802581787, 0.014870783805847169, 0.014757887840270996, 0.014874624252319337, 0.015093759536743164, 0.014888959884643555, 0.014820799827575684, 0.014901951789855957, 0.014843008041381837, 0.014842623710632325, 0.014826656341552735, 0.01497993564605713, 0.01526153564453125, 0.015292256355285645, 0.01506390380859375, 0.014968511581420898, 0.014985631942749024, 0.014911775588989257, 0.015009311676025391, 0.014891200065612793, 0.014800895690917968, 0.014813183784484863, 0.014856224060058593, 0.014853471755981446, 0.014798784255981445, 0.014901280403137207, 0.01485654354095459, 0.014849632263183594, 0.014870559692382813, 0.014833663940429688, 0.01485689640045166, 0.014947487831115722, 0.014863200187683105, 0.014810688018798828, 0.014782912254333496, 0.015089568138122558, 0.01487235164642334, 0.014764351844787598, 0.014866687774658204, 0.014825216293334961, 
0.014847264289855958, 0.014777055740356445, 0.014790656089782715, 0.014827520370483398, 0.014767871856689454, 0.01490873622894287, 0.01488582420349121, 0.014792832374572754, 0.014811008453369141, 0.014800895690917968, 0.014837759971618653, 0.015011679649353028, 0.014821536064147949, 0.01477222442626953, 0.014784511566162109, 0.014850048065185547, 0.016857088088989256, 0.01634105682373047, 0.015107456207275391, 0.01492137622833252, 0.014857119560241699, 0.014845952033996582, 0.014750816345214844, 0.01483356761932373, 0.014824447631835937, 0.015148096084594727, 0.014826399803161621, 0.014776351928710938, 0.014829216003417968, 0.014815584182739257, 0.014835488319396973, 0.01542576026916504, 0.014891008377075195, 0.014807040214538575, 0.014921728134155274, 0.014838879585266113, 0.015002623558044433, 0.01507472038269043, 0.014985407829284668, 0.015028512001037598, 0.014977024078369141, 0.01486847972869873, 0.014824607849121093, 0.014785375595092774, 0.014781503677368165, 0.014904255867004395, 0.014800895690917968, 0.014913536071777344, 0.014830911636352539, 0.014859199523925781, 0.014867584228515624, 0.014725760459899902, 0.01488486385345459, 0.014839808464050292, 0.014974080085754394, 0.014742239952087403, 0.01480521583557129, 0.014806976318359375, 0.014763104438781737, 0.014846176147460937, 0.014878560066223144, 0.014889087677001953, 0.014876992225646972, 0.014961055755615234, 0.014854144096374512, 0.014851391792297363, 0.014776512145996094, 0.014988863945007324, 0.014859199523925781, 0.01481113624572754, 0.014827232360839843, 0.014938400268554688, 0.01484716796875, 0.014839648246765137, 0.014843071937561035, 0.014860383987426758, 0.014830464363098145, 0.014771167755126952, 0.014882143974304199, 0.015006208419799804, 0.014782048225402833, 0.01477184009552002, 0.01480784034729004, 0.014864383697509765, 0.01490329647064209, 0.014971936225891113, 0.014914048194885255, 0.014885343551635743, 0.014870528221130372, 0.014782464027404785, 0.014870495796203613, 0.014882847785949707, 0.014880000114440918, 0.014946271896362304, 0.01488156795501709, 0.01484832000732422, 0.014864352226257324, 0.014851327896118163, 0.014821536064147949, 0.014996319770812989, 0.014979071617126465, 0.014938079833984375, 0.015054847717285156, 0.014940159797668457, 0.014896767616271973, 0.014928256034851074, 0.01504646396636963, 0.014929439544677734, 0.014954912185668945, 0.014946559906005859, 0.014904959678649903, 0.01490732765197754, 0.014964192390441895, 0.014947039604187012, 0.014827775955200195, 0.014828864097595216, 0.015259807586669921, 0.014938464164733887, 0.014887104034423828, 0.014942336082458495, 0.014886176109313965, 0.014838368415832519, 0.014899040222167969, 0.014897536277770997, 0.014974528312683106, 0.01498134422302246, 0.014936063766479492, 0.014921728134155274, 0.014819135665893554, 0.014782655715942382, 0.014858240127563477, 0.014858240127563477, 0.014776320457458495, 0.014796799659729003, 0.015081472396850586, 0.014751392364501953, 0.014860639572143555, 0.014948351860046387, 0.014831551551818848, 0.01485769557952881, 0.014867039680480957, 0.014816767692565918, 0.014817824363708496, 0.014816864013671875, 0.014882816314697265, 0.01487235164642334, 0.015159903526306152, 0.01487168025970459, 0.014988160133361817, 0.014877696037292481, 0.014856608390808105, 0.014793120384216308, 0.015179167747497559, 0.014947104454040527, 0.014911456108093262, 0.014870559692382813, 0.01487830352783203, 0.014786975860595703, 0.014905344009399414, 0.014855327606201172, 0.014875455856323242, 0.014874655723571778, 
0.015102016448974609, 0.014970463752746582, 0.014923456192016602, 0.01491532802581787, 0.015059616088867188, 0.01483187198638916, 0.014838047981262207, 0.014824640274047852, 0.014917311668395996, 0.014936287879943848, 0.014930463790893555, 0.014870207786560058, 0.014921343803405762, 0.014938912391662597, 0.014804767608642579, 0.014850272178649902, 0.014858240127563477, 0.014910528182983399, 0.014824000358581543, 0.014983551979064941, 0.014870528221130372, 0.014980928421020508, 0.014950592041015625, 0.01493404769897461, 0.014893024444580079, 0.014879776000976562, 0.014916383743286132, 0.014907584190368653, 0.014890751838684081, 0.014870112419128417, 0.014862079620361329, 0.014877599716186523, 0.014794879913330079, 0.014833151817321777, 0.01488707160949707, 0.014864607810974121, 0.014933568000793457, 0.014804703712463379, 0.014908127784729004, 0.014854144096374512, 0.01480294418334961, 0.014802816390991212, 0.01485632038116455, 0.01509990406036377, 0.01501974391937256, 0.014936320304870606, 0.014896256446838379, 0.01496566390991211, 0.014920703887939453, 0.015007935523986816, 0.014899200439453125, 0.01525228786468506, 0.014927359580993652, 0.014850560188293458, 0.014849696159362793, 0.014858271598815918, 0.014925215721130371, 0.014807135581970214, 0.014951231956481934, 0.014907199859619141, 0.014872447967529297, 0.014918975830078125, 0.01488588809967041, 0.015405055999755859, 0.015144512176513672, 0.015151552200317383, 0.015052800178527831, 0.015038463592529297, 0.014933888435363769, 0.014997632026672363, 0.014888959884643555, 0.014970879554748535, 0.014956031799316406, 0.014882687568664552, 0.014866527557373046, 0.01484438419342041, 0.014835647583007813, 0.014760064125061036, 0.014718976020812988, 0.014817279815673828, 0.014804384231567384, 0.014852704048156739, 0.014945280075073243, 0.014943231582641601, 0.014954496383666992, 0.015050815582275391, 0.014944031715393066, 0.01497219181060791, 0.015065024375915528, 0.016918560028076172, 0.014969504356384278, 0.01546675205230713, 0.015015520095825196, 0.014995871543884277, 0.014948351860046387, 0.014942208290100097, 0.015067135810852051, 0.016990207672119142, 0.015140447616577148, 0.01521059226989746, 0.015153216361999513, 0.015179391860961913, 0.015088255882263184, 0.015138815879821778, 0.015161343574523926, 0.015333375930786132, 0.015075551986694335, 0.014855968475341796, 0.014949440002441406, 0.014836544036865235, 0.01514857578277588, 0.01486076831817627, 0.014921792030334472, 0.014880831718444825, 0.01476534366607666, 0.01488969612121582, 0.014831071853637695, 0.014780960083007812, 0.014808927536010742, 0.015091872215270996, 0.014948351860046387, 0.014875904083251953, 0.014840576171875, 0.014882816314697265, 0.014767680168151855, 0.014830016136169433, 0.015335328102111816, 0.015159392356872558, 0.015058943748474121, 0.015028351783752442, 0.014864255905151367, 0.014933856010437011, 0.014901408195495605, 0.014833312034606933, 0.014932319641113282, 0.014886272430419923, 0.014869119644165039, 0.01480303955078125, 0.014863871574401855, 0.014981535911560059, 0.01490732765197754, 0.014923839569091797, 0.014970848083496094, 0.014874239921569825, 0.014995871543884277, 0.014835712432861328, 0.014907391548156738, 0.014785759925842286, 0.014920479774475097, 0.01488652801513672, 0.014856063842773437, 0.014844415664672851, 0.014834943771362305, 0.014850591659545899, 0.014803168296813964, 0.014926848411560058, 0.014971296310424804, 0.014934623718261719, 0.015005536079406738, 0.014909600257873535, 0.014929920196533204, 0.01486847972869873, 
0.014940159797668457, 0.014913536071777344, 0.014919072151184083, 0.014902144432067872, 0.014894816398620605, 0.014878080368041993, 0.01497548770904541, 0.014788736343383789, 0.014954719543457031, 0.01526352024078369, 0.015006912231445313, 0.014920512199401855, 0.01488691234588623, 0.016080896377563478, 0.01765171241760254, 0.015241215705871582, 0.015108096122741698, 0.01551360034942627, 0.015087103843688965, 0.01500547218322754, 0.01490403175354004, 0.014951807975769043, 0.014908032417297363, 0.014854144096374512, 0.015176735877990722, 0.014900192260742188, 0.014861568450927735, 0.015791744232177735, 0.015386112213134765, 0.015179648399353027, 0.01508176040649414, 0.015000255584716796, 0.015105088233947754, 0.014895296096801758, 0.01491811180114746, 0.014936351776123047, 0.01489305591583252, 0.014951680183410644, 0.014994175910949707, 0.01503007984161377, 0.014899392127990723, 0.015005696296691894, 0.015040127754211426, 0.014965120315551758, 0.01539891242980957, 0.014996800422668457, 0.01485689640045166, 0.014803071975708008, 0.01495580768585205, 0.015084128379821777, 0.014853792190551758, 0.015077728271484375, 0.014851712226867675, 0.014876095771789551, 0.014873536109924316, 0.014827520370483398, 0.014927647590637206, 0.015046879768371582, 0.015005599975585938, 0.014933695793151855, 0.015080127716064453, 0.015042271614074707, 0.01508556842803955, 0.014956543922424317, 0.01494758415222168, 0.014969120025634766, 0.014862815856933593, 0.01489305591583252, 0.014868000030517577, 0.015012543678283691, 0.014847935676574707, 0.014888352394104003, 0.014844351768493653, 0.014947936058044434, 0.014798848152160645, 0.014813599586486816, 0.014936063766479492, 0.015395936012268066, 0.016021888732910158, 0.015702272415161134, 0.015434016227722168, 0.01524665641784668, 0.01502182388305664, 0.015195072174072266, 0.015330975532531738, 0.014860639572143555, 0.014858240127563477, 0.014958592414855957, 0.014942144393920899, 0.015081536293029786, 0.015691776275634766, 0.015216608047485352, 0.014897151947021485, 0.014847999572753906, 0.014938112258911132, 0.01508556842803955, 0.01490073585510254, 0.014946816444396972, 0.01484124755859375, 0.01487286376953125, 0.014934592247009278, 0.014839839935302735, 0.014911199569702148, 0.01488486385345459, 0.014923040390014648, 0.015031007766723633, 0.014921024322509765, 0.014877280235290528, 0.014862144470214844, 0.014846240043640137, 0.014827360153198242, 0.01483894443511963, 0.014805824279785156, 0.014811327934265137, 0.014894335746765137, 0.015829631805419922, 0.014844032287597656, 0.014790656089782715, 0.014870528221130372, 0.014867839813232421, 0.014836352348327636, 0.015054847717285156, 0.015675007820129396, 0.014940544128417969, 0.014841856002807617, 0.014853152275085449, 0.014864831924438477, 0.014909312248229981, 0.01563100814819336, 0.015199232101440429, 0.014928288459777832, 0.014915679931640625, 0.014811488151550293, 0.014866592407226563, 0.014872575759887695, 0.01490054416656494, 0.014869183540344237, 0.014824895858764649, 0.014889535903930664, 0.014954496383666992, 0.014773695945739747, 0.014828096389770508, 0.014816800117492675, 0.014848480224609375, 0.01490944004058838, 0.014915743827819824, 0.014909279823303223, 0.01483334445953369, 0.014973247528076172, 0.014796799659729003, 0.014882847785949707, 0.01492147159576416, 0.014868703842163086, 0.015491711616516112, 0.015365344047546386, 0.015029024124145508, 0.014956128120422364, 0.014848287582397462, 0.015022208213806152, 0.014918720245361328, 0.01483801555633545, 0.014805695533752442, 0.014942527770996094, 
0.01492140769958496, 0.01491763210296631, 0.015009792327880859, 0.014833024024963379, 0.015016736030578614, 0.015006943702697754, 0.015351903915405274, 0.014982975959777833, 0.015128479957580567, 0.014836000442504884, 0.014857983589172364, 0.014905376434326172, 0.014995807647705079, 0.01513308811187744, 0.01489510440826416, 0.015122688293457032, 0.014925503730773925, 0.014924863815307617, 0.014865407943725586, 0.014775615692138672, 0.015090368270874023, 0.01497929573059082, 0.015092991828918456, 0.015061535835266114, 0.01496678352355957, 0.015242815971374512, 0.014950207710266113, 0.014920415878295899, 0.015027135848999023, 0.015014880180358887, 0.015017984390258789, 0.014980607986450196, 0.014929439544677734, 0.014980064392089844, 0.014952192306518555, 0.015085184097290039, 0.01509440040588379, 0.014888416290283204, 0.01503286361694336, 0.014942208290100097, 0.014825695991516114, 0.015061056137084961, 0.015355615615844727, 0.014995231628417968, 0.015113504409790039, 0.014936863899230957, 0.015055007934570312, 0.01489254379272461, 0.014903807640075683, 0.014954015731811524, 0.014839967727661134, 0.014878080368041993, 0.014822336196899414]",tokens/s,66.83735998709885,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in 
get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.638144,13880.918016,0.0,13478.395904,13476.849152,s,1,7.36861669921875,7.36861669921875,0.0,7.36861669921875,7.36861669921875,7.36861669921875,7.36861669921875,[7.36861669921875],,kWh,8.536695474989148e-06,9.340445897731681e-07,4.368892384004619e-06,1.3839632448766935e-05,,MB,1220.558848,14115.79904,0.0,13702.791168,13671.637504,s,10,12.48476940917969,1.2484769409179688,0.005002776956231827,1.2489312744140624,1.2538059692382812,1.2551531677246095,1.256230926513672,"[1.239541015625, 1.240701904296875, 1.2499635009765624, 1.2467474365234374, 1.247649658203125, 1.253506591796875, 1.2478990478515626, 1.2511883544921876, 1.251071533203125, 1.2565003662109375]",tokens/s,205.04984241981325,kWh,3.64970023104172e-05,4.024844575132593e-06,2.426346385519945e-05,6.478531074074925e-05,tokens/kWh,3951513.037028297,MB,1268.846592,14115.79904,0.0,13702.791168,13671.640064,s,10,37.65105639648437,3.765105639648438,0.0033735412183279532,3.7657999267578126,3.76849091796875,3.769096044921875,3.769580146484375,"[3.758876953125, 3.7631982421875, 3.759820556640625, 3.768198486328125, 3.766015625, 3.769701171875, 3.7651357421875, 3.7661689453125, 3.765584228515625, 3.7683564453125]",tokens/s,16.732598240160545,kWh,0.00011008657440666525,1.2143687170692753e-05,7.322514191340085e-05,0.00019545540349075885,tokens/kWh,322324.1664074979,,s,630,37.64839903259281,0.05975936354379805,0.00029764083370002124,0.05974302291870117,0.0600270320892334,0.060157479858398435,0.0612939622116089,"[0.06089161682128906, 0.05930793762207031, 0.05935523223876953, 0.059290878295898436, 0.059214431762695315, 0.059399520874023434, 0.059435039520263674, 0.059367198944091794, 0.059464702606201174, 0.0594530258178711, 0.05948796844482422, 0.05944790267944336, 0.05942195129394531, 0.0594870719909668, 0.05949158477783203, 0.059723678588867186, 0.059722591400146484, 0.05967862319946289, 0.059509918212890624, 0.05941686248779297, 0.05930604934692383, 0.05940035247802734, 0.059445823669433594, 0.05946895980834961, 0.05941718292236328, 0.059560062408447266, 0.0598419189453125, 0.05967526245117188, 0.059600894927978515, 0.05967359924316406, 0.05965897750854492, 0.05970937728881836, 0.05966473770141602, 0.05987116622924805, 0.059931808471679685, 
0.059746814727783204, 0.059656608581542966, 0.05969919967651367, 0.05967052841186524, 0.059660350799560544, 0.05960480117797851, 0.05974848175048828, 0.05963792037963867, 0.05962956619262695, 0.05956796646118164, 0.05966966247558594, 0.059716415405273435, 0.05967465591430664, 0.05975449752807617, 0.06007603073120117, 0.05986918258666992, 0.06025830459594726, 0.059834369659423826, 0.05988524627685547, 0.05977225494384766, 0.05965628814697266, 0.059679615020751954, 0.05979868698120117, 0.05983113479614258, 0.05986832046508789, 0.059999073028564456, 0.059930622100830076, 0.059931873321533206, 0.06136832046508789, 0.05950239944458008, 0.059275455474853515, 0.05922582244873047, 0.059203872680664064, 0.05940224075317383, 0.05944220733642578, 0.05941347122192383, 0.05942620849609375, 0.05941862487792969, 0.05940838241577148, 0.059464126586914065, 0.05950479888916015, 0.059568126678466796, 0.05967452621459961, 0.059736160278320315, 0.060383262634277346, 0.060066814422607424, 0.05978006362915039, 0.05946588897705078, 0.05948400115966797, 0.05949184036254883, 0.059474079132080075, 0.0596157112121582, 0.05951884841918945, 0.05953945541381836, 0.0596234245300293, 0.059533344268798825, 0.059727840423583985, 0.05967801666259766, 0.05976953506469727, 0.05973209762573242, 0.05986291122436523, 0.059881473541259764, 0.05989990234375, 0.059883167266845706, 0.05979171371459961, 0.05972979354858399, 0.05970739364624023, 0.05973811340332031, 0.05967884826660156, 0.05986729431152344, 0.059670368194580076, 0.05969295883178711, 0.05958399963378906, 0.05968751907348633, 0.05978508758544922, 0.059805824279785154, 0.05991116714477539, 0.05989468765258789, 0.06002841567993164, 0.06006950378417969, 0.05986566543579101, 0.05989744186401367, 0.05987583923339844, 0.05983663940429688, 0.05988771057128906, 0.059880897521972655, 0.060228160858154293, 0.05984889602661133, 0.05983417510986328, 0.05983983993530274, 0.0598372802734375, 0.061480960845947265, 0.05956764984130859, 0.05937404632568359, 0.05927526473999024, 0.059293281555175784, 0.0592982063293457, 0.05939814376831055, 0.05934422302246094, 0.059335105895996096, 0.05941059112548828, 0.05933855819702148, 0.05938995361328125, 0.0593853759765625, 0.0595810546875, 0.05953955078125, 0.059585823059082034, 0.05996332931518555, 0.05978806304931641, 0.05966604614257812, 0.05961561584472656, 0.05954576110839844, 0.05957411193847656, 0.05960704040527344, 0.05955583953857422, 0.0594813117980957, 0.05943084716796875, 0.0594051513671875, 0.05949817657470703, 0.05963167953491211, 0.059660350799560544, 0.05960108947753906, 0.05979676818847656, 0.059798240661621094, 0.05983027267456055, 0.05979040145874023, 0.0597589111328125, 0.059635711669921876, 0.059609729766845705, 0.059610622406005856, 0.05968537521362305, 0.05980979156494141, 0.05976678466796875, 0.059690654754638674, 0.05968918228149414, 0.05973209762573242, 0.05975820922851562, 0.059773086547851566, 0.059813087463378906, 0.06007295989990234, 0.059889278411865234, 0.05972723388671875, 0.05989023971557617, 0.059791614532470706, 0.05974444961547851, 0.05969680023193359, 0.05975177764892578, 0.05982310485839844, 0.05982822418212891, 0.05979750442504883, 0.05980364990234375, 0.059815937042236325, 0.059944286346435546, 0.05983308792114258, 0.06141164779663086, 0.05964054489135742, 0.05938988876342773, 0.05938175964355469, 0.05940633773803711, 0.05935337448120117, 0.059305694580078124, 0.05935849761962891, 0.059329086303710935, 0.05944950485229492, 0.05933670425415039, 0.059557662963867185, 0.05958639907836914, 0.05964838409423828, 
0.05971686553955078, 0.059783935546875, 0.06015955352783203, 0.05985686492919922, 0.05959507369995117, 0.05956774520874023, 0.05965673446655274, 0.05974617767333985, 0.05958787155151367, 0.05954032135009766, 0.05969510269165039, 0.05976678466796875, 0.059731201171875, 0.059582847595214844, 0.0595807991027832, 0.059721920013427736, 0.05979228973388672, 0.05984316635131836, 0.05998828887939453, 0.06005744171142578, 0.059816097259521486, 0.05980364990234375, 0.05978054428100586, 0.05979808044433594, 0.05982396697998047, 0.059680927276611326, 0.05973011016845703, 0.060284191131591794, 0.05973238372802735, 0.059805118560791015, 0.059697662353515625, 0.06007727813720703, 0.05992752075195312, 0.0599285774230957, 0.06003907012939453, 0.06035260772705078, 0.06025539016723633, 0.06024892807006836, 0.060335262298583985, 0.06025814437866211, 0.06001545715332031, 0.05994281768798828, 0.059848960876464845, 0.05992556762695313, 0.05994387054443359, 0.0599818229675293, 0.05989295959472656, 0.059992862701416017, 0.05984972763061523, 0.061110145568847654, 0.05972351837158203, 0.05932521438598633, 0.05935305786132813, 0.05937772750854492, 0.059496288299560544, 0.05947574234008789, 0.059478496551513674, 0.05944934463500977, 0.05957632064819336, 0.059584510803222655, 0.05970700836181641, 0.059560321807861326, 0.05971148681640625, 0.059668479919433595, 0.05981798553466797, 0.05982003021240234, 0.05975558471679687, 0.05971551895141602, 0.05959987258911133, 0.05966553497314453, 0.059663230895996094, 0.059563041687011715, 0.059575263977050784, 0.059494400024414064, 0.05955379104614258, 0.05961641693115234, 0.059646785736083986, 0.05964297485351563, 0.05978927993774414, 0.05985174560546875, 0.059807743072509766, 0.059998207092285157, 0.059990016937255856, 0.05982112121582031, 0.05981475067138672, 0.05987747192382813, 0.05984796905517578, 0.0597347526550293, 0.05973331069946289, 0.05962105560302734, 0.059716575622558596, 0.05959683227539062, 0.05974790573120117, 0.05984294509887695, 0.05997987365722656, 0.059918270111083985, 0.05999942398071289, 0.0599192008972168, 0.05994601440429687, 0.05993983840942383, 0.06000841522216797, 0.060191967010498046, 0.060076831817626956, 0.05999411010742187, 0.05993267059326172, 0.059868896484375, 0.05994035339355469, 0.05982287979125977, 0.05985279846191406, 0.05983955383300781, 0.05995206451416016, 0.06002998352050781, 0.061359710693359375, 0.05965865707397461, 0.059394046783447264, 0.059434398651123044, 0.059428607940673825, 0.05955670547485352, 0.059542625427246094, 0.059447776794433596, 0.05943494415283203, 0.05959635162353515, 0.059362239837646484, 0.05948416137695312, 0.05962303924560547, 0.05961705780029297, 0.05968137741088867, 0.0598548469543457, 0.05989379119873047, 0.05987324905395508, 0.0597724494934082, 0.059711360931396486, 0.0596319694519043, 0.05964620971679688, 0.059682334899902344, 0.059685344696044924, 0.05967462539672851, 0.05978726577758789, 0.05972377777099609, 0.059651840209960935, 0.05957247924804687, 0.05959884643554687, 0.05977907180786133, 0.05987100982666015, 0.059842784881591796, 0.0599400634765625, 0.05989251327514648, 0.05982614517211914, 0.059731998443603516, 0.05975363159179688, 0.05975331115722656, 0.05992428970336914, 0.05993695831298828, 0.059923839569091794, 0.059961982727050785, 0.05989785766601562, 0.05986105728149414, 0.05992607879638672, 0.059969921112060544, 0.05990399932861328, 0.06005526351928711, 0.06011318588256836, 0.06014156723022461, 0.060154945373535156, 0.060130241394042966, 0.06018025588989258, 0.06005785751342774, 
0.05993830490112305, 0.060064224243164065, 0.060237823486328126, 0.06009004974365234, 0.060094497680664063, 0.06009395217895508, 0.060007198333740235, 0.059998207092285157, 0.06113299179077149, 0.05961804962158203, 0.05930390548706055, 0.05937343978881836, 0.05929571151733398, 0.05953126525878906, 0.05954927825927735, 0.05951532745361328, 0.059535358428955076, 0.059598400115966794, 0.05952153778076172, 0.059590625762939456, 0.059552799224853514, 0.05961004638671875, 0.05959395217895508, 0.059593505859375, 0.0595926399230957, 0.05966444778442383, 0.05972371292114258, 0.05959481430053711, 0.059529216766357425, 0.05962137603759766, 0.05957164764404297, 0.05954172897338867, 0.05970159912109375, 0.05965379333496094, 0.05962172698974609, 0.05965619277954102, 0.05963980865478516, 0.05968076705932617, 0.05979497528076172, 0.05981436920166015, 0.05976675033569336, 0.05991632080078125, 0.05979340744018555, 0.059832481384277346, 0.059891326904296875, 0.05988288116455078, 0.059695327758789066, 0.0596998405456543, 0.059649375915527346, 0.059837089538574216, 0.05969004821777344, 0.05975894546508789, 0.059566688537597653, 0.05968310546875, 0.059823486328125, 0.059815937042236325, 0.05999241638183594, 0.05988153457641601, 0.06017123031616211, 0.060260704040527344, 0.05990876770019531, 0.06007600021362305, 0.060006401062011716, 0.059947006225585936, 0.06003302383422852, 0.059975296020507815, 0.059951072692871095, 0.060026432037353514, 0.05997040176391601, 0.060071937561035155, 0.05997158432006836, 0.06174364852905274, 0.05974211120605469, 0.059445632934570315, 0.05939718246459961, 0.05933561706542969, 0.05932428741455078, 0.05946790313720703, 0.05944319915771484, 0.059364990234375, 0.059612766265869144, 0.05941904067993164, 0.059660255432128904, 0.05956233596801758, 0.05958659362792969, 0.059672607421875, 0.05970870590209961, 0.06005632019042969, 0.05994623947143555, 0.05975244903564453, 0.059665119171142575, 0.059551742553710936, 0.05959065628051758, 0.059625473022460934, 0.05961884689331055, 0.05968329620361328, 0.059641857147216794, 0.059463680267333986, 0.05964915084838867, 0.059722625732421875, 0.05977436828613281, 0.05974883270263672, 0.05972918319702149, 0.059829086303710935, 0.059837535858154295, 0.05978179168701172, 0.05987558364868164, 0.05973376083374023, 0.05975020980834961, 0.05976518249511719, 0.05978131103515625, 0.05981158447265625, 0.0599466552734375, 0.059787681579589844, 0.05979340744018555, 0.05986681747436524, 0.05991862487792969, 0.059799457550048826, 0.05989593505859375, 0.06002678298950195, 0.06033216094970703, 0.0602191047668457, 0.06021459197998047, 0.059898815155029296, 0.059830303192138674, 0.05973603057861328, 0.05974854278564453, 0.05982099151611328, 0.059937664031982425, 0.05986729431152344, 0.05982191848754883, 0.05982003021240234, 0.059907615661621096, 0.05986288070678711, 0.06142083358764648, 0.05956003189086914, 0.05936374282836914, 0.05936873626708984, 0.05948409652709961, 0.059646976470947265, 0.0594637451171875, 0.05937350463867187, 0.0594634895324707, 0.059617568969726566, 0.05950864028930664, 0.059549503326416016, 0.059523136138916015, 0.059603073120117187, 0.05960902404785156, 0.05960300827026367, 0.05967824172973633, 0.05990553665161133, 0.05957660675048828, 0.05957292938232422, 0.059529216766357425, 0.05960819244384766, 0.05959756851196289, 0.05957030487060547, 0.05961523056030273, 0.059807743072509766, 0.05974835205078125, 0.05978217697143555, 0.05975904083251953, 0.05990806579589844, 0.059922080993652344, 0.059835296630859375, 0.05995248031616211, 
0.059907936096191404, 0.05981267166137695, 0.05984188842773437, 0.05974492645263672, 0.059813663482666014, 0.059611358642578126, 0.05966438293457031, 0.05963292694091797, 0.05957436752319336, 0.05961587142944336, 0.05975859069824219, 0.059795391082763674, 0.059870655059814454, 0.059902591705322264, 0.059950241088867186, 0.059896671295166015, 0.06016543960571289, 0.05992435073852539, 0.06003558349609375, 0.05999811172485352, 0.06001913452148438, 0.059893726348876956, 0.05987942504882812, 0.05980979156494141, 0.05983846282958984, 0.05987311935424805, 0.05997910308837891, 0.05993247985839844, 0.05996239852905273, 0.06006576156616211, 0.061601856231689456, 0.05962998580932617, 0.05954470443725586, 0.059491199493408205, 0.05944630432128906, 0.05937660980224609, 0.059512832641601565, 0.05942272186279297, 0.059420894622802735, 0.0594409294128418, 0.05945328140258789, 0.05948963165283203, 0.05959123229980469, 0.05963187026977539, 0.06007807922363281, 0.05975356674194336, 0.05986601638793945, 0.05985827255249023, 0.05969919967651367, 0.05959952163696289, 0.059612415313720704, 0.05957097625732422, 0.05962732696533203, 0.059641246795654294, 0.0596234245300293, 0.05970774459838867, 0.05968716812133789, 0.05977487945556641, 0.05989401626586914, 0.059791358947753906, 0.059652095794677736, 0.059734016418457034, 0.05993983840942383, 0.05992755126953125, 0.05987936019897461, 0.05988351821899414, 0.05978521728515625, 0.059813953399658205, 0.05974393463134765, 0.05974809646606445, 0.059740734100341794, 0.05992819213867188, 0.05986956787109375, 0.05983846282958984, 0.05996953582763672, 0.06002687835693359, 0.059936767578125, 0.059842750549316405, 0.06024579238891602, 0.06024512100219727, 0.0601317138671875, 0.06008067321777344, 0.0600777587890625, 0.060033344268798826, 0.05992816162109375, 0.05990646362304688, 0.06018191909790039, 0.05991238403320313, 0.05979347229003906, 0.05977737426757813, 0.05980160140991211, 0.05995280075073242, 0.05994915390014648]",tokens/s,16.733779289116644,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 1248, in __init__ self.transformer = FalconModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in __init__ self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 721, in __init__ self.self_attention = FALCON_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 366, in __init__ self.query_key_value = FalconLinear(self.hidden_size, qkv_out_dim, bias=config.bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 450.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 424.12 MiB is free. Process 203040 has 14.32 GiB memory in use. Of the allocated memory 14.20 GiB is allocated by PyTorch, and 6.16 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,739.258368,3450.79808,0.0,3055.550464,2937.680896,s,1,7.3439462890625,7.3439462890625,0.0,7.3439462890625,7.3439462890625,7.3439462890625,7.3439462890625,[7.3439462890625],,kWh,7.681751145829215e-06,8.40129576004978e-07,3.286391518003695e-06,1.1808272239837887e-05,,MB,1068.863488,3520.004096,0.0,3114.27072,2817.473024,s,10,2.6321105651855463,0.2632110565185547,0.002689435497496622,0.26245011901855464,0.26671708374023434,0.26688536682128905,0.2670199932861328,"[0.26238107299804686, 0.2625191650390625, 0.26112960815429687, 0.26030831909179686, 0.2659143371582031, 0.25960931396484377, 0.26705364990234376, 0.26563711547851565, 0.2666796875, 0.2608782958984375]",tokens/s,972.603519723168,kWh,7.710339589473723e-06,8.499682402763044e-07,5.08705085326314e-06,1.3647358683013168e-05,tokens/kWh,18758208.525627933,MB,1094.914048,3520.004096,0.0,3114.27072,2877.80864,s,10,11.568907958984372,1.1568907958984376,0.0022785457809861944,1.1568141479492189,1.1604347778320312,1.1604911926269532,1.1605363244628906,"[1.1604222412109375, 1.15336181640625, 1.15709375, 1.1544727783203126, 1.1566710205078126, 1.160547607421875, 1.156957275390625, 1.1546573486328124, 1.156302001953125, 1.158422119140625]",tokens/s,54.45630670012757,kWh,3.367687294385985e-05,3.7132154301009313e-06,2.2316998847736745e-05,5.970708722169752e-05,tokens/kWh,1055151.1207719047,,s,630,11.56644199371338,0.01835943173605298,0.0003209969920466255,0.018289616584777832,0.018546284675598143,0.0187432110786438,0.019744583721160894,"[0.019453887939453126, 0.018830528259277345, 0.01853696060180664, 0.018365087509155272, 0.018214624404907228, 0.01823315238952637, 0.01832512092590332, 0.01828883171081543, 0.018188735961914063, 0.018196352005004884, 0.018296735763549805, 0.018154783248901366, 0.018243871688842773, 0.01816636848449707, 0.018135232925415037, 0.018240959167480468, 0.018158111572265625, 0.018204383850097657, 0.018325632095336913, 0.018389152526855468, 0.018384735107421876, 0.01846067237854004, 0.0198656005859375, 0.01984102439880371, 0.018390335083007813, 0.018277088165283204, 0.018292800903320312, 0.018261920928955077, 0.018371904373168945, 0.01853004837036133, 0.018463680267333984, 0.01850102424621582, 0.018246240615844726, 0.01829020881652832, 0.018315807342529297, 0.01853593635559082, 0.0183055362701416, 0.018237567901611327, 0.018239295959472657, 0.01830297660827637, 0.018343936920166014, 0.01822275161743164, 0.01828803253173828, 0.018283456802368165, 0.01830873680114746, 0.018313600540161134, 0.018397184371948243, 0.01855414390563965, 0.018504512786865233, 0.018479007720947266, 0.018550783157348632, 0.018451839447021483, 0.018412160873413085, 0.018460159301757813, 
0.01839699172973633, 0.018536256790161132, 0.01848566436767578, 0.018379232406616212, 0.018358272552490236, 0.018347936630249022, 0.018329376220703124, 0.018392608642578124, 0.018329727172851563, 0.019767904281616212, 0.01886630439758301, 0.01852332878112793, 0.01838368034362793, 0.018229248046875, 0.018263168334960937, 0.018137983322143555, 0.01817804718017578, 0.018249727249145507, 0.018104320526123048, 0.018143072128295898, 0.018239648818969726, 0.018147232055664063, 0.018089855194091797, 0.01811244773864746, 0.018112384796142578, 0.01812931251525879, 0.018153472900390624, 0.018151071548461913, 0.018116767883300782, 0.01916876792907715, 0.018240032196044923, 0.01825939178466797, 0.018201311111450194, 0.018190336227416993, 0.018190336227416993, 0.01817804718017578, 0.018300256729125976, 0.01826220893859863, 0.018134880065917968, 0.018178688049316407, 0.018118656158447266, 0.018307104110717773, 0.018170015335083008, 0.01837808036804199, 0.018197151184082033, 0.01817990493774414, 0.018240991592407226, 0.01823798370361328, 0.01827235221862793, 0.01822096061706543, 0.018210271835327148, 0.018172447204589843, 0.01827599906921387, 0.018237791061401366, 0.01822710418701172, 0.01826348876953125, 0.018206687927246095, 0.018285247802734376, 0.018279455184936524, 0.018396127700805665, 0.01829478454589844, 0.018339168548583983, 0.018384639739990234, 0.018406303405761718, 0.018521631240844726, 0.018417823791503907, 0.01848556709289551, 0.018378751754760742, 0.018345407485961914, 0.018546239852905273, 0.018379776000976563, 0.01834592056274414, 0.019687488555908204, 0.018814207077026367, 0.018500192642211914, 0.01828659248352051, 0.01818435287475586, 0.01825584030151367, 0.0180861759185791, 0.01811404800415039, 0.018075103759765624, 0.01810518455505371, 0.018165760040283203, 0.01836025619506836, 0.01811414337158203, 0.018104736328125, 0.018167808532714845, 0.018744895935058594, 0.019987136840820312, 0.019451263427734376, 0.01813747215270996, 0.01819647979736328, 0.018137088775634767, 0.018198528289794923, 0.01819148826599121, 0.018149728775024413, 0.018317855834960938, 0.018155519485473632, 0.018275936126708983, 0.0182706241607666, 0.01825529670715332, 0.018264223098754882, 0.018223520278930663, 0.01828976058959961, 0.018393375396728515, 0.01833184051513672, 0.018155168533325196, 0.018279296875, 0.018202272415161133, 0.01826201629638672, 0.018218399047851563, 0.018207712173461912, 0.01820044708251953, 0.0181777286529541, 0.01820198440551758, 0.018498495101928712, 0.01827840042114258, 0.018233343124389647, 0.01822105598449707, 0.01830297660827637, 0.01827020835876465, 0.018379968643188478, 0.018319328308105468, 0.018445152282714844, 0.01840447998046875, 0.018498432159423827, 0.018579456329345705, 0.018525888442993164, 0.01848521614074707, 0.01847881507873535, 0.018461376190185546, 0.01853398323059082, 0.018358911514282227, 0.018388864517211913, 0.01829052734375, 0.01924390411376953, 0.018632095336914064, 0.018447071075439452, 0.018308191299438475, 0.018262624740600586, 0.018225151062011717, 0.0180731201171875, 0.01809401512145996, 0.01814790344238281, 0.018214912414550782, 0.018124544143676757, 0.018218816757202147, 0.018151872634887694, 0.018225151062011717, 0.01819443130493164, 0.018356224060058594, 0.018220895767211913, 0.018206880569458007, 0.018116607666015624, 0.018200159072875977, 0.018231296539306642, 0.01842598342895508, 0.018198816299438477, 0.018211904525756835, 0.01816419219970703, 0.01823967933654785, 0.01822559928894043, 0.01823289680480957, 0.018163904190063477, 0.018222623825073243, 
0.01816428756713867, 0.018370559692382812, 0.018294015884399415, 0.018372480392456054, 0.0183604793548584, 0.01831599998474121, 0.018300832748413084, 0.01837065505981445, 0.01823539161682129, 0.018266111373901366, 0.018211904525756835, 0.018289600372314453, 0.018324960708618165, 0.018330144882202148, 0.018253055572509766, 0.01826278305053711, 0.018278079986572264, 0.018397504806518555, 0.018386943817138672, 0.01836851119995117, 0.018331647872924805, 0.018429439544677736, 0.01839923286437988, 0.018561119079589843, 0.018417888641357422, 0.018688192367553712, 0.018374656677246092, 0.018471200942993163, 0.01852582359313965, 0.018583263397216797, 0.018430335998535156, 0.018493440628051756, 0.018388256072998047, 0.019920671463012695, 0.019418975830078126, 0.018680192947387694, 0.018445472717285156, 0.01834480094909668, 0.018225151062011717, 0.01832352066040039, 0.018296512603759765, 0.018256128311157225, 0.01824947166442871, 0.018143487930297852, 0.01812227249145508, 0.018078176498413086, 0.018151071548461913, 0.018390752792358397, 0.0181429443359375, 0.018141120910644532, 0.018113504409790038, 0.018273759841918945, 0.018268159866333008, 0.018188831329345703, 0.01816166305541992, 0.018621856689453126, 0.018188896179199218, 0.018192800521850586, 0.018207679748535155, 0.018196863174438478, 0.018162975311279295, 0.018192895889282225, 0.018260480880737305, 0.018288639068603514, 0.018251264572143554, 0.018283008575439453, 0.018249887466430664, 0.01823030471801758, 0.018250560760498045, 0.01829478454589844, 0.018269664764404298, 0.01827280044555664, 0.018268159866333008, 0.018311168670654295, 0.018414880752563475, 0.01833462333679199, 0.018227008819580077, 0.018296831130981444, 0.018272256851196288, 0.018333120346069334, 0.01839366340637207, 0.018423999786376953, 0.01839286422729492, 0.018361568450927734, 0.018349056243896485, 0.018536224365234374, 0.018741151809692384, 0.018443391799926757, 0.01846784019470215, 0.018459903717041017, 0.018608896255493165, 0.018515552520751953, 0.01834435272216797, 0.018333311080932616, 0.018378400802612306, 0.018428512573242187, 0.01960960006713867, 0.018894847869873048, 0.01864076805114746, 0.018396863937377928, 0.01821446418762207, 0.018225471496582032, 0.018145439147949218, 0.018182559967041014, 0.018228607177734377, 0.01814790344238281, 0.018217023849487306, 0.01817366409301758, 0.018188352584838866, 0.01827043151855469, 0.01821696090698242, 0.018155296325683593, 0.01822492790222168, 0.018319807052612304, 0.018386335372924806, 0.018415647506713866, 0.018256351470947264, 0.018255231857299804, 0.018138944625854494, 0.018260896682739256, 0.0183045768737793, 0.018299327850341798, 0.018298879623413086, 0.01827436828613281, 0.01820460891723633, 0.018488895416259764, 0.018288383483886717, 0.018250431060791016, 0.018181535720825197, 0.01825424003601074, 0.018218591690063478, 0.018301536560058593, 0.01820467185974121, 0.018259967803955078, 0.018282112121582032, 0.018393184661865233, 0.01848758316040039, 0.018542015075683593, 0.021207616806030272, 0.018765823364257812, 0.018479103088378905, 0.01841766357421875, 0.018520320892333984, 0.018386016845703124, 0.01827702331542969, 0.018343936920166014, 0.01840332794189453, 0.018583839416503906, 0.01858460807800293, 0.01841231918334961, 0.01863462448120117, 0.01839926338195801, 0.0184597110748291, 0.018508384704589844, 0.01838729667663574, 0.018396480560302735, 0.018436800003051756, 0.018476736068725585, 0.018506048202514648, 0.019605951309204103, 0.019136512756347656, 0.01873945617675781, 0.01846451187133789, 
0.018296831130981444, 0.0182825927734375, 0.018261920928955077, 0.018325504302978517, 0.018173952102661133, 0.01813827133178711, 0.01819343948364258, 0.018146976470947266, 0.01817795181274414, 0.018106048583984374, 0.01817888069152832, 0.018148128509521484, 0.018239967346191405, 0.018333471298217774, 0.018268896102905274, 0.018224992752075196, 0.018268512725830077, 0.018243392944335936, 0.01820057678222656, 0.018336864471435548, 0.018174879074096678, 0.018181535720825197, 0.018180320739746094, 0.0182030086517334, 0.01832246398925781, 0.018213855743408204, 0.01823299217224121, 0.018257728576660158, 0.018354463577270507, 0.018555135726928712, 0.01847222328186035, 0.01841744041442871, 0.018292831420898437, 0.018260448455810548, 0.01829052734375, 0.018235200881958007, 0.018256032943725586, 0.018210752487182617, 0.0182806396484375, 0.018275871276855468, 0.01835238456726074, 0.01826883125305176, 0.018274303436279296, 0.018284543991088868, 0.018354175567626953, 0.01838489532470703, 0.018422975540161132, 0.018328096389770506, 0.018657087326049804, 0.01851430320739746, 0.018394880294799805, 0.01840995216369629, 0.018582592010498045, 0.01836934471130371, 0.018296192169189453, 0.01834566307067871, 0.018328447341918946, 0.018535999298095702, 0.019448320388793947, 0.019371519088745116, 0.018810911178588866, 0.01856787109375, 0.018382303237915038, 0.018267711639404296, 0.018181951522827148, 0.018045024871826174, 0.018043296813964844, 0.01807200050354004, 0.018036800384521483, 0.01808332824707031, 0.018125247955322266, 0.01801420783996582, 0.01801625633239746, 0.01823744010925293, 0.018078720092773438, 0.01812761688232422, 0.018141311645507814, 0.018135168075561522, 0.018102272033691406, 0.018104320526123048, 0.018193471908569337, 0.018127552032470705, 0.018102527618408203, 0.018108415603637695, 0.018481151580810547, 0.01951900863647461, 0.018546688079833985, 0.018257919311523436, 0.018321887969970703, 0.018427616119384767, 0.018204959869384765, 0.018239168167114257, 0.01843132781982422, 0.018236703872680664, 0.01811404800415039, 0.018187488555908203, 0.01828963279724121, 0.01835212707519531, 0.018501312255859374, 0.018237760543823242, 0.01828220748901367, 0.01820086479187012, 0.018354175567626953, 0.018368543624877928, 0.018208703994750976, 0.018222143173217773, 0.018323551177978514, 0.018485536575317384, 0.01850569534301758, 0.01850022315979004, 0.018366464614868162, 0.0183767032623291, 0.018483327865600585, 0.018435007095336915, 0.01855526351928711, 0.018348415374755858, 0.018546880722045897, 0.018314720153808594, 0.018475391387939452, 0.018448671340942382, 0.018421472549438475, 0.01836828804016113, 0.01953596878051758, 0.01903545570373535, 0.018678112030029295, 0.01847270393371582, 0.018279008865356446, 0.018192384719848635, 0.018163711547851562, 0.018322656631469727, 0.01822115135192871, 0.018344640731811523, 0.018388351440429686, 0.01833228874206543, 0.018481151580810547, 0.01835212707519531, 0.018696191787719727, 0.01840153694152832, 0.01828428840637207, 0.018237056732177733, 0.018213375091552735, 0.018179616928100585, 0.01824188804626465, 0.018106367111206053, 0.01816991996765137, 0.01833568000793457, 0.018694143295288086, 0.018351551055908202, 0.01835475158691406, 0.01827164840698242, 0.018219743728637695, 0.018211936950683592, 0.018237344741821288, 0.018387840270996093, 0.018210079193115233, 0.01819107246398926, 0.01822892761230469, 0.018198495864868165, 0.018346208572387696, 0.018391168594360352, 0.018366464614868162, 0.018323455810546875, 0.0182108154296875, 0.01823766326904297, 
0.018157407760620116, 0.018175935745239256, 0.01824563217163086, 0.01819647979736328, 0.018183839797973632, 0.018283039093017577, 0.01824924850463867, 0.0182989444732666, 0.01838307189941406, 0.018414976119995118, 0.01850227165222168, 0.01844428825378418, 0.018307104110717773, 0.018318431854248047, 0.018342720031738282, 0.01848860740661621, 0.018481952667236328, 0.018446079254150392, 0.01841177558898926, 0.018291711807250977, 0.018338048934936523, 0.019400863647460936, 0.018782207489013672, 0.018579391479492186, 0.018427967071533203, 0.018279808044433594, 0.018155616760253908, 0.01806800079345703, 0.018061216354370118, 0.01883145523071289, 0.018276351928710938, 0.01805721664428711, 0.018070592880249023, 0.018035648345947265, 0.018136287689208986, 0.018141759872436523, 0.01809667205810547, 0.01801593589782715, 0.018129215240478516, 0.018242496490478516, 0.018379520416259766, 0.018542400360107424, 0.018299072265625, 0.018155519485473632, 0.01817215919494629, 0.018267135620117187, 0.01833839988708496, 0.01826972770690918, 0.018170560836791992, 0.018120800018310547, 0.018116640090942382, 0.018321056365966797, 0.018155839920043944, 0.01816582489013672, 0.01814313507080078, 0.018141056060791017, 0.018317312240600587, 0.018141183853149414, 0.018183231353759766, 0.018112991333007814, 0.018133216857910157, 0.021786880493164063, 0.01957683181762695, 0.018312736511230467, 0.01821129608154297, 0.01817190361022949, 0.018241535186767577, 0.018714399337768556, 0.018602207183837892, 0.018382848739624022, 0.01840460777282715, 0.018228992462158203, 0.018355199813842774, 0.018331680297851562, 0.018513343811035157, 0.01847555160522461, 0.01839030456542969, 0.01856988716125488, 0.018596960067749024, 0.018422271728515623, 0.018411455154418947, 0.018291072845458986, 0.018446495056152343, 0.018296831130981444]",tokens/s,54.46791678395302,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.35552,15021.768704,0.0,14619.246592,14483.4816,s,1,7.912373046875,7.912373046875,0.0,7.912373046875,7.912373046875,7.912373046875,7.912373046875,[7.912373046875],,kWh,8.995470108372197e-06,9.845388316002827e-07,5.082504066000748e-06,1.5062513005973228e-05,,MB,1203.048448,15143.40352,0.0,14730.395648,14577.604608,s,10,13.156159790039062,1.3156159790039061,0.005337471729525891,1.3149917602539063,1.3230593750000001,1.3232458129882814,1.3233949633789064,"[1.31108349609375, 1.30609228515625, 1.31368798828125, 1.31493603515625, 1.3150474853515626, 1.3189141845703125, 1.3104158935546875, 1.3234322509765626, 1.3230179443359376, 1.3195322265625]",tokens/s,194.58565727806496,kWh,3.837358100916693e-05,4.2320856865374e-06,2.545768703279827e-05,6.806335372850261e-05,tokens/kWh,3761201.674268894,MB,1251.729408,15158.083584,0.0,14745.075712,14577.607168,s,10,40.71429443359375,4.071429443359376,0.002023012224082289,4.071148071289063,4.074223291015625,4.07442060546875,4.07457845703125,"[4.06958251953125, 
4.069635009765625, 4.07135595703125, 4.070940185546875, 4.06946923828125, 4.0686845703125, 4.073430419921875, 4.072399169921875, 4.074179443359375, 4.074617919921875]",tokens/s,15.473680896706904,kWh,0.00011900352335374536,1.3127022046016977e-05,7.903164655860273e-05,0.00021116219195836506,tokens/kWh,298348.86357128614,,s,630,40.710387397766084,0.06461966253613668,0.0002519051176331557,0.06460451126098632,0.06491002655029297,0.06499120292663575,0.06546880096435546,"[0.06547280120849609, 0.06445116424560547, 0.06410765075683594, 0.06448198699951171, 0.06415328216552735, 0.06423776245117188, 0.06427433776855469, 0.06426665496826171, 0.0643051528930664, 0.06454431915283203, 0.06434457397460938, 0.06443321228027343, 0.06432447814941407, 0.06427177429199218, 0.06438755035400391, 0.06481632232666015, 0.06460284423828125, 0.06447740936279298, 0.06432895660400391, 0.06461628723144532, 0.06431001281738281, 0.06424352264404297, 0.06426393890380859, 0.0644881591796875, 0.06451776123046875, 0.0645999984741211, 0.06454447937011719, 0.06457161712646485, 0.06433113861083985, 0.0645841293334961, 0.06484435272216797, 0.06482940673828125, 0.06467337799072266, 0.06448713684082032, 0.0645782699584961, 0.06464848327636719, 0.06454972839355469, 0.06449510192871094, 0.0645083236694336, 0.0645362548828125, 0.06472528076171875, 0.06469017791748047, 0.06466764831542969, 0.06503337860107422, 0.06448419189453125, 0.064829345703125, 0.06486640167236328, 0.06481100463867187, 0.06459919738769532, 0.06479753875732422, 0.06470041656494141, 0.06462576293945313, 0.06470317077636718, 0.06461052703857421, 0.06483468627929688, 0.0648037109375, 0.06470861053466796, 0.06492774200439454, 0.06467378997802735, 0.06483081817626953, 0.06495423889160157, 0.06493385314941406, 0.06489376068115234, 0.06544268798828125, 0.06426588439941407, 0.06404115295410157, 0.06445206451416016, 0.0643686752319336, 0.06439497375488282, 0.06420985412597656, 0.0642989730834961, 0.06430313873291016, 0.06428262329101563, 0.06428169250488282, 0.06420572662353516, 0.0642088623046875, 0.06452227020263672, 0.06477388763427734, 0.06456755065917968, 0.06469631958007813, 0.06442540740966797, 0.06427295684814453, 0.06448947143554687, 0.06453453063964844, 0.06446284484863281, 0.06468812561035156, 0.06448127746582032, 0.064505859375, 0.06466339111328125, 0.06458163452148437, 0.06445891571044922, 0.06469017791748047, 0.06480659484863281, 0.06479904174804688, 0.0647779541015625, 0.06451443481445313, 0.06449878692626954, 0.06460089874267579, 0.06465948486328126, 0.06446800231933594, 0.06449772644042968, 0.064540771484375, 0.0648240966796875, 0.06460211181640625, 0.06460415649414063, 0.06469852447509766, 0.0646756820678711, 0.06458306884765624, 0.06457609558105469, 0.0645990753173828, 0.06480995178222657, 0.06463459014892578, 0.06469865417480469, 0.06473308563232422, 0.06469337463378906, 0.06450070190429688, 0.06461427307128906, 0.06482275390625, 0.0649296646118164, 0.06469615936279297, 0.06474412536621094, 0.06467814636230469, 0.0650035171508789, 0.06478851318359374, 0.06505049896240235, 0.06488483428955077, 0.06581267547607422, 0.06436483001708984, 0.0642841567993164, 0.06442444610595703, 0.06404262542724609, 0.06438877105712891, 0.06450003051757812, 0.06444892883300782, 0.06441350555419922, 0.06424393463134766, 0.06429078674316406, 0.0643420181274414, 0.06452591705322265, 0.06450422668457031, 0.06455296325683593, 0.06454476928710938, 0.06449152374267578, 0.06452214050292969, 0.06450502777099609, 0.06452009582519531, 0.06424473571777344, 0.06456864166259765, 
0.0645528335571289, 0.0645797119140625, 0.0643646697998047, 0.06442355346679687, 0.06442489624023437, 0.06467788696289062, 0.06525897979736328, 0.06475984191894531, 0.06452275085449219, 0.06448947143554687, 0.0645255355834961, 0.06452713775634765, 0.06456857299804687, 0.0645841293334961, 0.06454918670654297, 0.06472294616699219, 0.0647352294921875, 0.0646328353881836, 0.0644701156616211, 0.06444124603271484, 0.06484786987304687, 0.06492364501953125, 0.06468608093261718, 0.0647022705078125, 0.06466713714599609, 0.06505133056640625, 0.06463897705078125, 0.06479462432861328, 0.06463811492919921, 0.06499727630615235, 0.06482189178466796, 0.06485228729248046, 0.06465740966796875, 0.06462054443359375, 0.06464726257324219, 0.06476790618896484, 0.06488243103027344, 0.06492991638183594, 0.06462448120117187, 0.06483177947998046, 0.0650478744506836, 0.06555270385742187, 0.06443532562255859, 0.06401299285888672, 0.06427871704101562, 0.06460006713867188, 0.06443145751953125, 0.06432425689697266, 0.06435635375976563, 0.06426009368896485, 0.06437824249267578, 0.06455101013183594, 0.0646313247680664, 0.0643604507446289, 0.06443417358398437, 0.06493389129638671, 0.06493119812011719, 0.0645269775390625, 0.06448729705810546, 0.06419644927978516, 0.06436022186279297, 0.06436700439453125, 0.0644854736328125, 0.06424371337890625, 0.0644764175415039, 0.06457625579833984, 0.06460211181640625, 0.06466047668457031, 0.06460243225097656, 0.06438387298583985, 0.06485769653320313, 0.06482147216796875, 0.06488243103027344, 0.06456361389160156, 0.06455075073242188, 0.0644026870727539, 0.06483596801757813, 0.06447065734863282, 0.06445692443847656, 0.06441423797607422, 0.06482758331298828, 0.06463875579833984, 0.06460553741455079, 0.06438162994384766, 0.06463843536376954, 0.06469891357421875, 0.06508515167236328, 0.06490550231933594, 0.06477823638916015, 0.06458573150634765, 0.06461440277099609, 0.06482249450683594, 0.06479312133789063, 0.06445260620117188, 0.06459008026123046, 0.06468402862548828, 0.06484786987304687, 0.06481430053710938, 0.0648609619140625, 0.0646797103881836, 0.06502627563476562, 0.06474752044677734, 0.0650341796875, 0.0647660140991211, 0.06576876831054687, 0.06459590148925781, 0.06423423767089843, 0.06413337707519531, 0.06428793334960937, 0.06440402984619141, 0.0643051528930664, 0.06421491241455078, 0.06412713623046876, 0.06444847869873047, 0.06426214599609376, 0.06432358551025391, 0.06439730834960937, 0.06440345764160156, 0.06441983795166016, 0.0645950698852539, 0.06471497344970703, 0.06459203338623047, 0.06447154998779298, 0.06450176239013672, 0.0643045425415039, 0.06445116424560547, 0.06440550231933594, 0.06439321899414062, 0.0645277099609375, 0.06437340545654296, 0.06448332977294922, 0.06462892913818359, 0.0645010528564453, 0.06479510498046875, 0.0647393569946289, 0.06493170928955078, 0.06480499267578126, 0.0646484146118164, 0.06448003387451172, 0.0645038070678711, 0.06444646453857422, 0.06459801483154297, 0.06462258911132812, 0.06474476623535157, 0.06433030700683594, 0.06447731018066406, 0.06459715270996094, 0.06474816131591797, 0.06461872100830078, 0.06477823638916015, 0.06458953857421874, 0.06486863708496093, 0.06478873443603515, 0.06482099151611329, 0.06465740966796875, 0.06459391784667969, 0.06473113250732422, 0.06457344055175782, 0.06484377288818359, 0.06491270446777343, 0.06446749114990234, 0.06455728149414063, 0.06502799987792969, 0.06489910125732422, 0.06485830688476563, 0.06477811431884765, 0.06498707580566407, 0.06550959777832031, 0.06442396545410156, 0.06407161712646485, 
0.06440589141845703, 0.0643031005859375, 0.06421836853027343, 0.06416255950927735, 0.06415763092041016, 0.06413113403320313, 0.06429081726074219, 0.0643663330078125, 0.06440278625488281, 0.06444124603271484, 0.06442160034179688, 0.06464691162109375, 0.06483334350585937, 0.06468412780761719, 0.06427836608886718, 0.06423836517333985, 0.06442803192138671, 0.06426009368896485, 0.064110595703125, 0.06429695892333985, 0.06406918334960937, 0.06426428985595703, 0.06459571075439453, 0.06474825286865235, 0.06470966339111328, 0.06455177307128906, 0.06448947143554687, 0.06469222259521484, 0.0648207015991211, 0.06454873657226562, 0.06444118499755859, 0.06456435394287109, 0.06468473815917969, 0.06459356689453125, 0.06457379150390626, 0.06452428436279296, 0.06461644744873046, 0.06461622619628907, 0.06485533142089844, 0.06479097747802734, 0.06481708526611328, 0.06458153533935547, 0.06492412567138672, 0.06482089233398437, 0.06479468536376953, 0.06468041229248046, 0.0645959701538086, 0.06476934051513672, 0.06473798370361328, 0.0645959701538086, 0.06457942199707031, 0.06465347290039063, 0.06483148956298829, 0.0648539810180664, 0.06502207946777344, 0.06475081634521485, 0.06494671630859375, 0.06475081634521485, 0.06492793273925782, 0.06482637023925782, 0.06549977874755859, 0.06442189025878907, 0.06416127777099609, 0.06429446411132812, 0.06417708587646484, 0.06446393585205078, 0.06445315551757813, 0.06422505950927734, 0.0641761245727539, 0.06462857818603515, 0.06448822021484375, 0.06455439758300781, 0.06446345520019531, 0.06436249542236328, 0.06462207794189453, 0.06488333129882813, 0.06464908599853515, 0.0645626220703125, 0.06450460815429687, 0.06462982177734375, 0.06450863647460937, 0.06465878295898438, 0.06449836730957031, 0.06440902709960937, 0.06437689971923828, 0.06463945770263672, 0.06462854766845703, 0.06475590515136719, 0.06448025512695313, 0.06471900939941407, 0.064735107421875, 0.06475670623779296, 0.06477356719970703, 0.06448390197753906, 0.06457344055175782, 0.06470451354980469, 0.06466969299316407, 0.06458573150634765, 0.06450176239013672, 0.06482323455810547, 0.0646072998046875, 0.06488556671142579, 0.06492588806152344, 0.06458777618408203, 0.06480691528320312, 0.06494412994384766, 0.06487859344482422, 0.06476310729980468, 0.06455785369873047, 0.064827392578125, 0.06464717102050781, 0.06494550323486328, 0.06467855834960938, 0.06469222259521484, 0.06483148956298829, 0.0649912338256836, 0.06479808044433594, 0.06470870208740234, 0.06492345428466798, 0.06503692626953125, 0.064834716796875, 0.06491792297363282, 0.06475215911865234, 0.06543494415283203, 0.0644719009399414, 0.06422454071044922, 0.0642763214111328, 0.06418521881103516, 0.06464307403564454, 0.06427648162841797, 0.06419046020507813, 0.06447513580322266, 0.06463276672363281, 0.06436172485351563, 0.06454867553710937, 0.06426019287109375, 0.06455375671386719, 0.0647066879272461, 0.06478768157958985, 0.06455375671386719, 0.0643862075805664, 0.064299072265625, 0.06450438690185546, 0.06442825317382812, 0.06457270050048829, 0.06437075042724609, 0.0645146255493164, 0.06451414489746093, 0.06476102447509766, 0.06464387512207032, 0.06449759674072265, 0.06448751831054687, 0.06498016357421875, 0.06477702331542969, 0.06467526245117187, 0.06444892883300782, 0.06468358612060547, 0.064698974609375, 0.06469971466064453, 0.06463148498535157, 0.06452633666992187, 0.06461644744873046, 0.064716796875, 0.06461849975585937, 0.06464307403564454, 0.06459187316894531, 0.06480796813964844, 0.06488162994384766, 0.06494822692871094, 0.06477619171142578, 
0.064753662109375, 0.06475126647949218, 0.06484384155273437, 0.06478880310058593, 0.06486144256591797, 0.06456803131103515, 0.06469017791748047, 0.06481664276123048, 0.06480127716064453, 0.06459129333496094, 0.06470297241210937, 0.06491654205322266, 0.064932861328125, 0.06484188842773438, 0.0649393310546875, 0.06492829132080079, 0.06549298858642578, 0.0644107208251953, 0.0641995849609375, 0.06425395202636719, 0.06415155029296875, 0.0643604507446289, 0.06417203521728515, 0.06431718444824219, 0.064368896484375, 0.06430220794677734, 0.06438182067871094, 0.0646266860961914, 0.06451190185546875, 0.06474556732177734, 0.06469948577880859, 0.06488976287841797, 0.06457727813720703, 0.0645286407470703, 0.06443971252441406, 0.06459782409667969, 0.06435100555419922, 0.06457138824462891, 0.06440470123291016, 0.06451074981689453, 0.0644520034790039, 0.06466425323486329, 0.06447503662109375, 0.06461030578613282, 0.06467388916015625, 0.06490921783447266, 0.06497052764892577, 0.0648171844482422, 0.06454496002197266, 0.06476534271240235, 0.06468428802490235, 0.06476630401611329, 0.06451983642578125, 0.06461603546142577, 0.06457843017578126, 0.0645853729248047, 0.06462214660644532, 0.06467241668701172, 0.06450790405273438, 0.06499116516113282, 0.06486637115478516, 0.06498713684082032, 0.06477161407470704, 0.06467632293701171, 0.0648603515625, 0.06479830169677735, 0.06473136138916015, 0.06473939514160157, 0.06470585632324219, 0.06473792266845703, 0.06486835479736328, 0.06489292907714844, 0.06470211029052735, 0.06466124725341797, 0.0649010238647461, 0.06514313507080079, 0.06506841278076173, 0.0651090850830078, 0.0652405776977539, 0.0654590072631836, 0.06440140533447265, 0.06416998291015626, 0.06423763275146484, 0.06422710418701172, 0.06432988739013672, 0.06422061157226562, 0.0642558364868164, 0.06434825897216796, 0.06455155181884766, 0.0645071029663086, 0.06457807922363282, 0.06437299346923828, 0.0647188491821289, 0.06476812744140625, 0.0649087371826172, 0.06463260650634765, 0.06438159942626953, 0.06446080017089843, 0.06476143646240234, 0.06452291107177735, 0.06441958618164062, 0.06435430145263672, 0.06444032287597656, 0.06451814270019532, 0.06477005004882813, 0.06469961547851563, 0.06458857727050782, 0.06460006713867188, 0.06516748809814453, 0.06493376159667968, 0.06484992218017578, 0.06454704284667968, 0.06461007690429688, 0.0647741470336914, 0.06471609497070313, 0.06460486602783203, 0.06453244781494141, 0.06456060791015625, 0.06473990631103516, 0.06453862762451172, 0.06475981140136719, 0.06449971008300781, 0.06486131286621094, 0.06479875183105469, 0.06487715148925781, 0.06483993530273438, 0.06484745788574219, 0.06490972900390625, 0.06487026977539062, 0.06479475402832031, 0.06463267517089843, 0.06468780517578125, 0.06483513641357422, 0.06475360107421875, 0.06483586883544921, 0.06474156951904297, 0.06492729949951172, 0.06494099426269531, 0.06501990509033204, 0.06479776000976563, 0.06502249908447266, 0.0652064971923828]",tokens/s,15.47516592864871,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 757, in __init__ self.block_sparse_moe = MixtralSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in __init__ self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 672, in __init__ self.w3 = nn.Linear(self.hidden_dim, self.ffn_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, 
in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 100.12 MiB is free. Process 179382 has 14.64 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 49.54 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: 
transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 374, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 116, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 123669 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, 
**model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1262, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in <listcomp> [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 797, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 402, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 81073 has 14.73 GiB memory in use. Of the allocated memory 12.27 GiB is allocated by PyTorch, and 2.34 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.140864,3354.329088,0.0,2959.081472,2942.567424,s,1,7.48135009765625,7.48135009765625,0.0,7.48135009765625,7.48135009765625,7.48135009765625,7.48135009765625,[7.48135009765625],,kWh,1.0317070429154758e-05,1.1307956339052472e-06,3.3486137899982338e-06,1.4796479853058239e-05,,MB,1108.537344,3545.16992,0.0,3139.436544,3105.830912,s,10,2.5526253204345704,0.25526253204345706,0.0032615790050734743,0.254351676940918,0.2591282501220703,0.26135854339599607,0.26314277801513675,"[0.25863262939453124, 0.2520792694091797, 0.2542895355224609, 0.2635888366699219, 0.25307455444335936, 0.2544411163330078, 0.252842041015625, 0.254413818359375, 0.255574462890625, 
0.25368905639648437]",tokens/s,1002.8890568100194,kWh,7.693318503070439e-06,8.48431306636885e-07,5.088183895105415e-06,1.3629933704812739e-05,tokens/kWh,18782189.667555477,MB,1134.329856,3587.11296,0.0,3181.379584,3162.0096,s,10,13.320837524414063,1.3320837524414064,0.010960308835285407,1.3341849365234375,1.3424161987304688,1.3445538635253906,1.346263995361328,"[1.3348258056640625, 1.329916259765625, 1.3039884033203124, 1.341941162109375, 1.3466915283203125, 1.3388785400390626, 1.3343978271484376, 1.3339720458984374, 1.3292396240234374, 1.326986328125]",tokens/s,47.29432356226501,kWh,3.850215672317936e-05,4.246395864887071e-06,2.49971735064944e-05,6.774572609456083e-05,tokens/kWh,929947.9632421881,,s,630,13.317967199325548,0.021139630475119936,0.0004475622198116647,0.021108351707458495,0.021405736923217775,0.021646073341369625,0.023140482158660904,"[0.021833984375, 0.021306880950927733, 0.021195552825927735, 0.02118454360961914, 0.021015647888183595, 0.021033056259155275, 0.020974239349365233, 0.02094095993041992, 0.021659648895263672, 0.02223865509033203, 0.021121599197387694, 0.021217248916625978, 0.021082304000854493, 0.020973407745361328, 0.02104934310913086, 0.021352127075195314, 0.021195072174072266, 0.021053440093994142, 0.02337785530090332, 0.02159212875366211, 0.02106777572631836, 0.021054975509643553, 0.0210068473815918, 0.021184511184692383, 0.02147737693786621, 0.021200895309448242, 0.021020671844482423, 0.020979711532592774, 0.020916223526000977, 0.021108863830566406, 0.02112499237060547, 0.0210882568359375, 0.02117849540710449, 0.02107187271118164, 0.021050432205200195, 0.02100511932373047, 0.02101641654968262, 0.021037216186523437, 0.02063564872741699, 0.02084864044189453, 0.02136911964416504, 0.02136444854736328, 0.021158912658691405, 0.021185535430908203, 0.021143552780151367, 0.021073919296264648, 0.02111052894592285, 0.02115814399719238, 0.02109644889831543, 0.020907424926757814, 0.02106540870666504, 0.02101340866088867, 0.021019712448120117, 0.020785184860229493, 0.020589471817016602, 0.0209215030670166, 0.022244192123413085, 0.021233375549316407, 0.021301536560058593, 0.02109971237182617, 0.021086271286010743, 0.021033504486083984, 0.021063455581665037, 0.021380319595336914, 0.020991104125976563, 0.021048704147338868, 0.020976415634155275, 0.02112892723083496, 0.02088083267211914, 0.020973600387573243, 0.020910335540771485, 0.020911455154418945, 0.020947807312011717, 0.020795263290405274, 0.020844736099243165, 0.021308639526367187, 0.021358816146850586, 0.021047903060913087, 0.020997535705566405, 0.020933216094970702, 0.020958335876464843, 0.021136159896850585, 0.021221664428710936, 0.021040800094604493, 0.021117088317871093, 0.021107839584350585, 0.02110323143005371, 0.021239423751831056, 0.021079872131347658, 0.020884288787841796, 0.0211494083404541, 0.021518815994262697, 0.022140384674072266, 0.021156192779541016, 0.02117251205444336, 0.02138412857055664, 0.021217472076416017, 0.020951648712158204, 0.021546527862548827, 0.021117408752441405, 0.02133363151550293, 0.021086591720581055, 0.021157087326049803, 0.020970079421997072, 0.020967008590698243, 0.021233247756958007, 0.021285888671875, 0.02114460754394531, 0.021117408752441405, 0.021198911666870116, 0.021263008117675782, 0.020953216552734376, 0.02087286376953125, 0.020772159576416014, 0.020996799468994142, 0.02124595260620117, 0.02109235191345215, 0.02108201599121094, 0.021047199249267578, 0.021129440307617188, 0.021151519775390624, 0.021106624603271486, 0.021313056945800782, 0.02098454475402832, 
0.02085500717163086, 0.020627231597900392, 0.025465856552124022, 0.021867679595947265, 0.020703296661376953, 0.02128771209716797, 0.020574207305908202, 0.020930656433105467, 0.020615072250366212, 0.020625024795532226, 0.020816255569458007, 0.020501855850219727, 0.020576927185058595, 0.020494335174560546, 0.020558847427368163, 0.020536319732666015, 0.020864831924438478, 0.020586687088012694, 0.02050009536743164, 0.020443519592285155, 0.0204202880859375, 0.02048646354675293, 0.020508096694946288, 0.020478111267089844, 0.02050089645385742, 0.020532928466796874, 0.020633535385131838, 0.020799871444702148, 0.020707328796386718, 0.02069708824157715, 0.02052230453491211, 0.02055027198791504, 0.020664384841918945, 0.02058448028564453, 0.020551647186279297, 0.020512767791748047, 0.020531200408935548, 0.020485664367675783, 0.02046614456176758, 0.020410367965698242, 0.02046771240234375, 0.020534271240234374, 0.02051584053039551, 0.020463615417480468, 0.020531200408935548, 0.020572160720825194, 0.020596736907958983, 0.020590591430664062, 0.02063564872741699, 0.020537343978881836, 0.020507776260375976, 0.020572736740112306, 0.021655231475830077, 0.020709856033325195, 0.020465824127197267, 0.020414464950561522, 0.020525056838989256, 0.020492000579833983, 0.020572160720825194, 0.02050281524658203, 0.02046335983276367, 0.02056550407409668, 0.020781824111938477, 0.020545536041259766, 0.020535232543945313, 0.02125619125366211, 0.021133279800415038, 0.021141536712646486, 0.02124799919128418, 0.021296127319335938, 0.021138431549072266, 0.02128428840637207, 0.021332735061645507, 0.021277568817138673, 0.021272863388061523, 0.0212957763671875, 0.021181535720825196, 0.021203872680664062, 0.02129052734375, 0.021238239288330078, 0.02134534454345703, 0.021300224304199217, 0.022749120712280274, 0.02287513542175293, 0.02124083137512207, 0.021333471298217773, 0.02170217514038086, 0.022010879516601564, 0.02146214485168457, 0.021266815185546874, 0.02122217559814453, 0.021153184890747072, 0.021186880111694336, 0.021448703765869142, 0.02124799919128418, 0.021207040786743164, 0.021253599166870116, 0.02104368019104004, 0.02105708885192871, 0.02120083236694336, 0.021190271377563477, 0.021177280426025392, 0.021171871185302733, 0.021121120452880858, 0.021266687393188478, 0.021147327423095705, 0.02116640090942383, 0.021195808410644532, 0.021153087615966796, 0.02103875160217285, 0.02104265594482422, 0.021388992309570313, 0.021577856063842774, 0.021409984588623046, 0.021238304138183593, 0.021200992584228515, 0.02117827224731445, 0.021106719970703125, 0.021207008361816406, 0.021103872299194335, 0.021134336471557616, 0.021362272262573243, 0.021135200500488283, 0.021229888916015623, 0.021364543914794924, 0.021190847396850586, 0.021127168655395507, 0.0210882568359375, 0.021634880065917968, 0.021311103820800783, 0.02136025619506836, 0.021177248001098634, 0.02107187271118164, 0.02127052879333496, 0.021209087371826172, 0.021190271377563477, 0.021202592849731444, 0.02115247917175293, 0.021182207107543944, 0.021278751373291015, 0.02121340751647949, 0.021122432708740233, 0.021219167709350586, 0.021351200103759765, 0.022511135101318358, 0.02142665672302246, 0.02135856056213379, 0.021226655960083007, 0.021185407638549803, 0.021075679779052735, 0.021219615936279298, 0.02122528076171875, 0.021108991622924806, 0.02101478385925293, 0.021200576782226564, 0.021214879989624024, 0.0214052791595459, 0.021123199462890624, 0.021260927200317383, 0.021169408798217774, 0.021069631576538086, 0.023248863220214844, 0.0233604793548584, 0.021357952117919923, 
0.02140985679626465, 0.02125062370300293, 0.021296735763549804, 0.0215513916015625, 0.02125632095336914, 0.02267087936401367, 0.021575935363769533, 0.021428287506103514, 0.021313247680664064, 0.02131603240966797, 0.021181568145751953, 0.021787519454956054, 0.02124492835998535, 0.021115135192871094, 0.02109913635253906, 0.021289087295532225, 0.02145484733581543, 0.0213309440612793, 0.02144358444213867, 0.021247711181640625, 0.021168415069580077, 0.0211167049407959, 0.021125343322753905, 0.021087520599365233, 0.02134204864501953, 0.021276895523071288, 0.021254816055297852, 0.021783199310302735, 0.021296543121337892, 0.02130601692199707, 0.02127257537841797, 0.021301248550415038, 0.021168127059936523, 0.02126857566833496, 0.021217344284057617, 0.021465951919555665, 0.021345279693603517, 0.02130534362792969, 0.021268672943115234, 0.02139731216430664, 0.022196224212646484, 0.02129305648803711, 0.021118688583374023, 0.021364511489868163, 0.021380992889404298, 0.021574272155761718, 0.02125823974609375, 0.021190656661987304, 0.021127168655395507, 0.02108415985107422, 0.021103776931762696, 0.021154655456542968, 0.021229087829589845, 0.02126019287109375, 0.021135295867919922, 0.021205631256103516, 0.021215232849121093, 0.02131865692138672, 0.021363712310791014, 0.021189823150634765, 0.02106822395324707, 0.02113692855834961, 0.0211812801361084, 0.0212457275390625, 0.02119830322265625, 0.02114841651916504, 0.020973567962646485, 0.021064735412597655, 0.021056480407714843, 0.021204992294311522, 0.021129215240478515, 0.021364160537719726, 0.02106835174560547, 0.021114879608154297, 0.02123161506652832, 0.0211615047454834, 0.021156320571899413, 0.021202943801879884, 0.02113865661621094, 0.021184736251831055, 0.02124473571777344, 0.021122400283813476, 0.021099199295043947, 0.02109561538696289, 0.021012832641601562, 0.021196992874145507, 0.02123161506652832, 0.022130687713623046, 0.021125247955322266, 0.021128480911254882, 0.02150399971008301, 0.02106915283203125, 0.021060447692871093, 0.020957183837890626, 0.02116783905029297, 0.021106624603271486, 0.02112246322631836, 0.021028959274291992, 0.02072812843322754, 0.021128992080688476, 0.021239839553833007, 0.021026912689208983, 0.021003103256225585, 0.020920095443725587, 0.020948223114013672, 0.020926752090454102, 0.02120137596130371, 0.02097727966308594, 0.020945056915283203, 0.020927871704101562, 0.021322080612182617, 0.020916351318359373, 0.02105209541320801, 0.020932287216186524, 0.020932607650756836, 0.021143327713012694, 0.021059200286865233, 0.021389919281005858, 0.021180383682250975, 0.021092256546020507, 0.021167552947998047, 0.021328575134277345, 0.02150547218322754, 0.02205936050415039, 0.021088064193725584, 0.020908447265625, 0.020983808517456053, 0.021020320892333983, 0.021470815658569335, 0.021037824630737306, 0.024286624908447265, 0.023694976806640625, 0.0210994873046875, 0.021122880935668945, 0.021075328826904296, 0.02095552062988281, 0.02068115234375, 0.02089369583129883, 0.021451839447021483, 0.021174495697021484, 0.021015264511108397, 0.021089279174804687, 0.020906816482543944, 0.021061824798583983, 0.021012479782104493, 0.02121478462219238, 0.021096160888671875, 0.021062368392944335, 0.02105958366394043, 0.020977664947509765, 0.020946271896362306, 0.020852544784545898, 0.020812639236450194, 0.021603967666625975, 0.02116441535949707, 0.021301248550415038, 0.021168127059936523, 0.021608448028564452, 0.02220412826538086, 0.021254432678222655, 0.02103500747680664, 0.021045055389404297, 0.021072191238403322, 0.020982656478881836, 
0.021497888565063475, 0.020992223739624023, 0.02112588882446289, 0.02106982421875, 0.02084883117675781, 0.021087648391723633, 0.021252511978149414, 0.021145599365234375, 0.0210882568359375, 0.02107404708862305, 0.021006208419799804, 0.021013664245605468, 0.020988767623901367, 0.021276735305786134, 0.02109791946411133, 0.021077728271484374, 0.021059680938720703, 0.02102751922607422, 0.021155712127685546, 0.020973983764648436, 0.021064640045166016, 0.0209846076965332, 0.02143436813354492, 0.02102176094055176, 0.02098681640625, 0.02106777572631836, 0.021339231491088868, 0.02154572868347168, 0.021133472442626953, 0.021022048950195313, 0.02105411148071289, 0.020971519470214844, 0.02125209617614746, 0.021032447814941405, 0.02096998405456543, 0.020973567962646485, 0.021147647857666017, 0.021069055557250978, 0.02108470344543457, 0.021240032196044922, 0.021690624237060547, 0.021812992095947267, 0.021111040115356444, 0.021012224197387696, 0.020943199157714844, 0.021057184219360352, 0.02153385543823242, 0.02139632034301758, 0.021168127059936523, 0.021112831115722656, 0.02104115104675293, 0.021116064071655272, 0.021465087890625, 0.02104934310913086, 0.021317632675170898, 0.021401344299316408, 0.021164287567138673, 0.020992000579833983, 0.021013919830322265, 0.020989568710327148, 0.02105839920043945, 0.020936511993408204, 0.02097555160522461, 0.02079372787475586, 0.02094198417663574, 0.021275487899780274, 0.021153791427612305, 0.02108563232421875, 0.02097823905944824, 0.020971616744995116, 0.02099955177307129, 0.02100399971008301, 0.021025600433349608, 0.02082761573791504, 0.020922208786010744, 0.02202899169921875, 0.021259967803955077, 0.02228665542602539, 0.0210513916015625, 0.0209998722076416, 0.02102617645263672, 0.02117433547973633, 0.021346336364746095, 0.021043935775756837, 0.020899999618530275, 0.021032608032226563, 0.0213240966796875, 0.02104115104675293, 0.02110233688354492, 0.021078239440917967, 0.020967456817626955, 0.02117532730102539, 0.020890592575073242, 0.02086092758178711, 0.021466400146484373, 0.021222047805786133, 0.021151391983032227, 0.020962751388549805, 0.0210599365234375, 0.02111756706237793, 0.020979360580444337, 0.02103273582458496, 0.02095756721496582, 0.020985727310180665, 0.020980031967163085, 0.020983808517456053, 0.020975616455078124, 0.02097260856628418, 0.020999103546142577, 0.02111692810058594, 0.020922176361083983, 0.021043392181396486, 0.021008384704589843, 0.021149696350097655, 0.020944320678710937, 0.02152409553527832, 0.020810144424438477, 0.021338144302368165, 0.0209072322845459, 0.020828224182128905, 0.02098044776916504, 0.020930559158325195, 0.02107792091369629, 0.020764768600463866, 0.020851999282836913, 0.02126483154296875, 0.02117475128173828, 0.021118783950805665, 0.021018527984619142, 0.020957279205322265, 0.021032960891723632, 0.021018016815185548, 0.020867679595947267, 0.02102841567993164, 0.02101043128967285, 0.021172672271728515, 0.021028959274291992, 0.020977567672729493, 0.021052576065063475, 0.02134899139404297, 0.020979328155517576, 0.021035392761230468, 0.021062143325805666, 0.02100387191772461, 0.020889055252075194, 0.020812448501586915, 0.021323263168334963, 0.021208768844604493, 0.021117727279663087, 0.020932031631469727, 0.020933216094970702, 0.021006303787231444, 0.02101203155517578, 0.02131180763244629, 0.024764575958251954, 0.02135785675048828, 0.021234399795532228, 0.021270463943481446, 0.021692224502563476, 0.021104896545410156, 0.022007295608520508, 0.021066240310668945, 0.021188928604125978, 0.021292736053466797, 0.021004703521728514, 
0.021165664672851563, 0.02072985649108887, 0.02064793586730957, 0.020494335174560546, 0.02056172752380371, 0.020389503479003906, 0.020437568664550782, 0.020658176422119142, 0.02050048065185547, 0.020569183349609374, 0.020695968627929686, 0.02067865562438965, 0.020507776260375976]",tokens/s,47.304516565553946,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,813.715456,3354.329088,0.0,2959.081472,2942.567424,s,1,7.60247119140625,7.60247119140625,0.0,7.60247119140625,7.60247119140625,7.60247119140625,7.60247119140625,[7.60247119140625],,kWh,1.027498409583245e-05,1.124794598929495e-06,4.711670436002846e-06,1.611144913076479e-05,,MB,1118.08512,3545.16992,0.0,3139.436544,3105.830912,s,10,2.5615629425048834,0.2561562942504883,0.0025241374356524703,0.25539284515380856,0.26039235534667965,0.26045658416748046,0.26050796722412106,"[0.2603780822753906, 0.2574383544921875, 0.2528868408203125, 0.2541678009033203, 0.25575640869140626, 0.2550292816162109, 0.2546710662841797, 0.2536778869628906, 0.2605208129882812, 0.2570364074707031]",tokens/s,999.3898480966645,kWh,7.679832204056866e-06,8.469384555319108e-07,5.085201436578925e-06,1.3611972096167704e-05,tokens/kWh,18806973.610537585,MB,1144.561664,3587.11296,0.0,3181.379584,3162.0096,s,10,13.417264282226563,1.3417264282226564,0.013599975796685194,1.3432182006835938,1.3592529663085937,1.3610109436035156,1.3624173254394532,"[1.3265428466796876, 1.3588623046875, 1.3477608642578125, 1.3466318359375, 1.34207275390625, 1.3443636474609375, 1.3353577880859375, 1.31404443359375, 1.3627689208984375, 1.33885888671875]",tokens/s,46.954430258524575,kWh,3.888306745719286e-05,4.2885395841120465e-06,2.506946157602105e-05,6.824106861732596e-05,tokens/kWh,923197.7352711724,,s,630,13.414208038330063,0.021292393711635042,0.0005596877692547637,0.021259455680847167,0.021615834045410155,0.022005284976959225,0.02318074527740479,"[0.02122380828857422, 0.020987552642822267, 0.02083875274658203, 0.020719200134277343, 0.02167843246459961, 0.0205980167388916, 0.02066633605957031, 0.02083718490600586, 0.020845888137817382, 0.02263644790649414, 0.021711904525756835, 0.020838176727294922, 0.020822015762329102, 0.02083430480957031, 0.02065555191040039, 0.020869695663452148, 0.02089574432373047, 0.0209749755859375, 0.020836544036865235, 0.02113580894470215, 0.021360479354858398, 0.020811935424804688, 0.020969472885131835, 0.02066227149963379, 0.02062131118774414, 0.020551071166992188, 0.02053590393066406, 0.02062950325012207, 0.020549631118774413, 0.021269535064697264, 0.020786144256591796, 0.020676607131958007, 0.020793216705322266, 0.02068809509277344, 0.020581279754638672, 0.020642976760864257, 0.020616031646728514, 0.02067865562438965, 0.020774944305419922, 0.020943904876708986, 0.020870271682739257, 0.020727615356445312, 0.020727807998657227, 0.020917728424072267, 0.021040735244750978, 0.021232160568237304, 0.021495296478271485, 0.021324703216552734, 
0.02145052719116211, 0.021397727966308594, 0.02136195182800293, 0.021284927368164064, 0.022167680740356445, 0.021407808303833008, 0.02129484748840332, 0.02139561653137207, 0.021470048904418945, 0.02145155143737793, 0.02151910400390625, 0.021424320220947264, 0.021409887313842774, 0.021597728729248047, 0.021488000869750976, 0.021881311416625977, 0.021443904876708983, 0.02135465621948242, 0.021456735610961914, 0.02142473602294922, 0.021325759887695313, 0.021406976699829102, 0.02133475112915039, 0.02135481643676758, 0.021377952575683593, 0.021386016845703126, 0.022263168334960937, 0.021505760192871093, 0.021564064025878907, 0.021763423919677734, 0.02161552047729492, 0.02145894432067871, 0.02149580764770508, 0.021546016693115233, 0.021470176696777345, 0.021213279724121094, 0.02145471954345703, 0.021467168807983397, 0.022299840927124025, 0.021452831268310546, 0.021343008041381836, 0.021336063385009766, 0.021480640411376952, 0.021420864105224608, 0.02146633529663086, 0.02124880027770996, 0.02123075294494629, 0.0213656005859375, 0.021475040435791015, 0.021487648010253907, 0.02156972885131836, 0.023054399490356446, 0.022042623519897463, 0.02148761558532715, 0.021634592056274413, 0.021317375183105468, 0.02159814453125, 0.021299680709838866, 0.02141747283935547, 0.0214304313659668, 0.021364704132080078, 0.021347007751464843, 0.021420032501220702, 0.021348352432250976, 0.021336063385009766, 0.021317983627319338, 0.021400768280029295, 0.021154272079467774, 0.02126643180847168, 0.02125823974609375, 0.02151628875732422, 0.021640928268432617, 0.02147545623779297, 0.02143657684326172, 0.021393760681152344, 0.021406784057617187, 0.023015232086181642, 0.024418752670288087, 0.02168012809753418, 0.021169279098510744, 0.021162879943847655, 0.021434463500976563, 0.02137868881225586, 0.021133600234985353, 0.021174272537231444, 0.021131263732910157, 0.021227519989013673, 0.021198848724365234, 0.02211862373352051, 0.021614368438720704, 0.021710847854614256, 0.02145075225830078, 0.021223424911499023, 0.02128108787536621, 0.02122659111022949, 0.0211746883392334, 0.021403839111328125, 0.0215118408203125, 0.021544832229614258, 0.02135641670227051, 0.021267040252685547, 0.02140390396118164, 0.021192447662353515, 0.021213184356689452, 0.02110588836669922, 0.02128156852722168, 0.021301248550415038, 0.021151744842529296, 0.021102592468261717, 0.02126665687561035, 0.020952287673950194, 0.024524831771850587, 0.02122707176208496, 0.020874399185180664, 0.021102399826049806, 0.020743488311767578, 0.02100704002380371, 0.022416608810424805, 0.021646112442016602, 0.021581823348999024, 0.022279199600219728, 0.021314527511596678, 0.021263744354248045, 0.02117043113708496, 0.021361024856567382, 0.021446304321289064, 0.02125971221923828, 0.021273151397705078, 0.021256479263305664, 0.021198911666870116, 0.021267967224121095, 0.021387487411499023, 0.021221824645996094, 0.02156732749938965, 0.02130668830871582, 0.021289663314819338, 0.02168422317504883, 0.021261407852172853, 0.021424543380737304, 0.02136252784729004, 0.021185184478759767, 0.02188047981262207, 0.02115190315246582, 0.021336544036865236, 0.021114944458007812, 0.02106572723388672, 0.02136809539794922, 0.021592159271240235, 0.02159881591796875, 0.021263904571533203, 0.0210150089263916, 0.021147680282592774, 0.021376096725463867, 0.021313631057739257, 0.021316608428955077, 0.02214860725402832, 0.021268287658691407, 0.02139187240600586, 0.021180416107177736, 0.021311487197875977, 0.021194944381713866, 0.021243711471557618, 0.021204416275024413, 0.021113407135009764, 
0.02130473518371582, 0.021080160140991212, 0.021375423431396486, 0.021497919082641603, 0.021477344512939454, 0.021302623748779295, 0.0215631046295166, 0.021289440155029298, 0.02198358345031738, 0.02294528007507324, 0.02134310340881348, 0.021241216659545897, 0.02128486442565918, 0.021287391662597657, 0.021213119506835937, 0.021202943801879884, 0.02134364891052246, 0.02120355224609375, 0.022134559631347656, 0.021079263687133788, 0.02146611213684082, 0.021729280471801758, 0.021307392120361326, 0.02122547149658203, 0.021153791427612305, 0.02115692710876465, 0.02112403106689453, 0.02127872085571289, 0.02143846321105957, 0.021238912582397462, 0.02117932891845703, 0.021249984741210936, 0.0212541446685791, 0.021048479080200196, 0.02147769546508789, 0.02115135955810547, 0.02103593635559082, 0.021352447509765626, 0.022439071655273438, 0.021257055282592772, 0.021649152755737304, 0.021547359466552736, 0.0215097599029541, 0.02141744041442871, 0.021077024459838868, 0.021036991119384764, 0.02137654495239258, 0.02134009552001953, 0.021148191452026368, 0.021053440093994142, 0.020995264053344728, 0.021138240814208984, 0.021253952026367186, 0.02129859161376953, 0.021215744018554687, 0.02117238426208496, 0.02136457633972168, 0.021090591430664062, 0.021120351791381838, 0.021191328048706055, 0.021397504806518555, 0.02139952087402344, 0.021348384857177733, 0.021336063385009766, 0.021157888412475585, 0.021211135864257814, 0.021332000732421873, 0.021198816299438476, 0.021151744842529296, 0.020967424392700194, 0.02103091239929199, 0.02128281593322754, 0.021331199645996092, 0.022072063446044923, 0.021421760559082032, 0.021137311935424806, 0.021170591354370116, 0.021110784530639647, 0.02126028823852539, 0.021172224044799806, 0.021338111877441408, 0.021437471389770507, 0.021540895462036132, 0.021273536682128905, 0.021254207611083983, 0.021075904846191405, 0.021280128479003905, 0.021259199142456053, 0.021472543716430665, 0.021387680053710938, 0.021560672760009766, 0.021324447631835938, 0.02127769660949707, 0.02110361671447754, 0.021839872360229492, 0.021310848236083986, 0.02146771240234375, 0.02093881607055664, 0.02141788864135742, 0.021553247451782227, 0.02166783905029297, 0.021310623168945313, 0.021184576034545897, 0.02168822479248047, 0.02104470443725586, 0.021189599990844726, 0.021583871841430666, 0.02163408088684082, 0.022155807495117186, 0.021166528701782227, 0.02120412826538086, 0.021271392822265624, 0.02122137641906738, 0.021011680603027345, 0.021085311889648437, 0.021350048065185548, 0.024834367752075197, 0.021546592712402345, 0.02120841598510742, 0.021218048095703126, 0.021567487716674806, 0.02112512016296387, 0.021510143280029297, 0.021736703872680663, 0.021349119186401366, 0.021362176895141603, 0.02142255973815918, 0.021403743743896485, 0.021088191986083984, 0.0212807674407959, 0.021024480819702148, 0.021094688415527342, 0.021202943801879884, 0.021204736709594725, 0.021047552108764647, 0.021127168655395507, 0.02106572723388672, 0.021073919296264648, 0.02088960075378418, 0.02111692810058594, 0.021034112930297853, 0.02130828857421875, 0.020786687850952147, 0.021209152221679687, 0.0214716796875, 0.02141209602355957, 0.021454559326171876, 0.02116217613220215, 0.02119868850708008, 0.021134815216064452, 0.02127324867248535, 0.02127017593383789, 0.021012704849243165, 0.02123776054382324, 0.021180767059326172, 0.02208118438720703, 0.021354143142700194, 0.021305248260498046, 0.020998592376708983, 0.02143436813354492, 0.021390527725219727, 0.021293888092041014, 0.02107151985168457, 0.02122172737121582, 
0.02129088020324707, 0.02134614372253418, 0.02181340789794922, 0.0211680965423584, 0.0211343994140625, 0.021046207427978515, 0.02126438331604004, 0.021235712051391603, 0.021006336212158205, 0.021342208862304687, 0.02118377685546875, 0.021199199676513673, 0.021342592239379884, 0.02116761589050293, 0.021182464599609374, 0.02093516731262207, 0.021243072509765624, 0.021455007553100584, 0.021393312454223632, 0.021426048278808594, 0.021234560012817382, 0.021153791427612305, 0.02108940887451172, 0.021180479049682618, 0.02119353675842285, 0.020998144149780275, 0.022128639221191407, 0.021429279327392577, 0.021238527297973632, 0.021395103454589844, 0.022294431686401366, 0.021851903915405275, 0.021373023986816408, 0.021379295349121093, 0.021215839385986326, 0.02130067253112793, 0.021154367446899414, 0.02102681541442871, 0.020940576553344727, 0.020832223892211912, 0.02086265563964844, 0.021137344360351563, 0.020875936508178712, 0.021136959075927733, 0.02103113555908203, 0.021098688125610353, 0.020962560653686523, 0.020986623764038086, 0.021190464019775392, 0.021381183624267577, 0.021098047256469726, 0.020918176651000975, 0.02086524772644043, 0.02102112007141113, 0.021308544158935547, 0.021170751571655273, 0.021203487396240235, 0.021114656448364258, 0.02099836730957031, 0.020975391387939454, 0.02105548858642578, 0.020813695907592772, 0.020861055374145506, 0.020968479156494142, 0.021102783203125, 0.021472000122070314, 0.021866592407226562, 0.02105958366394043, 0.02079689598083496, 0.02075641632080078, 0.020970079421997072, 0.021650560379028322, 0.021046207427978515, 0.02075436782836914, 0.020714719772338866, 0.020769567489624025, 0.021236991882324217, 0.02075315284729004, 0.020750335693359375, 0.020612192153930665, 0.020847488403320312, 0.020883487701416015, 0.02086297607421875, 0.02074985694885254, 0.020619743347167967, 0.020775968551635743, 0.02055062484741211, 0.02057401657104492, 0.02053548812866211, 0.020682687759399413, 0.020643903732299806, 0.020594688415527345, 0.020606752395629882, 0.020705440521240234, 0.020650047302246094, 0.022099967956542968, 0.020821216583251954, 0.020620063781738283, 0.02063155174255371, 0.020683839797973634, 0.020650400161743163, 0.020617599487304687, 0.020750495910644533, 0.020766687393188477, 0.020670719146728515, 0.020709152221679687, 0.02067865562438965, 0.020672416687011717, 0.020721151351928712, 0.020613311767578125, 0.02174118423461914, 0.020697887420654298, 0.02087881660461426, 0.02068662452697754, 0.020760448455810546, 0.020687040328979493, 0.020649856567382812, 0.020766944885253907, 0.020908639907836913, 0.021407743453979493, 0.021163328170776367, 0.020883712768554687, 0.020911840438842772, 0.020646400451660156, 0.020817535400390625, 0.021161951065063477, 0.021058111190795897, 0.020738079071044923, 0.02147737693786621, 0.021331968307495116, 0.021101696014404297, 0.02119708824157715, 0.0213090877532959, 0.02144879913330078, 0.022098783493041993, 0.021712896347045898, 0.023810176849365233, 0.02156844711303711, 0.02154537582397461, 0.02191209602355957, 0.02149580764770508, 0.021452096939086913, 0.021663583755493165, 0.021344287872314453, 0.021490495681762697, 0.02209324836730957, 0.02146566390991211, 0.02143436813354492, 0.021311487197875977, 0.021370880126953123, 0.021312543869018555, 0.021388256072998046, 0.021352447509765626, 0.021579776763916016, 0.021448703765869142, 0.02128691291809082, 0.021403776168823243, 0.021410943984985352, 0.022600160598754884, 0.021603872299194336, 0.021618656158447266, 0.021604415893554687, 0.02153750419616699, 0.021741567611694337, 
0.02143027114868164, 0.021327871322631836, 0.021484703063964845, 0.021439327239990234, 0.021381343841552734, 0.02147052764892578, 0.021444608688354492, 0.0214102725982666, 0.02130352020263672, 0.021169952392578125, 0.021264192581176757, 0.021239999771118165, 0.021514240264892577, 0.02162073516845703, 0.02123075294494629, 0.021269344329833986, 0.02109235191345215, 0.021319135665893555, 0.023232351303100585, 0.02972947120666504, 0.021136640548706054, 0.02114761543273926, 0.02138528060913086, 0.020902624130249025, 0.020970592498779295, 0.02103388786315918, 0.02100662422180176, 0.021309440612792968, 0.021105663299560547, 0.02165225601196289, 0.021205087661743165, 0.02127020835876465, 0.021146047592163087, 0.0212807674407959, 0.021237119674682618, 0.021151968002319336, 0.02114761543273926, 0.021168447494506835, 0.020958879470825195, 0.021121503829956055, 0.02102252769470215, 0.021002431869506837, 0.021102304458618163, 0.021176607131958007, 0.02147532844543457, 0.02127462387084961, 0.02127257537841797, 0.021559295654296876, 0.021276416778564452, 0.021177696228027343, 0.021169055938720704, 0.02126233673095703, 0.021067520141601563, 0.020983680725097657, 0.021227615356445313, 0.02118275260925293, 0.021153791427612305, 0.021102592468261717, 0.021391008377075197, 0.0214531192779541, 0.02138319969177246, 0.021313440322875975, 0.022146528244018554, 0.021250688552856445, 0.021180448532104493, 0.021274911880493165, 0.021176223754882813, 0.021135135650634764, 0.021061632156372072, 0.020923839569091798, 0.021052032470703124, 0.021574752807617188, 0.021417823791503907, 0.021141952514648437, 0.021518911361694336, 0.021535999298095704, 0.021588735580444336, 0.021178367614746094, 0.023399456024169922, 0.022023040771484374, 0.021243999481201172, 0.021317472457885744, 0.021016288757324218, 0.021033311843872072, 0.02082195281982422, 0.02060310363769531, 0.0211778564453125, 0.02075276756286621, 0.02058559989929199, 0.020624319076538087]",tokens/s,46.96512818347703,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 21458 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 718, in __init__ self.dense_4h_to_h = 
nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 176.12 MiB is free. Process 45827 has 14.57 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1035.137024,10975.379456,0.0,10580.13184,10162.029568,s,1,11.9223359375,11.9223359375,0.0,11.9223359375,11.9223359375,11.9223359375,11.9223359375,[11.9223359375],,kWh,7.199695445816208e-06,7.769629309695722e-07,3.88500310800205e-06,1.186166148478783e-05,,MB,1304.940544,11094.91712,0.0,10689.183744,9358.065152,s,10,8.74055206298828,0.874055206298828,0.0030499145467565874,0.8741604309082032,0.8765967407226563,0.8782952270507812,0.8796540161132812,"[0.8684814453125, 0.8739532470703125, 0.8738178100585937, 0.8712845458984375, 0.8759263916015625, 0.871088623046875, 0.8799937133789062, 0.8754193725585937, 0.8743676147460937, 0.8762192993164063]",tokens/s,292.88767820974107,kWh,2.5562439670831813e-05,2.8187286797481014e-06,1.693825429133269e-05,4.5319422641912595e-05,tokens/kWh,5648792.175106937,MB,1330.46272,11094.91712,0.0,10689.183744,9397.6704,s,10,30.260364257812498,3.0260364257812498,0.006906549708564813,3.0271842041015624,3.032440185546875,3.0346143798828127,3.0363537353515624,"[3.017893310546875, 3.017557373046875, 3.01461865234375, 3.02799169921875, 3.02494384765625, 3.03678857421875, 3.03181982421875, 3.030417236328125, 3.026376708984375, 3.03195703125]",tokens/s,20.81931316597913,kWh,8.862553406583476e-05,9.774984543296727e-06,5.896827865606729e-05,0.00015736879726519878,tokens/kWh,400333.491103907,,s,630,30.256514354705796,0.048026213261437786,0.00030421240737976484,0.047994447708129884,0.04833654632568359,0.04852303009033203,0.049130749359130865,"[0.04891849517822266, 0.04798441696166992, 0.04778211212158203, 0.0475681266784668, 0.04751385498046875, 0.04756710433959961, 0.04762361526489258, 0.04757183837890625, 0.04763846588134766, 0.04765695953369141, 0.047582942962646486, 0.04776700973510742, 0.04768851089477539, 0.047900543212890626, 0.04771184158325195, 0.047624736785888674, 0.048045406341552736, 0.04780303955078125, 0.04794582366943359, 
0.047857566833496096, 0.04783718490600586, 0.048133983612060546, 0.04780252838134766, 0.04798659133911133, 0.047715713500976566, 0.0477949104309082, 0.047590656280517576, 0.047827518463134766, 0.04771648025512695, 0.048003135681152345, 0.04777337646484375, 0.04888739013671875, 0.04814102554321289, 0.04781260681152344, 0.04777983856201172, 0.04798668670654297, 0.0477388801574707, 0.047882240295410154, 0.04790000152587891, 0.04789465713500977, 0.047884990692138675, 0.047994239807128906, 0.04806243133544922, 0.04896979141235352, 0.04839059066772461, 0.04791910552978516, 0.047890430450439454, 0.047652126312255856, 0.04801545715332031, 0.047982814788818356, 0.04804003143310547, 0.04805372619628906, 0.04786057662963867, 0.047751167297363284, 0.04800662231445312, 0.04789072036743164, 0.047798526763916015, 0.047634078979492185, 0.04788054275512695, 0.048056320190429686, 0.04799465560913086, 0.04797872161865235, 0.04788947296142578, 0.048936767578125, 0.04811139297485351, 0.04753654479980469, 0.047512958526611325, 0.04758156967163086, 0.047743198394775394, 0.04771023941040039, 0.04762003326416016, 0.04752297592163086, 0.04779100799560547, 0.04794367980957031, 0.04768767929077149, 0.04777308654785156, 0.04773129653930664, 0.048920574188232424, 0.047726593017578124, 0.04777983856201172, 0.047850784301757814, 0.047559391021728514, 0.04774092864990234, 0.04802969741821289, 0.04813820648193359, 0.04767951965332031, 0.0477388801574707, 0.04767334365844727, 0.047876094818115236, 0.04770406341552735, 0.04779008102416992, 0.048353279113769534, 0.04785587310791015, 0.047701663970947265, 0.04795097732543945, 0.04820256042480469, 0.04795929718017578, 0.04787907028198242, 0.047898624420166014, 0.04792444610595703, 0.04791542434692383, 0.04778140640258789, 0.04795068740844727, 0.04781603240966797, 0.04797507095336914, 0.0481927375793457, 0.047817665100097655, 0.04779401779174805, 0.047833087921142575, 0.04791296005249023, 0.04793139266967773, 0.047967647552490236, 0.047931999206542966, 0.04765081787109375, 0.04801945495605469, 0.04796211242675781, 0.04798831939697266, 0.04779459381103516, 0.04808415985107422, 0.04788716888427735, 0.047923198699951174, 0.04799871826171875, 0.047846881866455075, 0.047991134643554687, 0.04814393615722656, 0.047966911315917966, 0.048801631927490235, 0.04803379058837891, 0.04772975921630859, 0.047713184356689455, 0.04758323287963867, 0.04768153762817383, 0.04747673416137695, 0.047621150970458985, 0.04745929718017578, 0.04759961700439453, 0.04760543823242187, 0.04747043228149414, 0.04771683120727539, 0.048107521057128906, 0.04774256134033203, 0.04758774566650391, 0.04754431915283203, 0.04755580902099609, 0.047473438262939455, 0.04771558380126953, 0.047741439819335936, 0.04776716613769531, 0.04753062438964844, 0.04785712051391602, 0.04784592056274414, 0.047688831329345704, 0.0476864013671875, 0.04777791976928711, 0.04782889556884766, 0.047564640045166015, 0.04756006240844726, 0.04754496002197266, 0.04815238571166992, 0.0498221435546875, 0.04794572830200195, 0.0478699836730957, 0.04760367965698242, 0.04799897766113281, 0.04766515350341797, 0.047816703796386716, 0.04779417419433594, 0.047851520538330077, 0.047662334442138674, 0.04828448104858398, 0.047881919860839846, 0.04785177612304688, 0.04785971069335938, 0.04778406524658203, 0.047932735443115236, 0.04801923370361328, 0.04779292678833008, 0.047922977447509764, 0.0479664306640625, 0.04776937484741211, 0.04810720062255859, 0.04807939147949219, 0.04789209747314453, 0.04802803039550781, 0.048024574279785154, 0.048021663665771486, 
0.04806934356689453, 0.04809036636352539, 0.04808793640136719, 0.048917152404785155, 0.04823859024047852, 0.04758937454223633, 0.0478532485961914, 0.04785120010375977, 0.04803417587280273, 0.04773455810546875, 0.04761183929443359, 0.047882110595703124, 0.048399009704589845, 0.04778927993774414, 0.047877185821533205, 0.04773638534545899, 0.047953983306884766, 0.04765910339355469, 0.04790393447875976, 0.04758201599121094, 0.0478474235534668, 0.047777793884277345, 0.048166526794433596, 0.04821347045898437, 0.048218879699707035, 0.047947902679443356, 0.047965248107910155, 0.04796460723876953, 0.04808902359008789, 0.0480846061706543, 0.04782783889770508, 0.048154495239257813, 0.0478394546508789, 0.04801289749145508, 0.047990398406982424, 0.047859584808349606, 0.047952224731445316, 0.04782735824584961, 0.04813020706176758, 0.048027393341064456, 0.04833715057373047, 0.04856614303588867, 0.047978271484375, 0.0481099853515625, 0.047992767333984374, 0.04799283218383789, 0.04799897766113281, 0.04801740646362305, 0.04804636764526367, 0.0483521614074707, 0.048021793365478516, 0.04803142547607422, 0.04805718231201172, 0.04789657592773437, 0.048130046844482424, 0.04825702285766602, 0.048174816131591795, 0.04811932754516601, 0.04817081451416016, 0.04812486267089844, 0.048418815612792966, 0.04820751953125, 0.04812243270874023, 0.04925212860107422, 0.04845977783203125, 0.04827660751342774, 0.04901744079589844, 0.048205249786376955, 0.0476717758178711, 0.047867904663085936, 0.047661056518554686, 0.047615550994873045, 0.04756649780273438, 0.04786460876464844, 0.047884288787841796, 0.047855072021484375, 0.047921791076660156, 0.04789238357543945, 0.04797644805908203, 0.04792835235595703, 0.0478873291015625, 0.04783420944213867, 0.04763536071777344, 0.04792729568481445, 0.04792934417724609, 0.048901729583740235, 0.0485601921081543, 0.048173408508300784, 0.04826931381225586, 0.04796982574462891, 0.047765888214111325, 0.04800931167602539, 0.04767129516601563, 0.047935489654541016, 0.047933441162109375, 0.04797340774536133, 0.04808393478393555, 0.04780803298950195, 0.04801993560791016, 0.047933441162109375, 0.04788864135742187, 0.04811750411987305, 0.04812799835205078, 0.04786742401123047, 0.04802608108520508, 0.04793958282470703, 0.048096702575683596, 0.048000896453857425, 0.047962783813476566, 0.04815372848510742, 0.04791321563720703, 0.04803855895996094, 0.04801923370361328, 0.047943809509277346, 0.04798883056640625, 0.04814137649536133, 0.04801836776733399, 0.04812799835205078, 0.04803379058837891, 0.047890430450439454, 0.04809891128540039, 0.04806492614746094, 0.04824448013305664, 0.048197856903076174, 0.04804816055297852, 0.04813955307006836, 0.04803247833251953, 0.04819968032836914, 0.048121856689453124, 0.049148223876953126, 0.048205825805664064, 0.047836318969726566, 0.04775203323364258, 0.0478267822265625, 0.04773289489746094, 0.0479268798828125, 0.047954334259033206, 0.04792729568481445, 0.0481743049621582, 0.047964897155761715, 0.04796627044677734, 0.04816617584228516, 0.048056896209716794, 0.04815068817138672, 0.04811907196044922, 0.047909343719482425, 0.048236801147460935, 0.047967521667480466, 0.04825980758666992, 0.0482242546081543, 0.0481607666015625, 0.04800102233886719, 0.04791676712036133, 0.04784912109375, 0.04829248046875, 0.04843110275268555, 0.04815052795410156, 0.047981983184814454, 0.048097217559814456, 0.04824044799804687, 0.04806268692016601, 0.048005950927734374, 0.048153472900390626, 0.048202529907226566, 0.048342239379882815, 0.04815353775024414, 0.04839424133300781, 
0.048311489105224606, 0.04827628707885742, 0.048527359008789066, 0.048353374481201174, 0.04813737487792969, 0.04829436874389648, 0.048160224914550784, 0.048261470794677734, 0.04810316848754883, 0.04806291198730469, 0.048009185791015624, 0.04827791976928711, 0.048256256103515624, 0.0482966079711914, 0.048244735717773435, 0.048261119842529294, 0.04845363235473633, 0.048611328125, 0.048814079284667966, 0.04883251190185547, 0.04831436920166016, 0.04836966323852539, 0.04862774276733398, 0.04838396835327148, 0.04817667388916016, 0.04935887908935547, 0.048524513244628906, 0.0486506233215332, 0.048002975463867184, 0.04809571075439453, 0.047783935546875, 0.047833087921142575, 0.047736831665039066, 0.047818016052246094, 0.04787606430053711, 0.047971073150634765, 0.04782284927368164, 0.04803529739379883, 0.04801385498046875, 0.04788019180297851, 0.048057376861572264, 0.04794214248657227, 0.04796163177490234, 0.04788729476928711, 0.04818329620361328, 0.048078529357910155, 0.04815699386596679, 0.04809231948852539, 0.04797552108764649, 0.04908796691894531, 0.04814470291137695, 0.04790678405761719, 0.04806390380859375, 0.04788489532470703, 0.047939521789550785, 0.0478331527709961, 0.04804822540283203, 0.048072608947753906, 0.048092193603515625, 0.04789145660400391, 0.04784950256347656, 0.04794262313842773, 0.04797539138793945, 0.04799235153198242, 0.04861385726928711, 0.0481525764465332, 0.04844553756713867, 0.04853359985351562, 0.04810895919799805, 0.048029376983642576, 0.048317054748535156, 0.04823459243774414, 0.048107521057128906, 0.04793974304199219, 0.04814937591552734, 0.048161758422851565, 0.048213630676269534, 0.048038272857666015, 0.04825702285766602, 0.04796575927734375, 0.04796051025390625, 0.04825497436523438, 0.048325984954833985, 0.04799910354614258, 0.048140640258789065, 0.048250686645507815, 0.04824496078491211, 0.048468128204345706, 0.04905292892456055, 0.04824671936035156, 0.048029823303222655, 0.0479667854309082, 0.04791843032836914, 0.0477949104309082, 0.04793142318725586, 0.04793753433227539, 0.04820560073852539, 0.04796847915649414, 0.047912639617919923, 0.04797062301635742, 0.04787152099609375, 0.048019073486328126, 0.048096096038818356, 0.047946975708007815, 0.04772534561157227, 0.04820572662353516, 0.047734878540039063, 0.04808499145507812, 0.048322528839111326, 0.04835023880004883, 0.04815564727783203, 0.04824169540405274, 0.047807456970214844, 0.04823859024047852, 0.047925247192382815, 0.04808086395263672, 0.047962142944335935, 0.04820377731323242, 0.047998046875, 0.04795452880859375, 0.04788665771484375, 0.048078014373779294, 0.04798486328125, 0.04789670562744141, 0.047702495574951174, 0.04799283218383789, 0.04806860733032227, 0.047908863067626956, 0.04800710296630859, 0.04842313766479492, 0.048080734252929684, 0.04803763198852539, 0.0502110710144043, 0.048091136932373046, 0.04792272186279297, 0.048056800842285155, 0.04823040008544922, 0.048115711212158206, 0.04801891326904297, 0.048108062744140624, 0.0480008316040039, 0.04805855941772461, 0.04802764892578125, 0.048227424621582034, 0.04800198364257813, 0.04799484634399414, 0.04805980682373047, 0.048257568359375, 0.04804364776611328, 0.048159168243408206, 0.048510974884033206, 0.04917436981201172, 0.048175998687744144, 0.04767465591430664, 0.047561439514160156, 0.04771635055541992, 0.0476956787109375, 0.04784352111816406, 0.04875263977050781, 0.047711872100830076, 0.047798656463623045, 0.04791910552978516, 0.04774092864990234, 0.04790476989746094, 0.04793753433227539, 0.04785091018676758, 0.04792486572265625, 
0.04782179260253906, 0.04760780715942383, 0.04834099197387695, 0.04852121734619141, 0.04793660736083984, 0.04809328079223633, 0.0479136962890625, 0.04776121520996094, 0.047925537109375, 0.04816486358642578, 0.047742462158203124, 0.0477209587097168, 0.047876094818115236, 0.04782489776611328, 0.04789443206787109, 0.047914913177490234, 0.047556224822998046, 0.04804169464111328, 0.04801385498046875, 0.04795129776000977, 0.047954784393310544, 0.0479595832824707, 0.047621761322021484, 0.04811407852172851, 0.04804191970825195, 0.04801567840576172, 0.04801087951660156, 0.04833647918701172, 0.048053249359130856, 0.04817919921875, 0.0482344970703125, 0.04918044662475586, 0.048311614990234376, 0.048177345275878906, 0.048024158477783206, 0.04823871994018555, 0.048138240814208984, 0.04818739318847656, 0.048105472564697264, 0.04814451217651367, 0.048347007751464846, 0.048137313842773435, 0.0480307502746582, 0.048094303131103515, 0.04804307174682617, 0.048223968505859374, 0.04807238388061524, 0.04890236663818359, 0.04805785751342773, 0.04768928146362305, 0.04806729507446289, 0.04760400009155273, 0.047767551422119144, 0.048347137451171876, 0.04781238555908203, 0.04766326522827148, 0.04795808029174805, 0.04799478530883789, 0.0480313606262207, 0.04850735855102539, 0.047923198699951174, 0.04818739318847656, 0.04799270248413086, 0.047782142639160155, 0.048108928680419924, 0.04811196899414062, 0.04793487930297852, 0.048331775665283204, 0.048468929290771484, 0.04821894454956055, 0.04793324661254883, 0.04802163314819336, 0.048240703582763673, 0.04807475280761719, 0.04819148635864258, 0.04782470321655274, 0.04802988815307617, 0.04796364974975586, 0.048083553314208986, 0.048076160430908205, 0.048347679138183594, 0.04788188934326172, 0.04817750549316406, 0.048162815093994144, 0.04819148635864258, 0.04833280181884766, 0.04834415817260742, 0.04809532928466797, 0.04803462219238281, 0.048035072326660155, 0.04814720153808594, 0.04826012802124023, 0.04834336090087891, 0.048091808319091794, 0.04797257614135742, 0.04807190322875977, 0.048035808563232425, 0.047981151580810545, 0.048082782745361326, 0.047945758819580075, 0.04811750411987305, 0.04816099166870117, 0.04817318344116211, 0.04834243011474609, 0.04833135986328125, 0.04816444778442383, 0.048323009490966795, 0.04841062545776367, 0.048578048706054686, 0.04847244644165039]",tokens/s,20.821962259575876,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): 
File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 707, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.96 GiB. GPU 0 has a total capacity of 14.74 GiB of which 662.12 MiB is free. Process 133732 has 14.09 GiB memory in use. Of the allocated memory 13.97 GiB is allocated by PyTorch, and 6.66 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 270, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 69027 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 905.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 234.12 MiB is free. Process 166477 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", 
line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 362.12 MiB is free. Process 172561 has 14.38 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 1.78 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,888.139776,14333.902848,0.0,13931.380736,13915.964416,s,1,7.699271484375,7.699271484375,0.0,7.699271484375,7.699271484375,7.699271484375,7.699271484375,[7.699271484375],,kWh,9.265860012434738e-06,1.014012948439188e-06,4.089725493983121e-06,1.4369598454857047e-05,,MB,1216.462848,14749.138944,0.0,14336.131072,14291.630592,s,10,12.230245727539062,1.2230245727539062,0.004948853368918331,1.2242154541015626,1.2277965698242188,1.229052032470703,1.2300564025878906,"[1.211764892578125, 1.218526611328125, 1.21953955078125, 1.2303074951171875, 1.2240753173828125, 1.2243555908203125, 1.223915771484375, 1.224945068359375, 1.227517578125, 1.2252978515625]",tokens/s,209.31713532423984,kWh,3.570688490833315e-05,3.937980205092723e-06,2.3707796743999144e-05,6.335266185742501e-05,tokens/kWh,4040872.040643332,MB,1233.8176,14916.911104,0.0,14503.903232,14463.49568,s,10,40.320679931640626,4.032067993164063,0.003285971275315522,4.031379272460938,4.036291333007812,4.036915686035156,4.037415168457031,"[4.02819970703125, 4.030496337890625, 4.030724365234375, 4.02852978515625, 4.02836328125, 4.0320341796875, 4.036152587890625, 4.0375400390625, 4.032628173828125, 4.036011474609375]",tokens/s,15.624736514069138,kWh,0.00011790713006917637,1.3006058357620072e-05,7.829747930460096e-05,0.0002092106677313974,tokens/kWh,301131.8719219653,,s,630,40.317330455780066,0.06399576262822228,0.0002626721850871139,0.06399390411376954,0.0642992218017578,0.06441533279418946,0.0647574169921875,"[0.06477001953125, 0.06366617584228515, 0.06347395324707031, 0.06355734252929687, 0.06396902465820313, 0.06348112106323242, 0.06345542526245117, 0.06345606231689453, 0.06349382400512696, 0.06374233627319335, 0.06440128326416016, 0.06370921707153321, 0.06357353591918945, 0.06367484664916992, 0.0642498550415039, 0.06378700637817383, 0.06389759826660156, 0.06378860855102539, 0.06374649429321289, 0.06376243209838867, 0.06380547332763672, 0.06363951873779297, 0.06362009429931641, 0.06375936126708984, 0.06378201675415039, 0.06381804656982422, 0.06379945755004883, 0.06381814575195313, 0.06378227233886719, 0.06380534362792968, 0.06393324661254883, 0.06399990463256836, 0.06384400177001953, 0.06398531341552735, 0.06390854263305665, 0.06406553649902344, 0.0639815673828125, 0.0638966064453125, 0.06391907119750977, 0.06396752166748047, 0.0639928970336914, 0.06401686096191406, 0.06402639770507812, 0.06401837158203125, 0.0640823974609375, 0.06411244964599609, 0.0641619873046875, 0.06416998291015626, 0.06402607727050781, 0.06419264221191406, 0.06416015625, 0.06417407989501953, 0.06408582305908203, 0.06408767700195313, 0.06405097961425782, 
0.06413555145263672, 0.06411875152587891, 0.06421263885498046, 0.06419449615478516, 0.06420473480224609, 0.06416831970214844, 0.06431292724609375, 0.06438102722167968, 0.06467314910888672, 0.0636888313293457, 0.0634516487121582, 0.06348723220825195, 0.06355020904541016, 0.06353532791137695, 0.06346934509277344, 0.0635792007446289, 0.06362547302246094, 0.06368915176391601, 0.06370249557495117, 0.06372041702270508, 0.0636885108947754, 0.06370243072509765, 0.06388969421386718, 0.06398089599609375, 0.0637531852722168, 0.06362345504760743, 0.06384758377075195, 0.06376505661010742, 0.06375638580322265, 0.06383983993530273, 0.06376057434082032, 0.06383628845214843, 0.06381977462768555, 0.0639691505432129, 0.06388243103027344, 0.06386147308349609, 0.06389401626586914, 0.06403651428222656, 0.06407129669189453, 0.06415977478027343, 0.06402499389648438, 0.06419251251220703, 0.06386687850952148, 0.0639214096069336, 0.06390569686889648, 0.06388617706298828, 0.06391609573364258, 0.0639540786743164, 0.06403116607666015, 0.06407612609863281, 0.06394265747070313, 0.06427555084228516, 0.06417526245117187, 0.06421494293212891, 0.06419446563720703, 0.0641514892578125, 0.06416178894042969, 0.06408172607421875, 0.06410851287841797, 0.06419888305664062, 0.06406082916259766, 0.06401289367675782, 0.064036865234375, 0.06412445068359375, 0.06569213104248046, 0.06427056121826172, 0.06422054290771484, 0.0643193588256836, 0.06434441375732422, 0.06428486633300781, 0.06424098968505859, 0.06448047637939452, 0.06366479873657227, 0.06403202819824219, 0.06349008178710938, 0.06355846405029297, 0.06356784057617187, 0.06441577911376953, 0.06361836624145507, 0.06360492706298829, 0.06370569610595703, 0.0637985610961914, 0.0637465591430664, 0.06370022583007813, 0.06378956985473633, 0.06379967880249024, 0.06392211151123046, 0.06384143829345704, 0.06385760116577148, 0.06373782348632813, 0.06368057632446289, 0.06358972930908204, 0.06375593566894532, 0.06444947052001954, 0.06383190536499024, 0.06383427047729492, 0.06387625503540038, 0.06389820861816406, 0.06388483047485352, 0.06392825698852539, 0.06406201934814452, 0.06418163299560548, 0.06407651519775391, 0.06405337524414062, 0.06405084991455078, 0.06405091094970704, 0.06400390625, 0.06397216033935547, 0.0640184326171875, 0.06408502197265625, 0.06406857299804687, 0.06408601379394531, 0.06404914855957031, 0.06405120086669921, 0.06418841552734375, 0.06408806610107422, 0.06410034942626953, 0.06433113861083985, 0.0641211166381836, 0.06415805053710938, 0.06410758209228516, 0.06408470153808593, 0.06405347442626953, 0.06400204467773438, 0.06409420776367188, 0.06403072357177735, 0.06408191680908203, 0.06406934356689453, 0.06417414093017579, 0.06414921569824218, 0.06414182281494141, 0.0641371841430664, 0.06428233337402343, 0.06414982604980468, 0.06446134185791015, 0.06369398498535156, 0.06352521514892578, 0.06397798538208008, 0.06390374374389649, 0.06362662506103516, 0.06372006225585937, 0.06361907196044922, 0.06368255996704102, 0.06363935852050781, 0.06387731170654297, 0.0636098861694336, 0.06360076904296875, 0.06369161605834961, 0.06371123123168945, 0.06380934524536133, 0.06389574432373046, 0.06382108688354492, 0.06377139282226563, 0.06365385437011718, 0.0637534065246582, 0.06381856155395507, 0.0637583351135254, 0.0637501449584961, 0.06388147354125977, 0.06401132965087891, 0.06388806533813476, 0.06394899368286133, 0.06381343841552735, 0.06385820770263671, 0.06393904113769532, 0.06390988922119141, 0.0640093765258789, 0.06391187286376954, 0.06381046295166015, 0.06389311981201172, 
0.06396460723876952, 0.06396614456176758, 0.06388694381713868, 0.06390620803833008, 0.0639918098449707, 0.06403072357177735, 0.0639323501586914, 0.06396912002563476, 0.06397564697265624, 0.06412879943847656, 0.06409760284423828, 0.06406636810302735, 0.06449699401855469, 0.0646561279296875, 0.06411174774169921, 0.06413152313232422, 0.06408460998535156, 0.0641493148803711, 0.06408707427978516, 0.06425276947021484, 0.06413938903808594, 0.06416726684570312, 0.06413788604736329, 0.06409203338623047, 0.0641046371459961, 0.06425593566894532, 0.06418144226074218, 0.0648702392578125, 0.0638966064453125, 0.06362108612060546, 0.06362521743774414, 0.06378201675415039, 0.06354608154296874, 0.06356905746459961, 0.06356825637817383, 0.06355827331542968, 0.06374339294433594, 0.06365363311767579, 0.0636338882446289, 0.063670654296875, 0.06364518356323243, 0.06386534500122071, 0.06396681594848633, 0.06390803146362305, 0.06392035293579101, 0.06384844970703125, 0.06376812744140625, 0.06368915176391601, 0.06376857757568359, 0.06382387161254882, 0.06388735961914062, 0.0637880973815918, 0.0638669776916504, 0.06375305557250976, 0.06377875137329102, 0.06393814468383789, 0.06408649444580078, 0.06397951889038087, 0.06403276824951172, 0.06407328033447265, 0.06399014282226563, 0.06382115173339843, 0.06396707153320312, 0.06397017669677735, 0.06390351867675781, 0.06388700866699219, 0.06390636825561523, 0.06385161590576172, 0.0638309440612793, 0.06394265747070313, 0.06402662658691406, 0.06409571075439453, 0.06407222747802735, 0.06422118377685547, 0.0642452163696289, 0.0640763168334961, 0.06406777954101563, 0.0640767364501953, 0.06402726745605469, 0.06402687835693359, 0.0641638412475586, 0.06400819396972657, 0.06404271697998047, 0.06402191925048828, 0.06413811492919921, 0.06424371337890625, 0.06419478607177734, 0.06435203552246094, 0.06430255889892578, 0.06443891143798829, 0.06448934173583984, 0.06373171234130859, 0.06365184020996094, 0.06359859085083008, 0.06362521743774414, 0.06371532821655274, 0.06363264083862305, 0.06372019195556641, 0.06356377410888672, 0.0637089614868164, 0.06372169494628906, 0.06377676773071289, 0.06375228881835937, 0.06374800109863281, 0.06393849563598633, 0.06404029083251953, 0.06402243041992188, 0.06412191772460937, 0.06381475067138671, 0.06372172927856445, 0.06379766464233398, 0.06377027130126953, 0.06381603240966797, 0.06375334548950196, 0.06374284744262695, 0.06391193771362305, 0.06385795211791992, 0.06392310333251953, 0.06388716888427734, 0.06400409698486328, 0.06408396911621093, 0.06395289611816406, 0.06394265747070313, 0.06390374374389649, 0.06390988922119141, 0.06385049438476563, 0.06380064010620118, 0.06384534454345703, 0.06392351913452149, 0.06390326309204102, 0.06423974609375, 0.0642915802001953, 0.06414854431152343, 0.06416185760498047, 0.0642015380859375, 0.06430111694335937, 0.06438301086425781, 0.06423072052001953, 0.06411945343017578, 0.06409011077880859, 0.06408370971679687, 0.06413116455078124, 0.06417340850830078, 0.06418515014648438, 0.06411811065673828, 0.06423401641845704, 0.06423564910888672, 0.06421094512939453, 0.06441478729248047, 0.06435635375976563, 0.06449452972412109, 0.06435024261474609, 0.0648416976928711, 0.06495315551757813, 0.06388092803955078, 0.06375033569335938, 0.06367023849487305, 0.06364982223510743, 0.06363545608520507, 0.0636673927307129, 0.06377264022827149, 0.06370185470581055, 0.06380134582519531, 0.06372351837158204, 0.06373990249633789, 0.06386633682250976, 0.06384896087646484, 0.06396633529663086, 0.06405599975585938, 0.06391033554077148, 
0.06397110366821289, 0.06405919647216797, 0.06386032104492187, 0.06383631896972657, 0.06384684753417968, 0.0638279685974121, 0.06376838302612305, 0.06384761428833008, 0.06399491119384766, 0.06387609481811524, 0.06409919738769532, 0.06396473693847657, 0.06410726165771484, 0.0641157455444336, 0.06403282928466797, 0.0640519027709961, 0.06406880187988281, 0.0641170883178711, 0.06395747375488281, 0.06399980926513672, 0.06405667114257813, 0.06409097290039062, 0.06404444885253906, 0.06405078125, 0.06417407989501953, 0.0642768325805664, 0.06423619079589844, 0.06428057861328125, 0.06436393737792968, 0.0642883529663086, 0.06429574584960937, 0.0644181137084961, 0.06428659057617188, 0.06425395202636719, 0.06422322845458985, 0.06425843048095703, 0.06424543762207031, 0.06423136138916016, 0.06427677154541016, 0.06434076690673828, 0.06426719665527343, 0.06424745941162109, 0.06431983947753907, 0.06438297271728516, 0.06445846557617188, 0.06442217254638671, 0.0647265625, 0.06380783843994141, 0.06368675231933593, 0.06367987060546874, 0.06358806228637695, 0.06358083343505859, 0.06362300872802734, 0.06368697738647461, 0.06385187149047851, 0.06393308639526367, 0.06379724884033203, 0.06368835067749024, 0.06372569656372071, 0.06383135986328126, 0.06414608001708984, 0.06412518310546875, 0.06409420776367188, 0.06413085174560547, 0.06387939071655273, 0.0638479995727539, 0.06380793762207031, 0.06380134582519531, 0.0639161605834961, 0.06381737518310547, 0.06386460876464843, 0.06396358489990234, 0.06387097549438477, 0.06393219375610351, 0.0639361915588379, 0.06404940795898438, 0.06438070678710937, 0.06424739074707031, 0.0646317138671875, 0.0642675552368164, 0.06444630432128906, 0.06406432342529297, 0.06400211334228516, 0.06400208282470703, 0.06403177642822265, 0.06431839752197266, 0.06415360260009766, 0.06419455718994141, 0.06408710479736328, 0.0640992660522461, 0.06418637084960938, 0.06428031921386719, 0.06426153564453126, 0.06431625366210937, 0.06426624298095703, 0.06420671844482422, 0.06418009948730469, 0.06408163452148438, 0.06417462158203124, 0.06412902069091797, 0.06471897888183593, 0.06447446441650391, 0.06440399932861328, 0.06422905731201171, 0.06438038635253907, 0.06414832305908204, 0.06467174530029297, 0.06442201232910157, 0.06435369873046876, 0.06455689239501954, 0.0637558708190918, 0.06364601516723632, 0.0636459846496582, 0.06361494445800782, 0.06375568008422852, 0.06365439987182617, 0.06365404891967773, 0.06366803359985351, 0.06377484893798828, 0.06373347091674805, 0.06388150405883788, 0.0638218231201172, 0.06389318466186523, 0.06387139129638672, 0.06414118194580078, 0.06397545623779297, 0.06381318283081054, 0.06377721786499023, 0.06382364654541016, 0.06380156707763672, 0.06390099334716796, 0.06371120071411132, 0.06382236862182618, 0.0639224624633789, 0.06383564758300782, 0.0638875846862793, 0.06395427322387695, 0.06399283218383789, 0.06402031707763672, 0.06409420776367188, 0.06417005157470704, 0.06405068969726563, 0.06401241302490235, 0.06386310577392579, 0.06396108627319336, 0.06397747039794922, 0.06406953430175781, 0.06396899032592773, 0.06402082824707031, 0.06395865631103516, 0.06406716918945313, 0.06417628479003906, 0.06415443420410157, 0.06413295745849609, 0.06417417907714844, 0.0643439712524414, 0.0641269760131836, 0.0641678695678711, 0.06421305847167968, 0.06422956848144531, 0.0641063003540039, 0.06409241485595703, 0.0641553955078125, 0.06419251251220703, 0.06415987396240234, 0.06415302276611329, 0.06423596954345703, 0.06419369506835937, 0.06421119689941407, 0.06438102722167968, 0.06480332946777344, 
0.06436022186279297, 0.06496060943603515, 0.06411830139160156, 0.06371366500854492, 0.06348390579223633, 0.06356787109375, 0.06366822433471679, 0.06371299362182617, 0.06366851043701172, 0.06366412734985352, 0.06384960174560547, 0.06366707229614257, 0.06373580932617187, 0.06387628936767578, 0.06377872085571289, 0.06388748931884766, 0.06399055862426759, 0.0641514892578125, 0.06404307556152344, 0.06379900741577148, 0.06370537567138672, 0.06369311904907227, 0.0637347183227539, 0.06390617752075195, 0.0639114875793457, 0.06407046508789062, 0.06393561553955078, 0.06454156494140625, 0.06409830474853516, 0.06404489898681641, 0.06414147186279297, 0.06414335632324218, 0.06417951965332032, 0.0640887680053711, 0.06413318634033204, 0.06407788848876952, 0.06397103881835937, 0.06403084564208984, 0.06398364639282227, 0.06401023864746094, 0.06402662658691406, 0.06402620697021484, 0.06400656127929688, 0.06415519714355469, 0.06421292877197265, 0.06423731231689453, 0.06430796813964844, 0.0645959701538086, 0.06437264251708984, 0.06432105255126953, 0.06415644836425781, 0.06411030578613282, 0.06420448303222656, 0.06448576354980469, 0.06425804901123047, 0.06421094512939453, 0.06429901123046874, 0.06416748809814453, 0.06424825286865235, 0.06414540863037109, 0.06432358551025391, 0.0643051528930664, 0.06427168273925782, 0.06444518280029297]",tokens/s,15.626034583092814,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 75057 has 14.69 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 1.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 272, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 406.12 MiB is free. Process 78164 has 14.34 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 1.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 711, in __init__ self.transformer = CodeGenModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 472, in 
__init__ self.h = nn.ModuleList([CodeGenBlock(config, layer_idx=i) for i in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 472, in self.h = nn.ModuleList([CodeGenBlock(config, layer_idx=i) for i in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 303, in __init__ self.mlp = CodeGenMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 281, in __init__ self.fc_in = nn.Linear(embed_dim, intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 252.12 MiB is free. Process 92146 has 14.49 GiB memory in use. Of the allocated memory 14.38 GiB is allocated by PyTorch, and 2.15 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,788.598784,2266.89024,0.0,1864.368128,1714.486272,s,1,7.7987841796875,7.7987841796875,0.0,7.7987841796875,7.7987841796875,7.7987841796875,7.7987841796875,[7.7987841796875],,kWh,4.674984212510935e-06,5.08075107937644e-07,9.069451700066677e-07,6.090004490455247e-06,,MB,1153.757184,2281.570304,0.0,1868.562432,1692.384256,s,10,1.5786035919189454,0.15786035919189453,0.0010114715769312791,0.1580458450317383,0.1588689987182617,0.159097013092041,0.15927942459106445,"[0.15838067626953126, 0.15604380798339842, 0.15792970275878906, 0.15627183532714845, 0.1579791717529297, 0.1593250274658203, 0.1585751953125, 0.15716732788085938, 0.15811251831054687, 0.15881832885742186]",tokens/s,1621.6864151994437,kWh,4.821300242692093e-06,5.313671562015256e-07,3.202493454524473e-06,8.555160853418093e-06,tokens/kWh,29923458.411389057,MB,1192.771584,2430.468096,0.0,2017.460224,1714.458624,s,10,15.817534179687502,1.5817534179687502,0.008062151450193357,1.5838314208984374,1.5916007080078125,1.5919495239257813,1.5922285766601563,"[1.5880164794921876, 1.5834056396484375, 1.591523193359375, 1.579268310546875, 1.5842572021484376, 1.5722794189453124, 1.58643798828125, 1.5690067138671875, 1.59229833984375, 
1.5710408935546876]",tokens/s,39.82921692111978,kWh,4.5732177542723363e-05,5.044322129872749e-06,2.283680515467532e-05,7.361330482727142e-05,tokens/kWh,855823.5518405972,,s,630,15.809636312484738,0.025094660813467843,0.0006999974068956112,0.024994815826416016,0.025419071197509767,0.02559389991760254,0.028200954914093065,"[0.025421663284301756, 0.025087936401367188, 0.02530668830871582, 0.025221471786499024, 0.02504310417175293, 0.025092031478881834, 0.025749568939208985, 0.025277727127075194, 0.025512672424316405, 0.025591999053955077, 0.025371871948242187, 0.02517238426208496, 0.02521107292175293, 0.02523756790161133, 0.025513919830322265, 0.02550704002380371, 0.02537513542175293, 0.025358720779418944, 0.025622528076171876, 0.025067520141601563, 0.025221120834350585, 0.025259424209594726, 0.025330272674560547, 0.02513088035583496, 0.025397567749023436, 0.02522707176208496, 0.025095296859741212, 0.024850944519042968, 0.025059711456298827, 0.025069120407104493, 0.02502684783935547, 0.024882848739624024, 0.02502092742919922, 0.02521660804748535, 0.025732608795166017, 0.024968032836914063, 0.025101951599121094, 0.02498809623718262, 0.024987424850463867, 0.024878496170043944, 0.025092832565307616, 0.02537481689453125, 0.02521603202819824, 0.02515017509460449, 0.025024799346923827, 0.02516966438293457, 0.02538912010192871, 0.02536000061035156, 0.025047584533691405, 0.02532352066040039, 0.02638447952270508, 0.024963071823120117, 0.02493244743347168, 0.02471865653991699, 0.024723360061645508, 0.024883808135986327, 0.02508153533935547, 0.025070240020751953, 0.02487228775024414, 0.024926528930664063, 0.025163967132568358, 0.02506118392944336, 0.02507366371154785, 0.02554287910461426, 0.02526185607910156, 0.02507151985168457, 0.025008415222167967, 0.024768703460693358, 0.025028608322143556, 0.025216127395629884, 0.025526784896850587, 0.02519455909729004, 0.025043264389038086, 0.025078847885131837, 0.025068704605102538, 0.024974143981933594, 0.02540438461303711, 0.025468191146850585, 0.024996576309204103, 0.025210880279541017, 0.024923231124877928, 0.025197471618652344, 0.025282079696655274, 0.02543449592590332, 0.0255897274017334, 0.02572051239013672, 0.025040864944458008, 0.025276575088500976, 0.025181535720825196, 0.024994432449340822, 0.0251909122467041, 0.025230783462524414, 0.02494118309020996, 0.02494054412841797, 0.025003807067871094, 0.024985599517822265, 0.02527027130126953, 0.024844032287597656, 0.024801536560058592, 0.0247511043548584, 0.024894464492797853, 0.024961055755615233, 0.025350400924682617, 0.024853664398193358, 0.02500009536743164, 0.02504924774169922, 0.02503113555908203, 0.025546239852905273, 0.025458976745605467, 0.025046335220336915, 0.024949440002441405, 0.024948415756225587, 0.02497158432006836, 0.024899456024169923, 0.02496473693847656, 0.024814367294311523, 0.024706239700317382, 0.024541343688964844, 0.024759775161743165, 0.02524633598327637, 0.02542777633666992, 0.024988128662109376, 0.024967168807983397, 0.025249792098999024, 0.02575472068786621, 0.025779104232788085, 0.025077856063842774, 0.025595455169677733, 0.025365055084228514, 0.025367551803588868, 0.02525663948059082, 0.025084096908569335, 0.02516543960571289, 0.025446783065795897, 0.025067520141601563, 0.027270912170410156, 0.025280160903930662, 0.025125471115112305, 0.024963071823120117, 0.02489958381652832, 0.024997888565063478, 0.02514486312866211, 0.02490969657897949, 0.024541791915893556, 0.024676448822021486, 0.024825759887695312, 0.0246824951171875, 0.024839712142944337, 0.025440576553344727, 
0.025059488296508788, 0.0248668155670166, 0.024772607803344726, 0.02474188804626465, 0.025114656448364258, 0.02501398468017578, 0.02488470458984375, 0.024961471557617188, 0.025121023178100586, 0.025779647827148436, 0.02519011116027832, 0.025138111114501954, 0.02494054412841797, 0.025044511795043946, 0.02514921569824219, 0.024932416915893554, 0.025118719100952147, 0.024919679641723633, 0.024867328643798828, 0.02528486442565918, 0.025082111358642578, 0.024903936386108397, 0.024858495712280274, 0.024907648086547853, 0.024860671997070313, 0.025336992263793944, 0.032938720703125, 0.025888383865356444, 0.02482431983947754, 0.025161727905273438, 0.025126815795898438, 0.02518966484069824, 0.02562928009033203, 0.02511039924621582, 0.024897216796875, 0.025049951553344725, 0.025067327499389648, 0.02514761543273926, 0.025362207412719728, 0.025430015563964844, 0.02516377639770508, 0.02511177635192871, 0.024976160049438475, 0.025340160369873046, 0.024929759979248046, 0.024701215744018554, 0.026330144882202148, 0.025335840225219727, 0.025216991424560548, 0.025286943435668945, 0.024998592376708983, 0.024953983306884767, 0.02490012741088867, 0.024867168426513674, 0.025257984161376954, 0.02511369514465332, 0.025032800674438478, 0.025727807998657228, 0.025418783187866213, 0.024769344329833985, 0.02545680046081543, 0.025143392562866212, 0.024827199935913084, 0.025082271575927736, 0.024887487411499022, 0.02544041633605957, 0.025481056213378907, 0.025587648391723634, 0.025083967208862305, 0.025010175704956054, 0.024840192794799806, 0.024976863861083984, 0.024885791778564453, 0.024799232482910157, 0.02504457664489746, 0.02499420738220215, 0.024876928329467772, 0.02476038360595703, 0.025110591888427736, 0.024797183990478516, 0.024604480743408205, 0.02484217643737793, 0.0251712646484375, 0.024889375686645506, 0.024791967391967772, 0.025137344360351564, 0.025302848815917968, 0.024651584625244142, 0.024677888870239258, 0.024836799621582032, 0.025341951370239257, 0.02503219223022461, 0.024903360366821288, 0.02483692741394043, 0.024854143142700194, 0.024635391235351564, 0.02521126365661621, 0.02470911979675293, 0.025142431259155273, 0.024885696411132814, 0.02501171112060547, 0.025582271575927733, 0.02493052864074707, 0.025403295516967773, 0.025154848098754883, 0.024865087509155274, 0.024948640823364256, 0.024845056533813477, 0.02493401527404785, 0.0250534725189209, 0.025325151443481447, 0.02514998435974121, 0.025237119674682618, 0.025092319488525392, 0.02493417549133301, 0.025127103805541992, 0.02496291160583496, 0.02531715202331543, 0.025348480224609376, 0.02518550491333008, 0.025254688262939452, 0.025040767669677735, 0.025188480377197266, 0.025186304092407227, 0.02588057518005371, 0.025143199920654297, 0.025130943298339845, 0.02489593505859375, 0.024989408493041994, 0.02481724739074707, 0.02478223991394043, 0.02512179183959961, 0.024827903747558593, 0.025163520812988283, 0.024725664138793946, 0.024748128890991213, 0.024955968856811523, 0.024880064010620116, 0.02507980728149414, 0.02506287956237793, 0.024838911056518555, 0.024927104949951172, 0.024984479904174805, 0.02488047981262207, 0.024617919921875, 0.025210592269897462, 0.024899839401245117, 0.024730432510375978, 0.024793472290039063, 0.024660255432128905, 0.02482204818725586, 0.025013952255249022, 0.02492243194580078, 0.024836095809936523, 0.02495692825317383, 0.024944639205932616, 0.02496099281311035, 0.024827680587768554, 0.02485443115234375, 0.024841888427734375, 0.025443008422851562, 0.032290817260742184, 0.024929439544677735, 0.02502332878112793, 
0.02549504089355469, 0.024990207672119142, 0.02497769546508789, 0.025049535751342774, 0.024949024200439453, 0.02490547180175781, 0.024938751220703125, 0.024651264190673827, 0.02504550361633301, 0.025042335510253907, 0.024754783630371095, 0.025016128540039064, 0.024920223236083984, 0.025400991439819335, 0.025084447860717774, 0.024797023773193358, 0.02498262405395508, 0.02519343948364258, 0.02485446357727051, 0.024880800247192383, 0.02461311912536621, 0.024821855545043944, 0.02457804870605469, 0.024626399993896483, 0.024868736267089842, 0.024611743927001953, 0.025001983642578125, 0.024821760177612305, 0.02465996742248535, 0.024641536712646486, 0.02488652801513672, 0.025173887252807618, 0.029971296310424805, 0.025409568786621095, 0.02494259262084961, 0.025026559829711914, 0.02497331237792969, 0.024814624786376954, 0.024761407852172852, 0.02462224006652832, 0.024563583374023437, 0.024527744293212892, 0.024641088485717774, 0.024864351272583008, 0.02480009651184082, 0.025092096328735353, 0.025087743759155273, 0.024889856338500976, 0.025068735122680662, 0.025045568466186524, 0.024776031494140625, 0.024777280807495118, 0.02454537582397461, 0.02454035186767578, 0.024599615097045897, 0.02495782470703125, 0.02475289535522461, 0.025030784606933594, 0.024983039855957033, 0.02482022476196289, 0.02503651237487793, 0.024979391098022462, 0.02466009521484375, 0.024653823852539062, 0.024562976837158204, 0.025106176376342774, 0.02523347282409668, 0.029853887557983398, 0.024925792694091797, 0.024621248245239258, 0.024789247512817383, 0.024777727127075197, 0.025057600021362304, 0.02473846435546875, 0.02536787223815918, 0.024517152786254885, 0.024652128219604493, 0.024667871475219726, 0.024477792739868165, 0.024481952667236326, 0.02456150436401367, 0.024626272201538086, 0.025994144439697265, 0.025179967880249024, 0.024845855712890625, 0.02858083152770996, 0.02495859146118164, 0.024837215423583983, 0.024681087493896484, 0.024977760314941408, 0.026129728317260743, 0.025045215606689455, 0.02501452827453613, 0.025038944244384766, 0.0246560001373291, 0.02474380874633789, 0.024578176498413085, 0.026822656631469727, 0.02494611167907715, 0.025166528701782227, 0.024844160079956056, 0.0260578556060791, 0.025019039154052736, 0.024785120010375975, 0.02485830307006836, 0.025018720626831054, 0.024838111877441407, 0.025116672515869142, 0.02519251251220703, 0.024879264831542968, 0.02553628730773926, 0.025049087524414062, 0.02571673583984375, 0.02508297538757324, 0.024973567962646485, 0.025493568420410156, 0.02604092788696289, 0.025024736404418945, 0.025226367950439452, 0.025285280227661133, 0.02480496025085449, 0.0247238712310791, 0.02495692825317383, 0.025132896423339844, 0.0248505916595459, 0.02473779106140137, 0.024762367248535155, 0.024963071823120117, 0.025292415618896485, 0.02506380844116211, 0.025135103225708007, 0.025118719100952147, 0.02485990333557129, 0.02506604766845703, 0.02521660804748535, 0.025207103729248045, 0.025168127059936523, 0.025273984909057617, 0.025087871551513673, 0.02481612777709961, 0.02497337532043457, 0.0250634880065918, 0.025216960906982423, 0.025113792419433595, 0.024883232116699218, 0.0246115837097168, 0.02466739273071289, 0.024478464126586913, 0.024407808303833007, 0.02425267219543457, 0.024276607513427733, 0.024645536422729493, 0.024333919525146484, 0.024520639419555665, 0.024716224670410156, 0.024965024948120116, 0.024813663482666014, 0.02469171142578125, 0.024904703140258787, 0.02486419105529785, 0.024748512268066406, 0.02478499221801758, 0.024698463439941407, 0.024636032104492188, 
0.024581472396850587, 0.024746431350708007, 0.024627199172973634, 0.024782976150512694, 0.024815488815307617, 0.02465177536010742, 0.02465897560119629, 0.024888288497924804, 0.028604415893554686, 0.025133056640625, 0.024907007217407226, 0.02488175964355469, 0.02480143928527832, 0.024713216781616212, 0.02471900749206543, 0.02502627182006836, 0.024680448532104493, 0.024735679626464845, 0.02483475112915039, 0.024737247467041014, 0.024732351303100586, 0.024815040588378905, 0.02492163276672363, 0.024833023071289064, 0.024836063385009766, 0.024797088623046876, 0.025210880279541017, 0.025257984161376954, 0.0252043514251709, 0.02504742431640625, 0.024788480758666992, 0.025043455123901368, 0.02487215995788574, 0.02550163269042969, 0.02497417640686035, 0.02492620849609375, 0.025075168609619142, 0.025072160720825194, 0.024885183334350587, 0.0255916805267334, 0.02536854362487793, 0.025355552673339842, 0.025299840927124024, 0.025433216094970703, 0.025265087127685548, 0.024952032089233397, 0.025046848297119142, 0.02516067123413086, 0.024986751556396486, 0.024906623840332032, 0.025196544647216795, 0.0248319034576416, 0.0249836483001709, 0.02511680030822754, 0.025442176818847657, 0.02503811264038086, 0.025040672302246093, 0.02501750373840332, 0.02577782440185547, 0.032923809051513674, 0.02514851188659668, 0.025041215896606444, 0.025004608154296875, 0.024962528228759766, 0.025157632827758788, 0.025346591949462892, 0.025077472686767577, 0.024959264755249025, 0.024999935150146483, 0.02461212730407715, 0.02534844779968262, 0.02502079963684082, 0.02492006492614746, 0.026238624572753905, 0.026122592926025392, 0.025100288391113282, 0.0253439998626709, 0.025030656814575194, 0.02484543991088867, 0.02491801643371582, 0.024928224563598632, 0.02484739112854004, 0.02522319984436035, 0.024932031631469728, 0.025071552276611328, 0.02493631935119629, 0.025365856170654295, 0.025350559234619142, 0.025348703384399415, 0.02491187286376953, 0.025090240478515626, 0.025577407836914062, 0.025495552062988282, 0.024995199203491213, 0.024858911514282225, 0.02507321548461914, 0.02521999931335449, 0.024977151870727538, 0.025010047912597658, 0.025659648895263672, 0.025075168609619142, 0.025080352783203124, 0.02489753532409668, 0.024807167053222657, 0.025059743881225584, 0.025208671569824218, 0.024985599517822265, 0.024983552932739257, 0.02471116828918457, 0.025002111434936525, 0.024851327896118165, 0.02489632034301758, 0.024811487197875976, 0.024778688430786133, 0.024870752334594726, 0.024662464141845704, 0.02475951957702637, 0.02468739128112793, 0.024757951736450196, 0.02515590476989746, 0.024735231399536133, 0.02476288032531738, 0.024698591232299803, 0.024729759216308593, 0.024799360275268554, 0.02482099151611328, 0.02494495964050293, 0.02512735939025879, 0.024672256469726563, 0.025169631958007813, 0.02481955146789551, 0.02474233627319336, 0.024803327560424804, 0.024627199172973634, 0.02469465637207031, 0.02503424072265625, 0.02488598442077637, 0.024922176361083983, 0.025315168380737305, 0.025004032135009766, 0.024743776321411132, 0.024750368118286133, 0.024786367416381835, 0.024883647918701172, 0.025116672515869142, 0.024737535476684572, 0.02482815933227539, 0.024851583480834962, 0.02475916862487793, 0.024802528381347656, 0.02489014434814453, 0.02494879913330078, 0.024851871490478517]",tokens/s,39.84911401804317,, 
bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 158867 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 1.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 354, in __init__ self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 155967 has 14.74 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 90.39 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 353, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 163557 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,899.64544,15848.046592,0.0,15445.52448,15444.76416,s,1,7.5738876953125,7.5738876953125,0.0,7.5738876953125,7.5738876953125,7.5738876953125,7.5738876953125,[7.5738876953125],,kWh,9.61040027499583e-06,1.0395666691662227e-06,5.349170946002124e-06,1.599913789016418e-05,,MB,1235.755008,16051.470336,0.0,15638.462464,15617.135104,s,10,12.308637084960939,1.2308637084960936,0.0030884243436946337,1.23214453125,1.2340649658203124,1.2341809814453126,1.2342737939453126,"[1.2240340576171875, 1.226977783203125, 1.2298043212890626, 1.232192626953125, 1.232096435546875, 1.2299013671875, 1.2324566650390625, 1.232837646484375, 1.2342969970703126, 1.2340391845703125]",tokens/s,207.9840344897231,kWh,3.594379752541651e-05,3.964142869908809e-06,2.3885907997599905e-05,6.379384839292521e-05,tokens/kWh,4012926.1119852834,MB,1273.094144,16053.567488,0.0,15640.559616,15627.418112,s,10,40.06385961914062,4.006385961914062,0.003115739604579235,4.006652221679687,4.0103304443359375,4.010727600097656,4.011045324707031,"[4.0003583984375, 4.001973388671875, 4.011124755859375, 4.00795556640625, 4.0102421875, 4.00684521484375, 4.00569580078125, 4.006459228515625, 4.00719384765625, 4.00601123046875]",tokens/s,15.72489535429122,kWh,0.00011707049495458458,1.2913642619896471e-05,7.77648677673999e-05,0.00020774900534188093,tokens/kWh,303250.54936520354,,s,630,40.060713191986096,0.06358843363807315,0.00029104791481328915,0.06356172752380371,0.06389447937011719,0.06400498123168945,0.06475618728637696,"[0.06477043151855469, 0.0632279052734375, 0.06309628677368163, 0.06298268890380859, 0.0629043197631836, 0.0629389762878418, 0.06310927963256836, 0.06324192047119141, 0.06321369552612305, 0.06324652862548828, 0.0633979835510254, 0.06325190353393555, 0.0633730239868164, 0.06335929489135741, 0.06366867065429688, 0.06349824142456055, 0.06339583969116211, 0.06341551971435547, 0.06328819274902343, 0.06314761734008789, 0.0631319694519043, 0.06338694381713868, 0.06346207809448243, 0.06323920059204101, 0.06324262237548828, 0.06323260879516601, 0.06345702362060547, 0.06333670425415039, 0.06343251037597657, 0.0635533103942871, 0.06348627090454101, 0.06337750244140625, 0.06336716842651367, 0.06336511993408203, 0.06334668731689454, 0.0636448974609375, 0.06352566528320312, 0.06355148696899414, 0.06343068695068359, 0.06340604782104492, 0.06331596755981446, 0.06352441787719726, 0.06357263946533204, 0.06374156951904297, 0.06370729446411133, 0.06388121414184571, 0.06355551910400391, 0.0638771858215332, 0.06363699340820313, 0.06405785369873047, 0.06390364837646484, 0.063631103515625, 0.06372796630859374, 0.0637663345336914, 0.0635368309020996, 0.06351923370361329, 0.06369004821777344, 
0.06377951812744141, 0.0636866569519043, 0.06386073684692382, 0.06390579223632813, 0.06387916946411133, 0.06379852676391602, 0.06498633575439453, 0.0633476791381836, 0.06318080139160157, 0.06305791854858399, 0.06301900863647461, 0.06303948974609375, 0.06315008163452149, 0.0630797119140625, 0.06342704010009766, 0.06332220840454102, 0.06349635314941406, 0.06353100967407227, 0.06326867294311524, 0.06317200088500977, 0.06323712158203125, 0.06363727951049805, 0.06350812911987305, 0.06350064086914063, 0.06340950393676757, 0.06325315093994141, 0.06317660903930664, 0.06333052825927735, 0.06326464080810547, 0.06329344177246093, 0.06338150405883788, 0.06346752166748047, 0.06349532699584962, 0.06343270492553711, 0.06346575927734376, 0.06357651138305664, 0.06387702560424804, 0.0635107536315918, 0.06357196807861328, 0.06350438308715821, 0.06349641418457032, 0.06349391937255859, 0.06357756805419922, 0.06349203109741211, 0.06342716979980469, 0.06351193618774414, 0.0634332160949707, 0.06371340942382812, 0.06372700881958007, 0.06362992095947266, 0.0636080322265625, 0.06354745483398437, 0.06370377731323242, 0.0635880012512207, 0.06361084747314454, 0.06368499374389648, 0.06366336059570313, 0.06379801559448242, 0.06354108810424805, 0.06353526306152343, 0.06356528091430665, 0.06367081451416015, 0.06393654251098632, 0.06384185409545898, 0.06374851226806641, 0.06388115310668946, 0.06370240020751954, 0.06374675369262696, 0.06381494522094727, 0.06478438568115234, 0.06334054565429688, 0.06317846298217773, 0.06329987335205078, 0.06312140655517579, 0.06331990432739258, 0.06341238403320312, 0.06368051147460937, 0.06340403366088868, 0.06339782333374024, 0.06332012939453124, 0.06366617584228515, 0.06336886215209961, 0.06366604614257812, 0.06393920135498046, 0.063868896484375, 0.06366809463500976, 0.0638914566040039, 0.06334054565429688, 0.06321078491210938, 0.06329212951660156, 0.06343065643310547, 0.06378060913085938, 0.06374956893920898, 0.06355027389526367, 0.06366617584228515, 0.06346665573120117, 0.06370595169067383, 0.06370240020751954, 0.0640047378540039, 0.06383567810058594, 0.06379363250732421, 0.06346934509277344, 0.06360700988769531, 0.06361907196044922, 0.06349737548828124, 0.06375708770751953, 0.06352259063720703, 0.06359296035766601, 0.06392563247680665, 0.06370086288452148, 0.06352851104736328, 0.06366009521484375, 0.06369292831420899, 0.06387955093383789, 0.06395897674560547, 0.06401481628417968, 0.06361686325073242, 0.0636475830078125, 0.06369500732421875, 0.06388041687011718, 0.0638639030456543, 0.06405097961425782, 0.0638743667602539, 0.0637031021118164, 0.06345798492431641, 0.06344095993041993, 0.06372959899902343, 0.06373545455932617, 0.06426659393310546, 0.06413072204589844, 0.06389379119873047, 0.06361094284057617, 0.06470086669921875, 0.0634326400756836, 0.06348601531982422, 0.06347331237792969, 0.06350252914428711, 0.06326073455810546, 0.06299558258056641, 0.06301913452148437, 0.0631550407409668, 0.06346924972534179, 0.06356739044189454, 0.06376867294311524, 0.06376931381225585, 0.0637787857055664, 0.0638546257019043, 0.06345110321044922, 0.06383206558227539, 0.06347980880737304, 0.06340403366088868, 0.06329081726074219, 0.06322396850585937, 0.06324694442749024, 0.06315356826782227, 0.063912353515625, 0.06339993667602539, 0.0635143051147461, 0.06356806564331055, 0.06376271820068359, 0.06350991821289062, 0.06365027236938477, 0.06369891357421875, 0.06357401657104492, 0.06383369445800781, 0.06360924911499023, 0.06340198516845703, 0.06342041778564453, 0.06370300674438477, 0.0635076789855957, 
0.06344534301757812, 0.06337993621826171, 0.06367027282714843, 0.06394060897827149, 0.06390579223632813, 0.06404710388183593, 0.06368051147460937, 0.06369279861450196, 0.06365401458740234, 0.06361689758300781, 0.06372755050659179, 0.06383417510986328, 0.06360905456542969, 0.06374745559692382, 0.06359395217895508, 0.06379206466674804, 0.0636416015625, 0.06376448059082031, 0.06372275161743164, 0.06369152069091796, 0.06378905487060547, 0.06376176071166992, 0.06375286483764649, 0.0639733772277832, 0.06381897735595703, 0.06491340637207031, 0.0633589744567871, 0.06315622329711915, 0.0633589744567871, 0.06329337692260742, 0.06327302551269531, 0.06346956634521485, 0.06326428985595703, 0.06322019195556641, 0.06323606491088868, 0.0632724494934082, 0.06356358337402344, 0.06360086441040039, 0.06384009552001953, 0.06380169677734375, 0.0635209617614746, 0.06354751968383789, 0.06343884658813477, 0.06328028869628906, 0.06336188888549804, 0.06323199844360351, 0.06349619293212891, 0.06350223922729492, 0.06352700805664062, 0.0636146240234375, 0.06349971389770508, 0.06360483169555664, 0.0636321907043457, 0.06358793640136719, 0.06354780960083008, 0.06359408187866211, 0.06352022552490234, 0.06384531021118164, 0.06388844680786133, 0.06431430053710938, 0.06384598541259766, 0.0634977912902832, 0.06343910217285156, 0.06350294494628907, 0.06343452835083008, 0.06351692962646484, 0.06384761428833008, 0.06382672119140625, 0.06393548965454102, 0.06429593658447266, 0.06394060897827149, 0.06375993728637695, 0.06352758407592773, 0.0635547218322754, 0.06364019012451172, 0.06345523071289062, 0.06377372741699219, 0.06390431976318359, 0.06373417663574218, 0.06385372924804687, 0.0638873291015625, 0.0637341423034668, 0.06390425491333007, 0.0637132797241211, 0.06405120086669921, 0.06376652908325195, 0.06377875137329102, 0.06458326721191407, 0.0647213134765625, 0.06320435333251953, 0.06320220947265626, 0.06298796844482422, 0.06308652877807618, 0.06347760009765625, 0.06337123107910156, 0.06420137786865235, 0.06399795150756836, 0.06357196807861328, 0.0635654411315918, 0.06332787322998047, 0.06326348876953125, 0.06338873672485351, 0.06363584136962891, 0.06352544021606445, 0.0634224624633789, 0.06336511993408203, 0.06359462356567383, 0.06333427047729492, 0.06316851043701172, 0.06379670333862304, 0.06348649597167969, 0.06340607833862305, 0.06326681518554687, 0.06354329681396484, 0.06353919982910156, 0.0633098258972168, 0.06356076812744141, 0.06365670394897462, 0.0635470085144043, 0.0635172462463379, 0.06341007995605469, 0.06347129440307617, 0.0636522560119629, 0.06365903854370117, 0.06397190475463867, 0.06400358581542968, 0.0636773452758789, 0.06343791961669921, 0.0635462417602539, 0.0635269432067871, 0.06343020629882813, 0.06351507186889649, 0.06348713684082032, 0.06359328079223633, 0.06359823989868164, 0.06386521530151368, 0.06366115188598633, 0.06375619125366211, 0.06381667327880859, 0.0638421745300293, 0.06381372833251953, 0.06400006103515625, 0.06395084762573242, 0.06373523330688477, 0.06370521545410156, 0.06358012771606446, 0.06346716690063477, 0.06363423919677734, 0.06414691162109375, 0.06379971313476562, 0.06374208068847656, 0.06478604888916016, 0.06335318374633789, 0.06308982467651367, 0.06310591888427734, 0.06312534332275391, 0.06326259231567383, 0.06326710510253906, 0.0632463035583496, 0.06338297653198242, 0.06324195098876953, 0.06338576126098633, 0.0633740463256836, 0.06337526321411133, 0.06344825744628907, 0.06354217529296875, 0.06348556900024414, 0.06345913696289063, 0.06375660705566406, 0.06349235153198242, 
0.06332169723510742, 0.06320694351196289, 0.06322675323486328, 0.06321971130371094, 0.06332006454467773, 0.06364339065551758, 0.06364390563964843, 0.06348384094238281, 0.06348966217041016, 0.06355561447143555, 0.06360310363769531, 0.06371705627441407, 0.06357436752319336, 0.06357334518432617, 0.06374668884277344, 0.06350617599487304, 0.06353440093994141, 0.0634951057434082, 0.06355152130126954, 0.06338560104370117, 0.06334611129760742, 0.06335132980346679, 0.06352191925048828, 0.06370598220825195, 0.06366003036499024, 0.06378204727172851, 0.06375305557250976, 0.06374003219604492, 0.06400518035888672, 0.06393900680541992, 0.06361126327514649, 0.06369494247436523, 0.06374288177490234, 0.06387609481811524, 0.06364569473266601, 0.06375603103637695, 0.06402886199951172, 0.06392393493652344, 0.0637624626159668, 0.06368716812133789, 0.06388470458984374, 0.0639389762878418, 0.06386892700195312, 0.06417558288574218, 0.06503440093994141, 0.06330163192749023, 0.06320537567138672, 0.06334259033203125, 0.0631992301940918, 0.06332006454467773, 0.06316345596313476, 0.0631346549987793, 0.06322521591186524, 0.06334233474731445, 0.06326678466796876, 0.06315510559082031, 0.06362931060791016, 0.06359654235839844, 0.06367440032958985, 0.06360249710083007, 0.06381584167480468, 0.06361820983886719, 0.06343561553955078, 0.06363737487792968, 0.0634021110534668, 0.06361702346801758, 0.06349107360839844, 0.06354431915283203, 0.06339596939086914, 0.06356489562988281, 0.06357094573974609, 0.06349596786499023, 0.06350643157958985, 0.06353919982910156, 0.0636313591003418, 0.06354534530639648, 0.06388121414184571, 0.06360678482055664, 0.0634593276977539, 0.06342041778564453, 0.063467041015625, 0.0634372787475586, 0.06336012649536132, 0.06353804779052734, 0.06353513717651367, 0.06355145645141602, 0.06359244918823242, 0.06355558395385742, 0.06358838272094726, 0.06395872116088867, 0.06377020645141601, 0.0638421745300293, 0.06374483108520508, 0.06372351837158204, 0.0635247688293457, 0.06375638580322265, 0.06395286560058594, 0.063823486328125, 0.06373958587646485, 0.0638880958557129, 0.0636416015625, 0.06378496170043946, 0.06376857757568359, 0.0638230094909668, 0.0637322883605957, 0.06387535858154297, 0.06376857757568359, 0.06496864318847656, 0.06348601531982422, 0.0632874870300293, 0.06309049606323242, 0.06323199844360351, 0.06319071960449218, 0.06327526473999023, 0.06321075057983398, 0.06330995178222656, 0.06337760162353516, 0.0633218231201172, 0.0636014404296875, 0.0633590087890625, 0.06333436965942382, 0.06352668762207031, 0.06350460815429687, 0.06356480026245118, 0.06390067291259766, 0.06338764953613281, 0.0632116470336914, 0.06338870239257813, 0.06341718292236329, 0.0633807029724121, 0.06357276916503907, 0.06343884658813477, 0.06347689437866211, 0.06347644805908204, 0.06362464141845703, 0.06352928161621094, 0.06358848190307617, 0.06351283264160157, 0.0635781135559082, 0.06351788711547851, 0.06407376098632812, 0.06402333068847656, 0.06377641677856445, 0.06369702529907227, 0.06360444641113282, 0.06334310531616211, 0.06342841720581055, 0.06354457473754883, 0.06356268692016602, 0.06362112045288086, 0.06379520034790039, 0.06406348419189453, 0.06375628662109376, 0.0637825927734375, 0.06392863845825195, 0.06372351837158204, 0.06388940811157226, 0.06362108612060546, 0.06351628875732422, 0.06365129470825195, 0.06361388778686523, 0.0635863037109375, 0.06368566513061523, 0.06366499328613281, 0.06366016006469727, 0.0640260467529297, 0.0639060173034668, 0.06385811233520508, 0.06381779098510743, 0.06397423934936523, 
0.064659423828125, 0.06331651306152344, 0.06322345733642579, 0.0631278076171875, 0.06312566375732422, 0.06340838241577149, 0.06324969482421874, 0.06329417419433593, 0.06333436965942382, 0.06344707107543945, 0.06339174270629883, 0.06340927886962891, 0.0633762550354004, 0.06395084762573242, 0.06341980743408203, 0.0634230728149414, 0.06353033447265626, 0.06347375869750976, 0.06336767959594726, 0.06331951904296874, 0.06318345642089844, 0.06328688049316407, 0.06336553573608399, 0.06337945556640626, 0.06343824005126954, 0.06346403121948242, 0.06350969696044922, 0.06355401611328125, 0.06353305435180664, 0.06357846450805664, 0.06366006469726562, 0.06360601425170899, 0.06350630569458007, 0.0635953598022461, 0.06332160186767578, 0.0634967041015625, 0.0634777603149414, 0.06348720169067383, 0.06358473587036133, 0.06353257751464844, 0.06352115249633788, 0.06371779251098633, 0.06375193786621093, 0.06370643234252929, 0.06375878524780274, 0.06370764923095704, 0.06368268966674805, 0.06372032165527344, 0.06382899093627929, 0.06378496170043946, 0.0637929916381836, 0.06359817504882813, 0.06351308822631836, 0.06358943939208984, 0.06385744094848633, 0.0638773422241211, 0.06372335815429687, 0.06394281768798828, 0.06375219345092774, 0.06415974426269531, 0.0642682876586914, 0.06383820724487305, 0.06419769287109375]",tokens/s,15.726130410629532,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,888.410112,7981.62944,0.0,7579.107328,7559.873536,s,1,7.98647900390625,7.98647900390625,0.0,7.98647900390625,7.98647900390625,7.98647900390625,7.98647900390625,[7.98647900390625],,kWh,6.595712491631881e-06,7.199241342627945e-07,2.9538912520027383e-06,1.0269527877897414e-05,,MB,1229.955072,8233.28768,0.0,7820.279808,7767.767552,s,10,6.223565368652343,0.6223565368652344,0.0019468524734183393,0.6230224914550782,0.6243068908691406,0.6243541595458985,0.6243919744873047,"[0.6199192504882812, 0.6198401489257812, 0.6220650634765625, 0.6189981079101563, 0.623899658203125, 0.6229447021484374, 0.6231002807617188, 0.62429638671875, 0.6244014282226562, 0.624100341796875]",tokens/s,411.3397784643731,kWh,1.8264466738802314e-05,2.014243567409359e-06,1.2190391696750152e-05,3.2469102002961826e-05,tokens/kWh,7884418.853858284,MB,1247.502336,8296.20224,0.0,7883.194368,7838.569472,s,10,21.115360107421875,2.1115360107421877,0.0035622630998414016,2.1121798095703124,2.1156560791015626,2.115872839355469,2.116046247558594,"[2.10785986328125, 2.10476904296875, 2.10962890625, 2.113976318359375, 2.11359765625, 2.108720703125, 2.110761962890625, 2.116089599609375, 2.11434814453125, 
2.11560791015625]",tokens/s,29.836100203593503,kWh,6.143396183786433e-05,6.776353842029849e-06,4.075956732984901e-05,0.00010896988300974317,tokens/kWh,578141.393382675,,s,630,21.112289115905753,0.033511570025247246,0.00021202990223383422,0.0334955997467041,0.033730576324462885,0.033813448143005374,0.03433732643127442,"[0.03475308990478516, 0.03346022415161133, 0.033331199645996096, 0.033363967895507815, 0.03324668884277344, 0.033279777526855465, 0.03323161697387695, 0.03313868713378906, 0.0332347183227539, 0.03321673583984375, 0.03324262237548828, 0.03326617431640625, 0.0333427848815918, 0.03340265655517578, 0.03338489532470703, 0.03325939178466797, 0.03323555374145508, 0.033263614654541016, 0.0331794548034668, 0.03323305511474609, 0.033289375305175783, 0.03322150421142578, 0.033291553497314455, 0.033245918273925784, 0.03322867202758789, 0.033361377716064455, 0.03332361602783203, 0.03343366241455078, 0.03351551818847656, 0.03344998550415039, 0.033413120269775394, 0.03347824096679688, 0.033455745697021484, 0.03340774536132812, 0.033517601013183594, 0.03339878463745117, 0.033398624420166015, 0.03342147064208984, 0.03331686401367188, 0.03336806488037109, 0.03343113708496094, 0.03338396835327148, 0.033618175506591796, 0.03366156768798828, 0.03353545761108399, 0.03351375961303711, 0.033394943237304686, 0.03340201568603516, 0.03338940811157227, 0.03352764892578125, 0.033509536743164064, 0.03346432113647461, 0.033396736145019534, 0.03371417617797851, 0.03377907180786133, 0.033616382598876955, 0.03357913589477539, 0.034677886962890626, 0.03419020843505859, 0.033580158233642576, 0.03355123138427735, 0.0335093765258789, 0.0335175666809082, 0.03398665618896484, 0.03344976043701172, 0.03317558288574219, 0.03323923110961914, 0.033309696197509765, 0.033209342956542966, 0.03311001586914063, 0.03309568023681641, 0.0331096305847168, 0.0332435188293457, 0.03322265625, 0.033216129302978514, 0.03326809692382812, 0.033337345123291014, 0.03332710266113281, 0.0332163200378418, 0.03326995086669922, 0.033326656341552734, 0.03340332794189453, 0.03332707214355469, 0.03343772888183594, 0.03332815933227539, 0.033411998748779294, 0.033325119018554686, 0.03339443206787109, 0.033318241119384764, 0.03333827209472656, 0.033339038848876956, 0.03341756820678711, 0.03343564987182617, 0.033381889343261716, 0.03340502548217773, 0.0333922233581543, 0.03339299011230469, 0.03337673568725586, 0.033396736145019534, 0.033355777740478515, 0.03335372924804687, 0.03339206314086914, 0.03334105682373047, 0.033417728424072264, 0.033442241668701175, 0.03350527954101563, 0.033468414306640625, 0.033553825378417966, 0.033613502502441404, 0.03357174301147461, 0.03343942260742187, 0.033521984100341795, 0.03344793701171875, 0.03351347351074219, 0.033538047790527346, 0.033469760894775394, 0.033474369049072264, 0.033559425354003906, 0.033675262451171875, 0.03361177444458008, 0.03365052795410156, 0.03354000091552734, 0.03356902313232422, 0.03355356979370117, 0.033543006896972656, 0.03343155288696289, 0.034404319763183595, 0.03362412643432617, 0.033478046417236326, 0.03328470230102539, 0.033238208770751954, 0.0333276481628418, 0.03333148956298828, 0.033230846405029296, 0.03328745651245117, 0.033290977478027346, 0.03316326522827148, 0.0331960334777832, 0.03342540740966797, 0.03316121673583984, 0.033301727294921875, 0.0334813117980957, 0.033415359497070314, 0.033298431396484376, 0.03321760177612305, 0.03331987380981445, 0.03329193496704102, 0.033323360443115235, 0.03333660888671875, 0.03330326461791992, 0.033329151153564454, 0.03327590560913086, 
0.03330047988891602, 0.033392894744873045, 0.03339238357543945, 0.03364044952392578, 0.03350924682617187, 0.033525886535644533, 0.033545856475830076, 0.03353974533081055, 0.033552478790283204, 0.033667713165283206, 0.033430816650390625, 0.033532638549804684, 0.03345568084716797, 0.033493247985839844, 0.03351366424560547, 0.03348438262939453, 0.03360195159912109, 0.033389598846435546, 0.03347964859008789, 0.03344076919555664, 0.03369062423706055, 0.03363840103149414, 0.033734657287597655, 0.03370393753051758, 0.033449440002441405, 0.03350582504272461, 0.03352576065063476, 0.033519615173339845, 0.03358662414550781, 0.03354272079467773, 0.033627391815185544, 0.03362054443359375, 0.03362015914916992, 0.03357900619506836, 0.033675006866455075, 0.03393468856811523, 0.03413904190063476, 0.03403523254394531, 0.03369827270507812, 0.03328160095214844, 0.0332476806640625, 0.03330252838134766, 0.03328348922729492, 0.03327238464355469, 0.033584415435791014, 0.03330335998535156, 0.03339443206787109, 0.03341043090820312, 0.03341379165649414, 0.03352912139892578, 0.0335711669921875, 0.03354268646240234, 0.03371523284912109, 0.03368601608276367, 0.03342793655395508, 0.03356447982788086, 0.03350486373901367, 0.03356528091430664, 0.03338649749755859, 0.03343360137939453, 0.03334143829345703, 0.03340662384033203, 0.033441120147705075, 0.03363094329833984, 0.033535743713378904, 0.03353654479980469, 0.033554431915283206, 0.03343360137939453, 0.03344998550415039, 0.03335987091064453, 0.0334516487121582, 0.033439617156982425, 0.033482593536376955, 0.03350390243530273, 0.0335093765258789, 0.03332502365112305, 0.03339811325073242, 0.033495742797851565, 0.033518913269042966, 0.03374713516235352, 0.03365119934082031, 0.03357900619506836, 0.03384867095947266, 0.0338070068359375, 0.03365273666381836, 0.03370332717895508, 0.03374883270263672, 0.03370880126953125, 0.033587200164794925, 0.03358924865722656, 0.03379334259033203, 0.03399135971069336, 0.03358835220336914, 0.03378396987915039, 0.03382550430297852, 0.03362332916259766, 0.03360636901855469, 0.03355020904541016, 0.03365871810913086, 0.03366940689086914, 0.03412432098388672, 0.0335175666809082, 0.03343974304199219, 0.03324313735961914, 0.0336130256652832, 0.03340358352661133, 0.03326985549926758, 0.0331960334777832, 0.033230846405029296, 0.03331436920166016, 0.033219009399414065, 0.03318163299560547, 0.03332716751098633, 0.033299934387207034, 0.033479198455810544, 0.03371782302856445, 0.033554752349853514, 0.03334476852416992, 0.0332718734741211, 0.03338931274414063, 0.033374271392822265, 0.03340848159790039, 0.03334812927246094, 0.03333244705200195, 0.033383201599121094, 0.03350233459472656, 0.034064960479736325, 0.033673057556152346, 0.03359996795654297, 0.0335810546875, 0.033546241760253906, 0.03468492889404297, 0.03346188735961914, 0.033791553497314455, 0.03360851287841797, 0.03357891082763672, 0.033374305725097655, 0.03353753662109375, 0.033511585235595706, 0.03346672058105469, 0.0335175666809082, 0.03347353744506836, 0.03358540725708008, 0.03352604675292969, 0.033613407135009765, 0.03364339065551758, 0.03351532745361328, 0.03356275177001953, 0.03356620788574219, 0.03351526260375977, 0.03365311813354492, 0.03374534225463867, 0.03363177490234375, 0.033761215209960935, 0.03362870407104492, 0.03356467056274414, 0.0335728645324707, 0.033691646575927735, 0.033680896759033206, 0.03375360107421875, 0.03375247955322266, 0.03371273422241211, 0.03367068862915039, 0.034264896392822264, 0.03362326431274414, 0.03341955184936524, 0.03330096054077149, 0.03332243347167969, 
0.03331683349609375, 0.03328287887573242, 0.03329433441162109, 0.03337196731567383, 0.033271678924560545, 0.033194305419921875, 0.03322617721557617, 0.03324147033691406, 0.033261600494384765, 0.03318297576904297, 0.033457054138183596, 0.033377952575683593, 0.03329264068603516, 0.03334870529174805, 0.03330902481079102, 0.03325136184692383, 0.033291999816894534, 0.03324396896362305, 0.03329635238647461, 0.033459648132324216, 0.03337052917480469, 0.033382591247558595, 0.033517505645751955, 0.0337196159362793, 0.03356134414672852, 0.03361587142944336, 0.03353120040893555, 0.03343225479125977, 0.03344380950927734, 0.03351555252075195, 0.033443264007568356, 0.03337446212768555, 0.03347078323364258, 0.033476608276367184, 0.03341257476806641, 0.03347715377807617, 0.03358310317993164, 0.03340854263305664, 0.033452510833740234, 0.033389728546142576, 0.03357126235961914, 0.03346425628662109, 0.03351599884033203, 0.033527809143066405, 0.03339468765258789, 0.03345801544189453, 0.03353411102294922, 0.033546241760253906, 0.03361587142944336, 0.03351116943359375, 0.033465824127197265, 0.033716865539550785, 0.03373072052001953, 0.033808383941650394, 0.03371811294555664, 0.03375702285766601, 0.03381689453125, 0.03374678421020508, 0.03424256134033203, 0.03357900619506836, 0.03354355239868164, 0.03343552017211914, 0.0332702407836914, 0.033278240203857425, 0.033181697845458984, 0.033207584381103515, 0.033207008361816406, 0.03329596710205078, 0.033253185272216795, 0.03330108642578125, 0.033306625366210936, 0.03319753646850586, 0.03322675323486328, 0.03330831909179687, 0.03341945648193359, 0.033423328399658205, 0.03355231857299805, 0.033557281494140626, 0.03337420654296875, 0.03340233612060547, 0.0334398078918457, 0.033552864074707034, 0.033861183166503904, 0.03334537506103516, 0.033462879180908206, 0.03346745681762695, 0.03346912002563476, 0.03345843124389648, 0.033490528106689454, 0.03352313613891601, 0.03353472137451172, 0.033538272857666016, 0.03360671997070312, 0.0335799674987793, 0.03347251129150391, 0.03349417495727539, 0.03358342361450195, 0.03350582504272461, 0.03343974304199219, 0.033445022583007813, 0.03346915054321289, 0.03353945541381836, 0.03351532745361328, 0.03343356704711914, 0.03346857452392578, 0.033500991821289065, 0.03374950408935547, 0.03368806457519531, 0.03367935943603516, 0.0335994873046875, 0.03356800079345703, 0.033581825256347654, 0.0335830078125, 0.03366307067871094, 0.03354806518554688, 0.03373798370361328, 0.0336446418762207, 0.03363315200805664, 0.03360736083984375, 0.03372268676757813, 0.03367929458618164, 0.03451052856445312, 0.033661247253417965, 0.03341836929321289, 0.033325950622558595, 0.03346432113647461, 0.033452030181884765, 0.0331828498840332, 0.03323788833618164, 0.03337993621826172, 0.033341854095458985, 0.0333617935180664, 0.03335795211791992, 0.033379745483398435, 0.0334381103515625, 0.033532096862792966, 0.033422496795654295, 0.033494945526123046, 0.03347286224365234, 0.03350780868530273, 0.03342326354980469, 0.03346249771118164, 0.033414878845214845, 0.033401119232177735, 0.03344169616699219, 0.033486366271972656, 0.03381423950195313, 0.033880928039550784, 0.033834976196289064, 0.03377155303955078, 0.03369574356079102, 0.03367513656616211, 0.03364863967895508, 0.03368684768676758, 0.033516193389892576, 0.033478176116943356, 0.033538528442382816, 0.033514785766601565, 0.03348937606811524, 0.03349545669555664, 0.03362611389160156, 0.03357491302490234, 0.03355648040771484, 0.033576702117919924, 0.03365251159667969, 0.033585281372070314, 0.03360393524169922, 
0.033708030700683594, 0.03376873779296875, 0.033466400146484374, 0.03350316619873047, 0.03358950424194336, 0.03368396759033203, 0.03368751907348633, 0.03359952163696289, 0.03360563278198242, 0.03381615829467773, 0.0336611213684082, 0.033642719268798825, 0.033955841064453124, 0.033955841064453124, 0.033810081481933596, 0.03376300811767578, 0.03377219009399414, 0.03464147186279297, 0.03365932846069336, 0.03366502380371094, 0.034361343383789066, 0.033933311462402346, 0.03332505416870117, 0.033253055572509765, 0.03333075332641602, 0.03326380920410156, 0.033421886444091796, 0.03346371078491211, 0.03334614562988281, 0.033471519470214844, 0.033403873443603516, 0.033361217498779294, 0.03326841735839844, 0.03333446502685547, 0.033399200439453124, 0.0334114875793457, 0.03341516876220703, 0.033398624420166015, 0.03338419342041016, 0.03341321563720703, 0.03362643051147461, 0.033457279205322266, 0.0334051513671875, 0.03333331298828125, 0.03370655822753906, 0.03369359970092774, 0.033775745391845705, 0.03353366470336914, 0.03353132629394531, 0.03359625625610352, 0.03352166366577149, 0.03341107177734375, 0.033454078674316406, 0.033463455200195315, 0.033450752258300784, 0.033390209197998046, 0.0334543342590332, 0.03344611358642578, 0.033560577392578124, 0.033455486297607424, 0.03352844619750977, 0.033546241760253906, 0.033470462799072266, 0.033552383422851564, 0.03350726318359375, 0.03357497787475586, 0.033587200164794925, 0.0335250244140625, 0.033562881469726566, 0.03374537658691406, 0.03381248092651367, 0.03369779205322266, 0.03379814529418945, 0.03373056030273437, 0.033698974609375, 0.03363721466064453, 0.033617919921875, 0.03368134307861328, 0.03370137786865234, 0.033849918365478515, 0.03427852630615234, 0.03366796875, 0.03336959838867187, 0.03346675109863281, 0.03357913589477539, 0.03358924865722656, 0.03338796615600586, 0.03334406280517578, 0.03344384002685547, 0.033372161865234375, 0.03330867385864258, 0.03335331344604492, 0.0334134407043457, 0.0334840316772461, 0.03332320022583008, 0.033503807067871094, 0.03351766586303711, 0.033533470153808594, 0.03339516830444336, 0.0333496322631836, 0.03338608169555664, 0.033505695343017575, 0.03350732803344727, 0.03356860733032226, 0.03365903854370117, 0.03350291061401367, 0.033562942504882814, 0.03355766296386719, 0.033608543395996095, 0.03382271957397461, 0.033724414825439454, 0.03363840103149414, 0.03356016159057617, 0.033565086364746095, 0.03357651138305664, 0.033620414733886717, 0.03346825790405274, 0.0335750732421875, 0.03361097717285156, 0.03373852920532226, 0.03379097747802735, 0.03347238540649414, 0.033603679656982424, 0.03358108901977539, 0.03366912078857422, 0.033594913482666015, 0.03362863922119141, 0.03359865570068359, 0.033530689239501955, 0.033570816040039066, 0.03374899291992187, 0.03354608154296875, 0.03366515350341797, 0.03388623809814453, 0.03361347198486328, 0.03368163299560547, 0.0337470703125, 0.03371417617797851, 0.03354214477539062, 0.03371212768554688, 0.033693695068359376, 0.033672672271728515, 0.033605438232421875]",tokens/s,29.84044015981976,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 
8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 786, in __init__ self.model = RecurrentGemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 660, in __init__ [RecurrentGemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 660, in [RecurrentGemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 490, in __init__ self.mlp_block = RecurrentGemmaMlp(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 472, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 142817 has 14.71 GiB memory in use. 
Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 717, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 188.12 MiB is free. Process 37785 has 14.55 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.45 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 755, in __init__ self.self_attn = MIXTRAL_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 349, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 72.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 38.12 MiB is free. Process 182326 has 14.70 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 41.65 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,812.417024,12523.077632,0.0,12127.830016,12122.08896,s,1,7.2994873046875,7.2994873046875,0.0,7.2994873046875,7.2994873046875,7.2994873046875,7.2994873046875,[7.2994873046875],,kWh,1.1944894587497856e-05,1.2906058269482203e-06,6.215838306000091e-06,1.945133872044617e-05,,MB,1129.816064,12695.044096,0.0,12289.31072,12248.586752,s,10,11.432853271484374,1.1432853271484373,0.003098576912891139,1.1437304077148438,1.1467366577148437,1.146837567138672,1.1469182946777343,"[1.13678466796875, 1.139625, 1.1412708740234374, 1.1431728515625, 1.1437130126953126, 1.143747802734375, 1.144585205078125, 1.1467142333984375, 1.1469384765625, 1.1463011474609375]",tokens/s,223.9161073102467,kWh,3.350108923875007e-05,3.694341063623324e-06,2.2123045476200084e-05,5.9318475778573484e-05,tokens/kWh,4315687.4252063995,MB,1173.42208,12703.432704,0.0,12297.699328,12248.589312,s,10,33.81919946289062,3.381919946289062,0.002572435008086271,3.382077392578125,3.3849273437500003,3.3851793701171875,3.385380991210938,"[3.37895458984375, 3.37922119140625, 3.3785986328125, 3.38262890625, 3.383670166015625, 3.38152587890625, 3.379484130859375, 3.384813232421875, 3.384871337890625, 3.385431396484375]",tokens/s,18.628471696714495,kWh,9.873379739124999e-05,1.089143092125667e-05,6.570288589559988e-05,0.00017532811420810655,tokens/kWh,359326.285373844,,s,630,33.81635486221316,0.053676753749544656,0.00028610013942388234,0.05367704010009766,0.05391408386230469,0.05399905891418457,0.055077218322753904,"[0.05495600128173828, 0.053784576416015625, 0.05345059204101563, 0.05316624069213867, 0.05299609756469727, 0.052991519927978514, 0.053130943298339846, 0.05316793441772461, 0.05340208053588867, 0.053469345092773436, 0.05364944076538086, 0.053452129364013674, 0.05346198272705078, 0.05349929428100586, 0.053352161407470705, 0.053179264068603516, 0.05315584182739258, 0.05346303939819336, 0.053599361419677735, 0.05381177520751953, 0.05366815948486328, 0.05349990463256836, 0.05333606338500976, 0.05348556900024414, 0.05334000015258789, 0.05345862579345703, 0.053488094329833986, 0.05365119934082031, 0.05359231948852539, 0.05361423873901367, 0.053510494232177734, 0.05368832015991211, 0.053528575897216796, 0.05336403274536133, 0.053572288513183595, 0.053645313262939455, 0.05386608123779297, 0.05390582275390625, 0.053749088287353516, 0.05393436813354492, 0.05368051147460937, 0.05366579055786133, 0.053526527404785154, 0.05374959945678711, 0.05369241714477539, 0.05401206588745117, 0.05393936157226562, 0.053598751068115236, 0.05371907043457031, 0.054013729095458984, 0.05370230484008789, 0.053764289855957034, 0.05374736022949219, 0.053768417358398435, 0.053803455352783205, 0.05402159881591797, 
0.053932350158691404, 0.05403910446166992, 0.053907455444335936, 0.05377433776855469, 0.053744895935058594, 0.05396950531005859, 0.05383184051513672, 0.054832416534423827, 0.05351532745361328, 0.05334614562988281, 0.05319641494750976, 0.05313766479492187, 0.05346460723876953, 0.05319097518920898, 0.053262401580810546, 0.05395814514160156, 0.05352483367919922, 0.05355295944213867, 0.053631553649902346, 0.05339910507202148, 0.053645278930664064, 0.05346966552734375, 0.053448318481445316, 0.05346342468261719, 0.0537245101928711, 0.053607070922851566, 0.053628929138183595, 0.05343148803710938, 0.05343110275268555, 0.05339340972900391, 0.05331148910522461, 0.05343139266967773, 0.05341686248779297, 0.05333340835571289, 0.053524192810058595, 0.053711776733398435, 0.05368761444091797, 0.0536459846496582, 0.05368627166748047, 0.053573696136474606, 0.0536165771484375, 0.05363097763061524, 0.05365760040283203, 0.053761184692382814, 0.053742431640625, 0.053937248229980465, 0.05370767974853516, 0.05350112152099609, 0.053456897735595706, 0.05333689498901367, 0.05341593551635742, 0.05351177597045898, 0.0536371841430664, 0.05365385437011719, 0.053651454925537106, 0.053754047393798826, 0.05389907073974609, 0.053835777282714846, 0.05384396743774414, 0.05366988754272461, 0.053929088592529296, 0.05394931030273437, 0.053892383575439455, 0.05420864105224609, 0.05401676940917969, 0.053827423095703125, 0.053940223693847655, 0.05376598358154297, 0.053733631134033205, 0.05384182357788086, 0.05520844650268555, 0.053714942932128903, 0.05346713638305664, 0.053260128021240236, 0.05318415832519531, 0.05330492782592773, 0.053156097412109374, 0.053682430267333985, 0.05339494323730469, 0.05366012954711914, 0.0536693115234375, 0.053482494354248046, 0.053348350524902347, 0.05353062438964844, 0.05345014572143555, 0.053559902191162106, 0.05342972946166992, 0.0535700798034668, 0.0537242546081543, 0.05374863815307617, 0.05347087860107422, 0.05345724868774414, 0.05344460678100586, 0.0533831672668457, 0.0534835205078125, 0.05330739212036133, 0.0535327033996582, 0.053762016296386717, 0.053597312927246094, 0.05369331359863281, 0.053583774566650394, 0.05375705718994141, 0.053465312957763675, 0.05362969589233398, 0.05357065582275391, 0.05365852737426758, 0.053819393157958986, 0.05367766571044922, 0.05378688049316406, 0.05364303970336914, 0.05351420974731445, 0.05360886383056641, 0.05351001739501953, 0.053602432250976564, 0.05360639953613281, 0.053663745880126956, 0.0538603515625, 0.05380441665649414, 0.05374835205078125, 0.05356492614746094, 0.053580097198486325, 0.05349596786499024, 0.05348969650268555, 0.05377024078369141, 0.053684223175048826, 0.05391299057006836, 0.053830047607421876, 0.05377657699584961, 0.05384313583374024, 0.053811710357666014, 0.05388320159912109, 0.053766143798828124, 0.05374771118164062, 0.05544809722900391, 0.05384396743774414, 0.05328486251831055, 0.05330739212036133, 0.05304912185668945, 0.05308745574951172, 0.05308729553222656, 0.05318761444091797, 0.05328947067260742, 0.0533570556640625, 0.05341116714477539, 0.05359430313110351, 0.05348390579223633, 0.053422080993652345, 0.05330944061279297, 0.053561344146728515, 0.05324758529663086, 0.0536539192199707, 0.053722942352294925, 0.05370006561279297, 0.053510879516601564, 0.05355868911743164, 0.05354713439941406, 0.053628639221191404, 0.05357849502563477, 0.0541080322265625, 0.05369187164306641, 0.05366550445556641, 0.05366265487670899, 0.05375590515136719, 0.053698558807373044, 0.05364432144165039, 0.05362502288818359, 0.053531425476074215, 
0.05360639953613281, 0.05382758331298828, 0.053678081512451174, 0.05389311981201172, 0.05396480178833008, 0.05393203353881836, 0.05373747253417969, 0.05389430236816406, 0.05364822387695312, 0.05374566268920898, 0.05373132705688476, 0.05375942230224609, 0.05382815933227539, 0.05382758331298828, 0.05373132705688476, 0.053838977813720705, 0.05380777740478516, 0.05376800155639649, 0.05373583984375, 0.05400502395629883, 0.053840351104736325, 0.05401558303833008, 0.054010528564453125, 0.053902942657470705, 0.053889278411865235, 0.05393423843383789, 0.053794654846191406, 0.05386387252807617, 0.0538746223449707, 0.0554106559753418, 0.05382368087768555, 0.053427040100097654, 0.05347225570678711, 0.05326182556152344, 0.053436321258544923, 0.0534218864440918, 0.05341059112548828, 0.05333401489257812, 0.05345836639404297, 0.05381792068481445, 0.05348716735839844, 0.05361081695556641, 0.053583518981933594, 0.053561824798583985, 0.05373491287231445, 0.0534984016418457, 0.05378249740600586, 0.05387468719482422, 0.053852161407470706, 0.053704414367675785, 0.05369785690307617, 0.053596351623535154, 0.053617408752441406, 0.0536781120300293, 0.05357158279418945, 0.053645313262939455, 0.053556385040283205, 0.0536126708984375, 0.05380124664306641, 0.053623233795166016, 0.05365923309326172, 0.053676414489746097, 0.053685886383056644, 0.05365187072753906, 0.05381497573852539, 0.05378889465332031, 0.05379679870605469, 0.05389459228515625, 0.053725921630859375, 0.05366534423828125, 0.05367801666259766, 0.05370300674438477, 0.05370665740966797, 0.05364879989624023, 0.053658462524414065, 0.053626880645751954, 0.053642559051513675, 0.05359891128540039, 0.05358720016479492, 0.05376076889038086, 0.05372911834716797, 0.05383731079101563, 0.05401257705688477, 0.05399552154541016, 0.053907455444335936, 0.05390553665161133, 0.05377830505371094, 0.0537968635559082, 0.053816574096679684, 0.0536890869140625, 0.053704158782958984, 0.05390595245361328, 0.055509056091308594, 0.05405596923828125, 0.05329919815063477, 0.053298145294189456, 0.053141502380371096, 0.05313324737548828, 0.05328287887573242, 0.05331763076782227, 0.0533414077758789, 0.05339215850830078, 0.05334220886230469, 0.05344460678100586, 0.053403297424316404, 0.053373279571533205, 0.053489376068115234, 0.05355548858642578, 0.053378944396972654, 0.05360652923583984, 0.05364303970336914, 0.05360038375854492, 0.05348486328125, 0.053504798889160154, 0.053387264251708984, 0.053456897735595706, 0.05327667236328125, 0.05347942352294922, 0.05409366226196289, 0.053575199127197264, 0.053492351531982424, 0.0535200309753418, 0.053727584838867186, 0.05376409530639648, 0.05356339263916016, 0.053460990905761716, 0.053491168975830075, 0.05372774505615235, 0.05361872100830078, 0.05366483306884766, 0.05392236709594726, 0.05383206558227539, 0.053628929138183595, 0.05372108840942383, 0.05350153732299805, 0.05357814407348633, 0.054063102722167966, 0.05382144165039063, 0.053857311248779294, 0.05386134338378906, 0.053768192291259766, 0.05376768112182617, 0.054008289337158205, 0.053823520660400394, 0.053889022827148435, 0.05380505752563477, 0.053816864013671875, 0.05398780822753906, 0.05404467010498047, 0.053933345794677734, 0.05397331237792969, 0.05397135925292969, 0.05389625549316406, 0.05391996765136719, 0.05391843032836914, 0.05531846237182617, 0.05389315032958984, 0.0534615364074707, 0.05349728012084961, 0.05315430450439453, 0.05328028869628906, 0.05352054214477539, 0.05336307144165039, 0.05332588958740234, 0.05345868682861328, 0.053402816772460934, 0.05339433670043945, 
0.05358796691894531, 0.05351023864746094, 0.053429534912109375, 0.05358803176879883, 0.053354400634765625, 0.05359283065795899, 0.05366742324829102, 0.05374403381347656, 0.05368569564819336, 0.0536929931640625, 0.05359001541137695, 0.053441665649414063, 0.053296001434326175, 0.05349785614013672, 0.05359548950195313, 0.05357372665405274, 0.05351795196533203, 0.05366259384155273, 0.0535428466796875, 0.05342425537109375, 0.0535470085144043, 0.0535838737487793, 0.053626304626464845, 0.053762622833251957, 0.05368012619018555, 0.05389644622802734, 0.0537628173828125, 0.05374294281005859, 0.053596832275390624, 0.05370675277709961, 0.053610496520996094, 0.053575199127197264, 0.053635551452636716, 0.05380662536621094, 0.05375862503051758, 0.05370582580566406, 0.053602336883544925, 0.0537504653930664, 0.05367561721801758, 0.05362729644775391, 0.05371020889282226, 0.05378713607788086, 0.05379072189331055, 0.05386159896850586, 0.053738399505615236, 0.0537784309387207, 0.0537861442565918, 0.053807582855224606, 0.05370675277709961, 0.05374566268920898, 0.053782527923583984, 0.05525897598266601, 0.0540054702758789, 0.053416385650634765, 0.05335244750976562, 0.053255840301513674, 0.053373279571533205, 0.053364734649658206, 0.05324342346191406, 0.05346963119506836, 0.05339344024658203, 0.05336064147949219, 0.0533831672668457, 0.05348543930053711, 0.05351414489746094, 0.053298465728759764, 0.053545921325683594, 0.05346281433105469, 0.053604705810546875, 0.05385801696777344, 0.05376416015625, 0.0536003532409668, 0.05374771118164062, 0.05359791946411133, 0.05368592071533203, 0.05364595031738281, 0.05363091278076172, 0.053657665252685546, 0.05381324768066406, 0.05364096069335938, 0.0536352653503418, 0.0537426872253418, 0.05385468673706055, 0.053723648071289064, 0.053694465637207034, 0.05368822479248047, 0.05380684661865234, 0.05364284896850586, 0.05376233673095703, 0.053752288818359376, 0.053743614196777346, 0.053743488311767576, 0.05385023880004883, 0.05374771118164062, 0.053823520660400394, 0.05392995071411133, 0.053691841125488284, 0.05390095901489258, 0.05385852813720703, 0.05373974227905273, 0.05390383911132812, 0.053806175231933595, 0.053894046783447266, 0.05378047943115234, 0.05391360092163086, 0.053800960540771485, 0.05385420989990235, 0.05386441421508789, 0.053878814697265624, 0.05392947387695313, 0.054104576110839846, 0.053850112915039064, 0.054106113433837894, 0.054091007232666015, 0.0550563850402832, 0.05373721694946289, 0.053444862365722656, 0.05348726272583008, 0.0535002555847168, 0.05351619338989258, 0.05347452926635742, 0.0535316162109375, 0.05334230422973633, 0.05328774261474609, 0.05359254455566406, 0.053590560913085936, 0.05337702560424805, 0.05365756988525391, 0.05352860641479492, 0.053528575897216796, 0.053423263549804687, 0.05366460800170898, 0.05386240005493164, 0.05399052810668945, 0.05366806411743164, 0.05364553451538086, 0.053688159942626955, 0.05376800155639649, 0.053596126556396485, 0.05370553588867188, 0.05354086303710937, 0.05362076950073242, 0.0536486701965332, 0.05386710357666016, 0.05376214218139649, 0.05376755142211914, 0.05368691253662109, 0.05370675277709961, 0.05368832015991211, 0.053790401458740235, 0.05386214447021485, 0.05390959930419922, 0.053823966979980466, 0.05384806442260742, 0.053823486328125, 0.053768192291259766, 0.05367603302001953, 0.05383990478515625, 0.05361043167114258, 0.05362662506103515, 0.053655742645263675, 0.05367958450317383, 0.053795265197753905, 0.0536451187133789, 0.05393446350097656, 0.05389923095703125, 0.05376121520996094, 0.053895103454589845, 
0.054001953125, 0.05392652893066406, 0.05375356674194336, 0.053823486328125, 0.05378047943115234, 0.053982494354248046, 0.05390361785888672, 0.05382944107055664, 0.05381216049194336, 0.05508572769165039, 0.05351628875732422, 0.053292991638183594, 0.053448768615722654, 0.0531599349975586, 0.053352001190185544, 0.05328326416015625, 0.05325209426879883, 0.05356505584716797, 0.05363750457763672, 0.05351116943359375, 0.0538263053894043, 0.0534653434753418, 0.05360351943969727, 0.05360108947753906, 0.05351628875732422, 0.0534854736328125, 0.05369251251220703, 0.05362185668945312, 0.05373023986816406, 0.05360022354125977, 0.05363916778564453, 0.053548160552978515, 0.05368681716918945, 0.05357603073120117, 0.05383782577514649, 0.05398483276367187, 0.053844417572021484, 0.053714942932128903, 0.053984512329101564, 0.05382406234741211, 0.05377862548828125, 0.05389926528930664, 0.05377766418457031, 0.0537259521484375, 0.05377795028686523, 0.05363091278076172, 0.05368681716918945, 0.053907455444335936, 0.053866497039794924, 0.05379020690917969, 0.053799297332763674, 0.05369401550292969, 0.05387286376953125, 0.0538353271484375, 0.05376470565795898, 0.05388054275512695, 0.053758430480957034, 0.05378416061401367, 0.05396284866333008, 0.05388307189941406, 0.05383785629272461, 0.053768287658691405, 0.05382067108154297, 0.05376486587524414, 0.053788608551025394, 0.05379283142089844, 0.0539791374206543, 0.05387257766723633, 0.053682239532470706, 0.053768062591552736, 0.0541267204284668, 0.05395574569702148]",tokens/s,18.630038706625083,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 510, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 201, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 94956 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.907392,806.289408,0.0,411.041792,391.374848,s,1,7.3405439453125,7.3405439453125,0.0,7.3405439453125,7.3405439453125,7.3405439453125,7.3405439453125,[7.3405439453125],,kWh,4.876233570833695e-06,5.308025702608459e-07,1.983334919991586e-06,7.3903710610861275e-06,,MB,1112.3712,879.689728,0.0,473.956352,454.832128,s,14,0.35395107078552246,0.025282219341823033,0.0006675251077587096,0.02511552047729492,0.025283087921142578,0.02612307538986206,0.027355866603851316,"[0.027664064407348633, 0.025009151458740234, 0.0250250244140625, 0.025128000259399413, 0.02510304069519043, 0.02513587188720703, 0.024959232330322264, 0.02525923156738281, 0.024972927093505858, 0.025055456161499023, 0.0250283203125, 0.02516223907470703, 0.02515519905090332, 0.025293312072753905]",tokens/s,10125.69333960776,kWh,8.802714563880312e-07,9.703644657400391e-08,5.748766312530154e-07,1.5521845342150508e-06,tokens/kWh,164928843.41838953,MB,1139.023872,906.952704,0.0,501.219328,454.834688,s,14,10.088037719726563,0.7205741228376116,0.007503769931621452,0.7197563781738281,0.7292069396972656,0.7304784545898437,0.7312623901367188,"[0.7006243286132813, 0.7314583740234375, 0.7256400146484375, 0.7224078979492188, 0.7274712524414062, 0.72591748046875, 0.714115234375, 0.7185491943359374, 0.71629638671875, 0.7185131225585938, 0.7195335083007812, 0.7299508056640625, 0.719979248046875, 0.7175808715820312]",tokens/s,87.43028371863649,kWh,2.0470741543611325e-05,2.2576304109255806e-06,8.849634876604323e-06,3.157800683114122e-05,tokens/kWh,1995059.4202124062,,s,882,10.07979100131987,0.011428334468616649,0.00034070346924705217,0.011425951957702636,0.01164567985534668,0.011714222478866578,0.012337450504302967,"[0.011187392234802246, 0.01128428840637207, 0.011154335975646973, 0.011010080337524415, 0.010950079917907715, 0.011192223548889161, 0.01102511978149414, 0.01102400016784668, 0.010981760025024413, 0.011669504165649413, 0.011978719711303711, 0.013942432403564454, 0.011230751991271973, 0.011288736343383789, 0.011254464149475098, 0.011176959991455078, 0.01110102367401123, 0.011054783821105958, 0.012296671867370606, 0.011003168106079101, 0.010989760398864746, 0.010976960182189941, 0.010957216262817383, 0.010914239883422851, 0.011362303733825683, 0.011315199851989746, 0.010932288169860839, 0.010882623672485351, 0.010912287712097167, 0.010935232162475586, 0.011019167900085449, 0.010912960052490234, 0.010942815780639648, 0.01100870418548584, 0.011087648391723633, 0.010894559860229492, 0.010933247566223145, 0.010874912261962891, 0.010816800117492675, 0.010888895988464355, 0.010926464080810546, 0.010907648086547851, 0.010867232322692872, 0.010972928047180176, 
0.010893535614013673, 0.010917792320251465, 0.010840031623840332, 0.010933823585510254, 0.010846688270568847, 0.01088652801513672, 0.010914048194885255, 0.010934623718261718, 0.01090294361114502, 0.010934911727905273, 0.010950655937194824, 0.011067647933959961, 0.011034367561340332, 0.010991616249084473, 0.010966015815734862, 0.011037152290344238, 0.011231743812561035, 0.0112326078414917, 0.011455136299133301, 0.011290047645568848, 0.01157363224029541, 0.011560928344726563, 0.011636768341064453, 0.011632479667663575, 0.011688096046447754, 0.011600192070007325, 0.011626175880432129, 0.011542752265930175, 0.011669376373291015, 0.011519904136657716, 0.011464768409729004, 0.011421631813049317, 0.01149289608001709, 0.01174176025390625, 0.011482560157775878, 0.01153395175933838, 0.011540863990783691, 0.011558752059936523, 0.011541119575500488, 0.01148470401763916, 0.011463135719299316, 0.011498496055603028, 0.01141427230834961, 0.01158176040649414, 0.011614144325256348, 0.011487232208251954, 0.011552767753601074, 0.012955615997314453, 0.012981951713562012, 0.011619903564453126, 0.01168012809753418, 0.011649439811706543, 0.012210176467895508, 0.01161404800415039, 0.011614368438720704, 0.011426048278808594, 0.01187228775024414, 0.01154428768157959, 0.011496512413024903, 0.011527104377746582, 0.011610367774963378, 0.011675392150878906, 0.011970815658569335, 0.01173299217224121, 0.011515647888183593, 0.011440128326416015, 0.011456512451171874, 0.011669568061828613, 0.011499456405639649, 0.011447808265686036, 0.01148134422302246, 0.011630240440368653, 0.011630623817443848, 0.01154310417175293, 0.011495455741882325, 0.011386207580566406, 0.011491680145263672, 0.011349632263183593, 0.011459551811218261, 0.011456319808959961, 0.01133513641357422, 0.011241888046264649, 0.011032575607299805, 0.011427840232849122, 0.0115447998046875, 0.011566975593566894, 0.011640735626220703, 0.011513728141784668, 0.011403039932250976, 0.011448672294616699, 0.011531840324401856, 0.011629216194152832, 0.011405088424682618, 0.011272192001342773, 0.011266048431396485, 0.01133516788482666, 0.011606687545776368, 0.011623776435852051, 0.011602432250976562, 0.011648320198059081, 0.011510463714599609, 0.011593728065490723, 0.011746527671813965, 0.011553728103637695, 0.011612000465393067, 0.011527199745178223, 0.011545568466186524, 0.011470720291137696, 0.011489567756652832, 0.011444064140319824, 0.0113536958694458, 0.011292703628540039, 0.011383071899414062, 0.011434080123901368, 0.011660544395446778, 0.011631360054016113, 0.011698176383972168, 0.011501567840576172, 0.011489279747009277, 0.011423680305480957, 0.01154047966003418, 0.011556927680969237, 0.011454463958740235, 0.011410783767700196, 0.01142585563659668, 0.011599679946899415, 0.011473600387573241, 0.011390368461608886, 0.011399168014526367, 0.011524736404418945, 0.01147116756439209, 0.0114749755859375, 0.011458271980285645, 0.01151910400390625, 0.011521023750305176, 0.011621536254882813, 0.011596608161926269, 0.011699616432189941, 0.011405599594116212, 0.011509119987487793, 0.011631520271301269, 0.011700160026550293, 0.011671551704406738, 0.011577343940734864, 0.011525664329528808, 0.011155008316040039, 0.011452863693237305, 0.011413503646850585, 0.011390975952148438, 0.0117391357421875, 0.011433792114257813, 0.011395584106445313, 0.011449983596801758, 0.011585599899291992, 0.011503616333007812, 0.011351200103759765, 0.011457375526428223, 0.011540831565856934, 0.011529855728149414, 0.01152012825012207, 0.01148470401763916, 0.011538016319274902, 0.011522720336914063, 
0.011432064056396485, 0.011393024444580077, 0.011362303733825683, 0.011294719696044921, 0.011253824234008789, 0.011340831756591797, 0.011393535614013671, 0.011483551979064942, 0.011493375778198242, 0.01132953643798828, 0.011302016258239745, 0.011290656089782716, 0.011190688133239746, 0.011202048301696778, 0.011407936096191406, 0.01164352035522461, 0.011464127540588379, 0.011445664405822753, 0.01151478385925293, 0.011559200286865235, 0.011677408218383789, 0.01147052764892578, 0.011540127754211425, 0.011597503662109375, 0.011625503540039063, 0.011628576278686523, 0.011564959526062011, 0.011460288047790528, 0.011513567924499512, 0.011544768333435058, 0.011425503730773926, 0.01152070426940918, 0.01142745590209961, 0.011405856132507323, 0.011462400436401367, 0.011374143600463868, 0.011401663780212403, 0.011468480110168457, 0.011552255630493164, 0.011510944366455078, 0.0115217924118042, 0.011342016220092774, 0.011411264419555664, 0.01157475185394287, 0.011516448020935058, 0.011628543853759766, 0.013717184066772461, 0.011911487579345703, 0.011515904426574707, 0.011476415634155274, 0.011471103668212891, 0.01145248031616211, 0.01158351993560791, 0.011587712287902832, 0.01161843204498291, 0.011538399696350097, 0.011611840248107911, 0.011534111976623535, 0.011663935661315917, 0.01159119987487793, 0.011714240074157715, 0.01161292839050293, 0.011497792243957519, 0.011520031929016114, 0.01140003204345703, 0.011350879669189453, 0.0116627836227417, 0.011468607902526856, 0.01151369571685791, 0.011412320137023925, 0.011347711563110352, 0.011539775848388672, 0.01151478385925293, 0.01142307186126709, 0.011346688270568847, 0.011347935676574708, 0.011470656394958496, 0.01146236801147461, 0.01150819206237793, 0.011436032295227052, 0.01145241641998291, 0.011460448265075683, 0.011327360153198242, 0.011418047904968262, 0.011382368087768555, 0.01136076831817627, 0.011364095687866211, 0.011499808311462403, 0.011531871795654297, 0.01148969554901123, 0.011503328323364257, 0.011501855850219726, 0.0114518404006958, 0.011479328155517578, 0.011526111602783204, 0.011524127960205079, 0.011575296401977539, 0.011652768135070801, 0.011631135940551757, 0.011695327758789063, 0.01159228801727295, 0.011429887771606445, 0.011500896453857422, 0.01155958366394043, 0.011505663871765137, 0.01140940761566162, 0.011296319961547851, 0.011322943687438965, 0.011124128341674805, 0.011658207893371582, 0.011484512329101563, 0.011401568412780762, 0.011434144020080566, 0.011333791732788086, 0.011294719696044921, 0.012070624351501465, 0.011448512077331543, 0.011585280418395997, 0.011502079963684082, 0.011352160453796386, 0.0113570556640625, 0.01141823959350586, 0.011584063529968262, 0.011595775604248047, 0.011562463760375976, 0.011581727981567383, 0.011623616218566895, 0.01154047966003418, 0.011493120193481445, 0.01152444839477539, 0.01154105567932129, 0.011789983749389648, 0.01166585636138916, 0.011690079689025879, 0.011476479530334472, 0.011508128166198731, 0.011454591751098633, 0.011439935684204101, 0.011477055549621583, 0.011583488464355468, 0.011573023796081543, 0.011528127670288086, 0.011341152191162109, 0.011378879547119141, 0.011806528091430664, 0.011367487907409667, 0.011396703720092773, 0.011426079750061035, 0.011646976470947265, 0.012785216331481934, 0.011399295806884766, 0.011305024147033691, 0.011204863548278808, 0.011456640243530273, 0.011435903549194337, 0.01124687957763672, 0.01163747215270996, 0.011558912277221679, 0.011531904220581054, 0.011328895568847657, 0.011306303977966309, 0.011480768203735352, 0.011487104415893555, 
0.011569503784179688, 0.011489055633544921, 0.01159926414489746, 0.0117990083694458, 0.011484992027282715, 0.011437824249267578, 0.011461183547973633, 0.011306271553039551, 0.011020383834838866, 0.011834495544433594, 0.011561599731445313, 0.011286751747131347, 0.011161536216735839, 0.011355487823486329, 0.011458239555358886, 0.01140220832824707, 0.01137052822113037, 0.01127830410003662, 0.01122713565826416, 0.011318400382995605, 0.011293567657470704, 0.011399168014526367, 0.011270496368408203, 0.011131775856018067, 0.011176735877990722, 0.01117734432220459, 0.011146271705627441, 0.011998815536499024, 0.011028608322143555, 0.011007648468017578, 0.011438303947448731, 0.011671551704406738, 0.01168716812133789, 0.01162668800354004, 0.011517631530761718, 0.01148630428314209, 0.011489055633544921, 0.011382399559020996, 0.011325535774230957, 0.01123750400543213, 0.011352224349975586, 0.011182368278503418, 0.01112179183959961, 0.011201120376586915, 0.011259455680847168, 0.011270112037658691, 0.011230815887451171, 0.01116044807434082, 0.011175423622131348, 0.01147481632232666, 0.011647616386413574, 0.011395071983337402, 0.011228192329406738, 0.011287520408630371, 0.011319231986999512, 0.011201919555664063, 0.011356863975524902, 0.011401439666748047, 0.011351840019226074, 0.011560959815979004, 0.01133743953704834, 0.011536928176879882, 0.01147980785369873, 0.011171039581298827, 0.01118505573272705, 0.0111494722366333, 0.011057439804077149, 0.010963104248046874, 0.010989151954650878, 0.011173695564270019, 0.011473119735717774, 0.01198265552520752, 0.011597791671752929, 0.011545023918151855, 0.011427231788635254, 0.011345727920532227, 0.011336480140686035, 0.011364480018615723, 0.011267104148864747, 0.011203071594238282, 0.011192192077636718, 0.011129311561584473, 0.011318559646606446, 0.011594207763671874, 0.011469056129455566, 0.011294719696044921, 0.01147935962677002, 0.011491007804870605, 0.011404831886291504, 0.011237824440002441, 0.011218015670776366, 0.011250335693359375, 0.011219584465026855, 0.011129695892333985, 0.011235967636108398, 0.011397184371948242, 0.011354240417480468, 0.011288576126098633, 0.011122271537780762, 0.01111900806427002, 0.011082847595214844, 0.011055968284606933, 0.010969152450561523, 0.01092403221130371, 0.011327263832092285, 0.011657024383544922, 0.011564736366271972, 0.01153536033630371, 0.011541664123535156, 0.011529024124145509, 0.011448224067687989, 0.011937952041625977, 0.01209926414489746, 0.011427840232849122, 0.012511296272277832, 0.011321120262145996, 0.011315360069274902, 0.011306912422180175, 0.011337408065795898, 0.011501983642578125, 0.011503968238830566, 0.011515551567077637, 0.011311103820800781, 0.011324704170227051, 0.011393759727478027, 0.011472895622253418, 0.01140940761566162, 0.0112576322555542, 0.011265536308288575, 0.011412192344665527, 0.011472415924072266, 0.011475104331970215, 0.011409728050231934, 0.011332991600036622, 0.011019136428833008, 0.011167743682861327, 0.011103263854980469, 0.01126307201385498, 0.011672960281372071, 0.011581695556640625, 0.011669055938720703, 0.011662015914916992, 0.011577343940734864, 0.011488415718078614, 0.011420512199401855, 0.01122713565826416, 0.011093376159667968, 0.011343903541564941, 0.011179807662963868, 0.01117471981048584, 0.011050335884094238, 0.011138943672180176, 0.01135696029663086, 0.01152409553527832, 0.011718463897705079, 0.011607904434204102, 0.01125158405303955, 0.011282848358154298, 0.01134598445892334, 0.01123145580291748, 0.011202527999877929, 0.011310912132263184, 0.011481087684631347, 
0.011390975952148438, 0.011356160163879395, 0.011567104339599609, 0.011396575927734375, 0.011172479629516601, 0.011018176078796387, 0.010997376441955566, 0.011055711746215821, 0.011443679809570312, 0.011701567649841309, 0.011623456001281738, 0.011539999961853026, 0.01150614356994629, 0.011373855590820313, 0.011313983917236328, 0.011251744270324706, 0.011206463813781738, 0.01123737621307373, 0.011249024391174317, 0.011266752243041992, 0.01152627182006836, 0.01147475242614746, 0.011463744163513184, 0.011296799659729004, 0.011213600158691406, 0.01144547176361084, 0.011470815658569337, 0.011340543746948243, 0.011379167556762696, 0.01149510383605957, 0.011437824249267578, 0.011421759605407715, 0.011489503860473633, 0.011449503898620605, 0.011141119956970215, 0.011389984130859374, 0.011142047882080078, 0.011192383766174317, 0.011583200454711914, 0.011540639877319335, 0.011683903694152831, 0.01154054355621338, 0.011491328239440919, 0.011576800346374512, 0.01147548770904541, 0.01132755184173584, 0.011380672454833984, 0.011302911758422851, 0.011222463607788086, 0.011253567695617675, 0.011407487869262696, 0.011457216262817383, 0.011417792320251465, 0.011248415946960449, 0.01128060817718506, 0.011538271903991698, 0.011560959815979004, 0.011395456314086914, 0.011333312034606934, 0.011373408317565918, 0.011511584281921387, 0.011472352027893066, 0.01138764762878418, 0.011552448272705079, 0.011434304237365722, 0.01139065647125244, 0.011394880294799804, 0.011239935874938965, 0.011222528457641602, 0.011260416030883789, 0.011309056282043458, 0.011084863662719727, 0.011158432006835937, 0.011400320053100587, 0.011684767723083496, 0.011705856323242187, 0.011612223625183106, 0.011616928100585937, 0.01155782413482666, 0.011719264030456544, 0.011303168296813965, 0.01109228801727295, 0.01127619171142578, 0.011396096229553223, 0.011303071975708007, 0.011543007850646972, 0.011401375770568848, 0.011226719856262207, 0.011202336311340332, 0.01122374439239502, 0.011194304466247558, 0.011431936264038087, 0.011388928413391113, 0.011466303825378417, 0.011571935653686524, 0.011447456359863281, 0.01147049617767334, 0.011185471534729003, 0.01138268756866455, 0.011340576171875, 0.011339776039123535, 0.011591520309448242, 0.011524255752563477, 0.01160752010345459, 0.011643424034118652, 0.011480640411376954, 0.011534784317016602, 0.011390432357788086, 0.01132307243347168, 0.011221792221069336, 0.010997823715209961, 0.010950655937194824, 0.011142304420471191, 0.011305824279785156, 0.01115884780883789, 0.01132806396484375, 0.01159603214263916, 0.011435327529907226, 0.011380672454833984, 0.011412096023559571, 0.011396639823913575, 0.011386816024780273, 0.011530783653259278, 0.011334976196289063, 0.011316255569458008, 0.011413215637207031, 0.011395008087158202, 0.011255071640014649, 0.01150211238861084, 0.01143558406829834, 0.01192204761505127, 0.011956512451171875, 0.011598912239074707, 0.011410079956054687, 0.011385919570922852, 0.011469759941101075, 0.0114617919921875, 0.011524959564208985, 0.011692031860351563, 0.011333632469177245, 0.011304960250854493, 0.011593728065490723, 0.01165721607208252, 0.011419872283935546, 0.011534111976623535, 0.011802463531494141, 0.01147100830078125, 0.011757823944091797, 0.011482463836669923, 0.01131497573852539, 0.011324159622192383, 0.011302783966064453, 0.011247008323669434, 0.011336288452148437, 0.011482272148132324, 0.011504480361938477, 0.011187552452087402, 0.011014335632324219, 0.011006431579589844, 0.011196352005004883, 0.011448543548583984, 0.011786016464233398, 0.011518143653869628, 
0.0113438081741333, 0.011450336456298829, 0.011713888168334961, 0.014022527694702148, 0.011655872344970702, 0.011632384300231934, 0.011481120109558105, 0.011462880134582519, 0.011303263664245606, 0.011177632331848144, 0.011169695854187011, 0.01145251178741455, 0.01146675205230713, 0.011394271850585938, 0.011402303695678712, 0.011368224143981934, 0.011376799583435059, 0.011758624076843261, 0.011551103591918944, 0.011362688064575195, 0.011249407768249512, 0.011210623741149903, 0.011183679580688477, 0.011477791786193847, 0.011648927688598633, 0.011614368438720704, 0.011456480026245117, 0.011481216430664062, 0.011396991729736327, 0.01120687961578369, 0.01105465602874756, 0.011036800384521485, 0.011175423622131348, 0.011260512351989747, 0.011165696144104004, 0.011392831802368164, 0.01124953556060791, 0.011456831932067871, 0.011280256271362305, 0.01153651237487793, 0.011376768112182618, 0.011472767829895019, 0.01147606372833252, 0.011362815856933594, 0.011204352378845215, 0.011276960372924805, 0.011318431854248048, 0.011479328155517578, 0.011385439872741699, 0.011321311950683593, 0.01127785587310791, 0.011346559524536132, 0.012291872024536132, 0.016046464920043944, 0.015373920440673828, 0.011554911613464355, 0.011342111587524415, 0.011253600120544433, 0.011236703872680665, 0.011177760124206543, 0.011076031684875488, 0.011390496253967285, 0.011491904258728028, 0.011560256004333497, 0.011459168434143066, 0.011396703720092773, 0.011481504440307617, 0.011438079833984375, 0.011304960250854493, 0.01139913558959961, 0.01140944004058838, 0.011347135543823243, 0.011286656379699707, 0.011404064178466797, 0.01150496006011963, 0.011397407531738282, 0.011417887687683105, 0.01113491153717041, 0.011211903572082519, 0.011645919799804687, 0.011658368110656738, 0.011613280296325683, 0.011558367729187011, 0.011491647720336914, 0.011978752136230468, 0.011326560020446777, 0.011172767639160155, 0.011209919929504394, 0.01115772819519043, 0.011117183685302734, 0.011470911979675293, 0.01134768009185791, 0.011332799911499023, 0.011502271652221679, 0.01136575984954834, 0.011318207740783692, 0.011222208023071289, 0.011400159835815429, 0.011456543922424317, 0.011430047988891601, 0.01132630443572998, 0.011294591903686523, 0.011506624221801758, 0.011339039802551269, 0.011465439796447754, 0.011308320045471191, 0.011220000267028808, 0.011183744430541992, 0.011493087768554688, 0.011569503784179688, 0.011720447540283204, 0.011773951530456543, 0.011538687705993653, 0.011544544219970703, 0.011444576263427734, 0.011851648330688476, 0.011355072021484376, 0.0114901762008667, 0.011317248344421387, 0.011271552085876465, 0.011473631858825684, 0.011474495887756347, 0.011527839660644532, 0.011315808296203614, 0.011546688079833984, 0.011382783889770508, 0.01132953643798828, 0.01128809642791748, 0.011333503723144532, 0.011448415756225586, 0.011491840362548827, 0.011517824172973633, 0.011509311676025391, 0.011655679702758789, 0.011647295951843262, 0.011771648406982423, 0.011629887580871582, 0.011601823806762696, 0.011498496055603028, 0.011361536026000976, 0.011356703758239746, 0.011286175727844239, 0.011306976318359375, 0.011382207870483399, 0.01155571174621582, 0.011362367630004883, 0.011471936225891113, 0.011461376190185547, 0.011335743904113769, 0.011432064056396485, 0.011531904220581054, 0.011321727752685546, 0.011353983879089355, 0.011331520080566405, 0.01130515193939209, 0.011405311584472656, 0.011334815979003906, 0.01132153606414795, 0.011439007759094238, 0.011288607597351074, 0.011207776069641113, 0.011099072456359863, 
0.011328831672668457, 0.01164735984802246, 0.011677696228027343, 0.012042207717895508, 0.011521599769592284, 0.01209596824645996, 0.011251711845397949, 0.01110540771484375, 0.01113491153717041, 0.011086400032043458, 0.011023776054382324, 0.011246560096740722, 0.011314944267272949, 0.011133184432983398, 0.011300864219665528, 0.011206751823425292, 0.011374879837036132, 0.011299679756164551, 0.01111734390258789, 0.011167743682861327, 0.011170880317687988, 0.011125823974609376, 0.011093215942382812, 0.011279168128967286]",tokens/s,87.50181426227068,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.90272,14274.199552,0.0,13878.951936,13865.632768,s,1,7.55004736328125,7.55004736328125,0.0,7.55004736328125,7.55004736328125,7.55004736328125,7.55004736328125,[7.55004736328125],,kWh,1.3800054829179469e-05,1.5147876523830924e-06,6.2275049819982e-06,2.1542347463560763e-05,,MB,1129.074688,14695.727104,0.0,14289.993728,14241.298944,s,10,13.345639648437501,1.3345639648437502,0.003894462219437641,1.3360365600585937,1.3384409790039062,1.3385873718261718,1.3387044860839843,"[1.32904296875, 1.3282088623046875, 1.335257568359375, 1.3383267822265625, 1.3303668212890625, 1.3368155517578124, 1.3375191650390625, 1.332959716796875, 1.338408447265625, 1.3387337646484374]",tokens/s,191.82295247269943,kWh,3.896184749041557e-05,4.297043286655047e-06,2.5829770663800265e-05,6.908866144087088e-05,tokens/kWh,3705383.7006104984,MB,1144.2176,14863.499264,0.0,14457.765888,14413.156352,s,10,40.96315966796875,4.096315966796874,0.004376815909226851,4.095991577148437,4.10388447265625,4.10389853515625,4.10390978515625,"[4.0968349609375, 4.09696240234375, 4.0912841796875, 4.091384033203125, 4.091685791015625, 4.095148193359375, 4.10391259765625, 4.10388134765625, 4.0973232421875, 4.094742919921875]",tokens/s,15.379672981931378,kWh,0.00011971858115291828,1.3205879698689224e-05,7.97050915417999e-05,0.00021262955239340745,tokens/kWh,296289.952599991,,s,630,40.95959750366215,0.065015234132797,0.00030070849411744234,0.06500059127807617,0.06537307205200196,0.06550978622436524,0.06580866020202637,"[0.06566841888427734, 0.0649345932006836, 0.06491478729248047, 0.064529052734375, 0.06437792205810547, 0.06460717010498047, 0.06472637176513672, 0.06464761352539063, 0.06466377258300782, 0.06464022064208984, 0.06524393463134766, 0.06474956512451172, 0.06488063812255859, 0.06469017791748047, 0.06489087677001953, 0.06500761413574219, 0.06507929229736328, 0.06488790130615234, 0.06477302551269531, 0.06478643035888672, 0.06467791748046875, 0.06478128051757813, 0.06495948791503907, 0.06486016082763672, 0.06484114837646485, 0.06477062225341797, 0.06485327911376954, 0.06519471740722656, 0.06498918151855469, 0.06497280120849609, 0.06513423919677734, 0.06511446380615235, 0.06509158325195312, 0.0651878433227539, 0.06503424072265625, 0.06495027160644531, 0.06492147064208985, 0.06489920043945313, 0.06504227447509765, 
0.06505487823486328, 0.06510944366455078, 0.06513481903076172, 0.06512834930419922, 0.06528361511230468, 0.06507778930664063, 0.06528355407714843, 0.06535820770263671, 0.06523753356933594, 0.06533468627929688, 0.06531251525878906, 0.06503919982910156, 0.0651673583984375, 0.06505677032470703, 0.06529228973388672, 0.06516524505615234, 0.06512646484375, 0.06538822174072266, 0.06524905395507813, 0.06524777221679688, 0.06541516876220703, 0.06530246734619141, 0.06541289520263673, 0.06531100463867187, 0.06574205017089843, 0.06474626922607422, 0.06458573150634765, 0.06454886627197266, 0.06448902130126953, 0.06465171051025391, 0.06456521606445312, 0.06463081359863282, 0.06571759796142577, 0.06469599914550782, 0.06491776275634766, 0.06506569671630859, 0.06485417938232421, 0.06487229156494141, 0.0650997085571289, 0.06497187042236328, 0.06491168212890625, 0.0650533447265625, 0.06488678741455078, 0.06479257965087891, 0.06527699279785157, 0.06478943634033203, 0.06479052734375, 0.06486630249023438, 0.06473628997802734, 0.06518867492675781, 0.0647927017211914, 0.06483766174316406, 0.06511766052246094, 0.06510582733154296, 0.06502873229980469, 0.06505401611328125, 0.06495712280273437, 0.06498713684082032, 0.0649085464477539, 0.06493177795410156, 0.0649019546508789, 0.06482128143310546, 0.06498095703125, 0.06516121673583984, 0.06492569732666016, 0.06497593688964844, 0.06514579010009766, 0.06516310119628907, 0.065087646484375, 0.06524889373779297, 0.0652721939086914, 0.0653148193359375, 0.06516918182373047, 0.06506495666503906, 0.06531462097167968, 0.06532870483398437, 0.0650514907836914, 0.06499327850341798, 0.06496611022949218, 0.06510441589355469, 0.06534963226318359, 0.06541004943847656, 0.06521753692626953, 0.06518918609619141, 0.06565516662597656, 0.06534751892089843, 0.06533113861083985, 0.06566092681884765, 0.06467581176757813, 0.06447232055664062, 0.06449842834472656, 0.06442598724365234, 0.06454025268554688, 0.06453414154052735, 0.06457955169677734, 0.06463369750976562, 0.06469990539550781, 0.06478844451904296, 0.06468659210205079, 0.06478643035888672, 0.0648419189453125, 0.06505452728271484, 0.06485919952392578, 0.06507965087890626, 0.06497955322265625, 0.06466934204101563, 0.0644807357788086, 0.06452438354492188, 0.06451689910888672, 0.0645889892578125, 0.06477606201171875, 0.0646902084350586, 0.06488771057128906, 0.0649031982421875, 0.0649785614013672, 0.06503663635253906, 0.06511325073242187, 0.06509859466552734, 0.06510173034667968, 0.06499452972412109, 0.0649708480834961, 0.06495465850830077, 0.06484585571289063, 0.06518614196777343, 0.06482752227783203, 0.06474752044677734, 0.06481327819824219, 0.06491932678222656, 0.06494822692871094, 0.06515238189697266, 0.0650183334350586, 0.06523222351074219, 0.06515766143798828, 0.06512777709960937, 0.06517775726318359, 0.06519254302978515, 0.06522492980957031, 0.06504230499267578, 0.06504457855224609, 0.06518342590332031, 0.06499263763427734, 0.06497571563720703, 0.06496265411376953, 0.06517350769042969, 0.06516896057128906, 0.06528173065185547, 0.06538246154785156, 0.0652008285522461, 0.06550131225585938, 0.06537583923339843, 0.06551789093017578, 0.06466127777099609, 0.0644920654296875, 0.06452019500732421, 0.06456082916259766, 0.06460572814941407, 0.0644820785522461, 0.06446694183349609, 0.06463488006591797, 0.06458573150634765, 0.06469427490234375, 0.06481436920166016, 0.06486093139648437, 0.0648636474609375, 0.06487635040283203, 0.06506896209716796, 0.064768798828125, 0.06471481323242187, 0.0646983642578125, 0.06454393768310547, 
0.06450873565673829, 0.06484134674072266, 0.06481139373779297, 0.06474137878417968, 0.06478438568115234, 0.06505052947998047, 0.06504867553710937, 0.06495641326904297, 0.06489702606201173, 0.06500748443603516, 0.06494834899902344, 0.06498303985595703, 0.06494783782958985, 0.06490560150146485, 0.06471065521240234, 0.06470764923095704, 0.06493280029296875, 0.06489839935302734, 0.06490589141845703, 0.06488665771484375, 0.0650769271850586, 0.06490550231933594, 0.06500969696044921, 0.06506050872802735, 0.06521001434326172, 0.06527378845214844, 0.06508332824707032, 0.06537862396240235, 0.06519257354736328, 0.06497689819335938, 0.06497261047363281, 0.06490509033203125, 0.06509305572509766, 0.06494502258300781, 0.06516534423828126, 0.06528406524658203, 0.06514076995849609, 0.06523286437988281, 0.06554332733154297, 0.06534381103515625, 0.06538409423828125, 0.06563276672363282, 0.06530496215820313, 0.06576019287109375, 0.06473725128173828, 0.06455296325683593, 0.06439094543457032, 0.06444000244140625, 0.06440399932861328, 0.06449254608154296, 0.06460288238525391, 0.06458745574951172, 0.06467027282714843, 0.06464921569824218, 0.06488063812255859, 0.06479027557373047, 0.0646371841430664, 0.06480681610107422, 0.06494998168945312, 0.06486463928222656, 0.06469631958007813, 0.06464717102050781, 0.06469120025634766, 0.06469516754150391, 0.06468732452392578, 0.06474127960205078, 0.06512108612060546, 0.06474320220947266, 0.06476432037353516, 0.06485603332519531, 0.06473017883300781, 0.0649920654296875, 0.06499094390869141, 0.06499971008300781, 0.0649483871459961, 0.06502178955078125, 0.06498524475097656, 0.06486573028564453, 0.06489555358886719, 0.06516941070556641, 0.06506665802001953, 0.06490509033203125, 0.06499702453613282, 0.06496342468261719, 0.06498505401611328, 0.0651304931640625, 0.06514265441894532, 0.06519331359863281, 0.06515106964111328, 0.06518841552734375, 0.06523932647705079, 0.06521590423583984, 0.06518422698974609, 0.06502397155761719, 0.06500479888916015, 0.06505955505371094, 0.06500662231445313, 0.06506598663330078, 0.06529843139648438, 0.0652042236328125, 0.06524684906005859, 0.06536844635009766, 0.06525917053222656, 0.0653028793334961, 0.06538240051269531, 0.06530662536621094, 0.06581043243408204, 0.06480486297607421, 0.06455059051513672, 0.06436873626708985, 0.06460643005371093, 0.06458338928222657, 0.06468172454833984, 0.06466614532470703, 0.0645670394897461, 0.06468633270263671, 0.06464832305908202, 0.06487273406982422, 0.06517158508300781, 0.06481970977783204, 0.06497071838378907, 0.06490499114990235, 0.06494025421142578, 0.06484371185302734, 0.0646075210571289, 0.064768798828125, 0.06466371154785157, 0.06477811431884765, 0.06478224182128907, 0.06464112091064453, 0.06476188659667968, 0.0647628173828125, 0.06468918609619141, 0.0646368637084961, 0.0648908462524414, 0.06497901153564453, 0.06499062347412109, 0.06511676788330079, 0.06512640380859375, 0.06505401611328125, 0.06510047912597657, 0.06476515197753906, 0.0647524185180664, 0.06487449645996093, 0.0650240020751953, 0.06510387420654297, 0.06495549011230468, 0.06512060546875, 0.06526009368896485, 0.06518374633789062, 0.06514864349365235, 0.0651918716430664, 0.06524307250976563, 0.06530457305908204, 0.06527843475341796, 0.06501558685302734, 0.06514895629882812, 0.06534770965576171, 0.06509372711181641, 0.06512630462646485, 0.06607872009277344, 0.06522982025146484, 0.06520524597167969, 0.06540691375732421, 0.06531897735595703, 0.06524313354492188, 0.06541455841064453, 0.06537276458740235, 0.06572978973388671, 0.06578253173828125, 
0.06482681274414062, 0.06451795196533203, 0.06444729614257813, 0.06442393493652344, 0.0647352294921875, 0.06463078308105469, 0.06457158660888672, 0.06472013092041015, 0.06484639739990235, 0.06554192352294921, 0.06488086700439454, 0.06483148956298829, 0.06485807800292968, 0.06518172454833984, 0.06533939361572266, 0.06498303985595703, 0.06475775909423828, 0.06483148956298829, 0.06468402862548828, 0.06485148620605469, 0.06499488067626953, 0.06501673889160156, 0.06486937713623046, 0.06485017395019531, 0.0650881576538086, 0.06509986877441407, 0.06499046325683594, 0.06515174102783203, 0.06541280364990235, 0.06516515350341796, 0.06511459350585938, 0.06521199798583985, 0.06526403045654297, 0.06499942779541015, 0.06506633758544922, 0.06502057647705078, 0.06510793304443359, 0.0655848617553711, 0.06494611358642578, 0.06521040344238281, 0.06518972778320313, 0.06546809387207031, 0.06522962951660156, 0.06529843139648438, 0.06558035278320312, 0.06542546844482422, 0.06523958587646485, 0.06530876922607422, 0.06528739166259766, 0.0652415008544922, 0.06523664093017578, 0.06572048187255859, 0.06552841949462891, 0.06527516937255859, 0.06544044494628906, 0.06539878082275391, 0.06531276702880859, 0.06557901000976563, 0.0655579833984375, 0.06586000061035156, 0.06545830535888672, 0.06546227264404297, 0.06596784210205078, 0.06479612731933594, 0.06460905456542969, 0.06471068572998047, 0.06447625732421874, 0.06458822631835938, 0.06484835052490234, 0.06476534271240235, 0.06479318237304688, 0.06462646484375, 0.06482150268554687, 0.06494409942626952, 0.06486019134521484, 0.06479049682617187, 0.06489292907714844, 0.0651608657836914, 0.06488272094726563, 0.06495587158203125, 0.0648642578125, 0.06471561431884766, 0.06484992218017578, 0.06548585510253906, 0.0651785888671875, 0.06488665771484375, 0.06478451538085937, 0.06500713348388672, 0.06502857971191406, 0.06495577239990234, 0.06512191772460937, 0.06566194915771484, 0.06539453125, 0.06518390655517578, 0.06499737548828124, 0.06515302276611327, 0.06500147247314453, 0.06496208190917968, 0.06532867431640625, 0.065274658203125, 0.06513426971435547, 0.06526000213623047, 0.06526976013183594, 0.06536396789550782, 0.0654725112915039, 0.06538400268554688, 0.06539103698730468, 0.06556854248046876, 0.0655912322998047, 0.06531206512451172, 0.0658043212890625, 0.0649942398071289, 0.0652260513305664, 0.06528844451904296, 0.06515699005126953, 0.06513107299804688, 0.06537830352783203, 0.06532051086425782, 0.06523494720458985, 0.065285888671875, 0.06517420959472656, 0.06545817565917969, 0.06550527954101562, 0.06596604919433594, 0.06548694610595703, 0.06600908660888671, 0.06495177459716797, 0.06456761932373047, 0.06459986877441407, 0.06448931121826172, 0.06461702728271485, 0.06476595306396485, 0.06465535736083984, 0.0648253402709961, 0.06484150695800782, 0.06476620483398438, 0.06488060760498048, 0.06486611175537109, 0.06500985717773437, 0.06498303985595703, 0.06528614044189453, 0.06506700897216797, 0.06486195373535156, 0.06490345764160156, 0.0647775650024414, 0.0647399673461914, 0.06466969299316407, 0.06493596649169922, 0.06477616119384766, 0.06496665954589843, 0.06510387420654297, 0.06502809906005859, 0.06503132629394531, 0.06511638641357421, 0.06524524688720704, 0.06500204467773438, 0.06512614440917969, 0.06497100830078124, 0.06493103790283203, 0.06491801452636718, 0.06514495849609375, 0.06500982666015626, 0.06499737548828124, 0.06487452697753907, 0.06493746948242188, 0.06505110168457032, 0.06507705688476563, 0.06498249816894532, 0.06516400146484375, 0.06539469146728516, 
0.06531890869140625, 0.0652390365600586, 0.0651878433227539, 0.06531053161621093, 0.06518739318847656, 0.06508515167236328, 0.06504108428955079, 0.06505622100830079, 0.06520665740966797, 0.06515340423583985, 0.06516643524169922, 0.06523792266845703, 0.06529638671875, 0.06522415924072265, 0.06541327667236328, 0.0653438720703125, 0.06532495880126953, 0.06526764678955078, 0.06558719635009766, 0.06475142669677734, 0.06442540740966797, 0.06439762878417969, 0.06428511810302734, 0.06423757171630859, 0.06430105590820312, 0.06452569580078125, 0.06485874938964843, 0.06492364501953125, 0.06457094573974609, 0.06479625701904297, 0.06475145721435546, 0.06480377960205078, 0.06479055786132812, 0.06503427124023438, 0.06476799774169922, 0.06475737762451172, 0.06491808319091796, 0.06468716430664062, 0.06462335968017578, 0.06455625915527344, 0.06453305816650391, 0.06464739227294922, 0.06489907073974609, 0.06476390075683594, 0.06473728179931641, 0.06612966156005859, 0.06490310668945312, 0.06496697235107422, 0.06522035217285156, 0.06524543762207032, 0.06548592376708984, 0.06495938873291016, 0.06513017272949219, 0.0651185302734375, 0.06496463775634766, 0.06490723419189454, 0.06497484588623047, 0.06505062103271485, 0.06496230316162109, 0.06503977966308594, 0.06507504272460937, 0.06514585876464844, 0.0652779541015625, 0.06534915161132812, 0.06519602966308594, 0.06534601593017578, 0.06513375854492187, 0.06514265441894532, 0.06531305694580078, 0.06525199890136718, 0.0651855697631836, 0.06507894134521484, 0.06510854339599609, 0.06512230682373046, 0.06523407745361329, 0.06532592010498046, 0.06526361846923828, 0.06537149047851562, 0.06567388916015625, 0.06551347351074219, 0.06529452514648437]",tokens/s,15.381010517588047,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 374, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 116, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 120907 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,782.934016,14129.496064,0.0,13734.248448,13728.777216,s,1,7.401724609375,7.401724609375,0.0,7.401724609375,7.401724609375,7.401724609375,7.401724609375,[7.401724609375],,kWh,7.16384237918722e-06,7.825425540195544e-07,3.5797250860017393e-06,1.1526110019208514e-05,,MB,1137.598464,14142.078976,0.0,13736.3456,13487.53408,s,10,12.767971313476561,1.276797131347656,0.0034546170262645267,1.2778217773437501,1.2802987060546875,1.2804129638671875,1.2805043701171874,"[1.2690040283203126, 1.27655322265625, 1.273699462890625, 1.2774561767578125, 1.2802733154296875, 1.273830322265625, 1.27937548828125, 1.2805272216796875, 1.279064697265625, 1.2781873779296875]",tokens/s,200.50170361033994,kWh,3.725450471458354e-05,4.10871512767824e-06,2.4681547523000557e-05,6.604476736526235e-05,tokens/kWh,3876158.705869689,MB,1162.518528,14142.078976,0.0,13736.3456,13661.262848,s,10,37.650972656250005,3.7650972656249997,0.0023110677335511625,3.765232421875,3.768010693359375,3.7682917236328124,3.7685165478515628,"[3.76295849609375, 3.766229248046875, 3.7679482421875, 3.76857275390625, 3.766557373046875, 3.765681884765625, 3.760609375, 3.764782958984375, 3.76463818359375, 3.762994140625]",tokens/s,16.73263545544609,kWh,0.00010977200966958206,1.2107495557714114e-05,7.312919739219867e-05,0.00019500870261949485,tokens/kWh,323062.5051792019,,s,630,37.64779961776732,0.05975841209169418,0.00019985936006495185,0.05975465583801269,0.059999047088623046,0.0600776029586792,0.06035248321533203,"[0.060187488555908206, 0.059701248168945314, 0.059469825744628904, 0.05955081558227539, 0.05928847885131836, 0.059463680267333986, 0.05941183853149414, 0.05939468765258789, 0.05959884643554687, 0.06039756774902344, 0.0595333137512207, 0.059588481903076175, 0.05944316864013672, 0.059616737365722657, 0.059562686920166016, 0.05978112030029297, 0.059770591735839845, 0.05977088165283203, 0.05964009475708008, 0.05964083099365235, 0.0595561294555664, 0.05955452728271484, 0.0594977912902832, 0.05954220962524414, 0.05954150390625, 0.059686912536621096, 0.059510784149169924, 0.05954764938354492, 0.05949161529541016, 0.0596910400390625, 0.05975315093994141, 0.05966438293457031, 0.059795425415039065, 0.059856895446777345, 0.059672607421875, 0.05982393646240235, 0.05967686462402344, 0.059805694580078124, 0.05965414428710938, 0.059584510803222655, 0.06037299346923828, 0.05977907180786133, 0.059772926330566405, 0.059719680786132816, 0.05973929595947266, 0.059705760955810545, 0.05970169448852539, 0.05989718246459961, 0.05993715286254883, 0.05998825454711914, 0.05991424179077148, 0.05996502304077148, 0.05989382553100586, 0.059913761138916014, 0.05985363388061524, 0.05983027267456055, 0.05979689788818359, 
0.059961761474609375, 0.06000864028930664, 0.059807743072509766, 0.0597995834350586, 0.05982204818725586, 0.05977679824829102, 0.06014617538452149, 0.05969142532348633, 0.05938307189941406, 0.059437793731689455, 0.05940140914916992, 0.05948259353637695, 0.05948982238769531, 0.05949545669555664, 0.05949792098999023, 0.05951932907104492, 0.05962319946289062, 0.05962339019775391, 0.059581886291503905, 0.05959148788452148, 0.059568126678466796, 0.05955910491943359, 0.059893695831298825, 0.059818878173828124, 0.05970534515380859, 0.0596357421875, 0.05951631927490234, 0.05954412841796875, 0.05958041763305664, 0.059579681396484375, 0.059592575073242185, 0.05967116928100586, 0.05959027099609375, 0.05976028823852539, 0.05978412628173828, 0.059791358947753906, 0.05969305419921875, 0.05969919967651367, 0.059756542205810545, 0.059842655181884766, 0.05976873779296875, 0.05978889465332031, 0.060110622406005856, 0.05999004745483399, 0.05984438323974609, 0.059732833862304685, 0.05977494430541992, 0.060090465545654295, 0.05975187301635742, 0.05982374572753906, 0.05973078536987304, 0.05991628646850586, 0.05994496154785156, 0.059854270935058594, 0.05996319961547852, 0.06004169464111328, 0.060068126678466796, 0.059942401885986325, 0.059994014739990234, 0.05999593734741211, 0.060005184173583984, 0.06001804733276367, 0.059970176696777344, 0.05978112030029297, 0.05990399932861328, 0.06063036727905274, 0.06003692626953125, 0.06001321411132812, 0.05986016082763672, 0.060125503540039066, 0.059625537872314456, 0.05962924957275391, 0.05940310287475586, 0.05945657730102539, 0.05948652648925781, 0.05942284774780274, 0.05973446273803711, 0.06014166259765625, 0.05984601593017578, 0.059585121154785155, 0.059615264892578124, 0.05959676742553711, 0.05955535888671875, 0.059576801300048825, 0.05978112030029297, 0.05980979156494141, 0.05969097518920898, 0.059607105255126955, 0.059762657165527346, 0.05961884689331055, 0.05973654556274414, 0.05978217697143555, 0.05955683135986328, 0.05962937545776367, 0.06022505569458008, 0.059966110229492185, 0.059834369659423826, 0.05976883316040039, 0.059774974822998046, 0.05967443084716797, 0.059748382568359376, 0.06004444885253906, 0.05995212936401367, 0.05979692840576172, 0.05976326370239258, 0.059840511322021485, 0.05981919860839844, 0.05986387252807617, 0.05986304092407226, 0.05970684814453125, 0.05994345474243164, 0.05978857421875, 0.059880062103271486, 0.059784385681152345, 0.05993529510498047, 0.05986953735351563, 0.05992784118652344, 0.059928512573242186, 0.060305633544921876, 0.059925056457519534, 0.05987123107910156, 0.05985279846191406, 0.05994496154785156, 0.05991219329833984, 0.05993859100341797, 0.059776512145996094, 0.05989007949829102, 0.05986540985107422, 0.05988662338256836, 0.05993068695068359, 0.060040096282958984, 0.0600002555847168, 0.060268543243408204, 0.059686912536621096, 0.05954079818725586, 0.0594600944519043, 0.05957603073120117, 0.05950921630859375, 0.05954764938354492, 0.0596190071105957, 0.059644222259521484, 0.05963776016235352, 0.059598560333251956, 0.05971177673339844, 0.05955583953857422, 0.059651615142822266, 0.05965046310424805, 0.05953532791137695, 0.05972572708129883, 0.05982352066040039, 0.05971212768554687, 0.05959596633911133, 0.05960188674926758, 0.05956787109375, 0.05972316741943359, 0.05976559829711914, 0.059789310455322264, 0.05977052688598633, 0.059647327423095704, 0.05968707275390625, 0.05972409439086914, 0.0597628173828125, 0.05972623825073242, 0.05965363311767578, 0.05974272155761719, 0.05997772979736328, 0.06014976119995117, 
0.06020438385009766, 0.059646591186523434, 0.05971868896484375, 0.059887775421142576, 0.05975481414794922, 0.05998982238769531, 0.059880062103271486, 0.05974009704589844, 0.059725345611572264, 0.059810367584228516, 0.05985696029663086, 0.05979750442504883, 0.0599818229675293, 0.05998284912109375, 0.060006401062011716, 0.059896671295166015, 0.06006697463989258, 0.05991731262207031, 0.06008195114135742, 0.06011312103271484, 0.060022785186767576, 0.060184574127197264, 0.0602149772644043, 0.060335647583007815, 0.06002463912963867, 0.05997260665893555, 0.06005075073242187, 0.06004908752441406, 0.0604956169128418, 0.059609153747558596, 0.05955440139770508, 0.05954716873168946, 0.059424705505371093, 0.05978988647460937, 0.05958041763305664, 0.060014591217041016, 0.059573726654052736, 0.059587039947509766, 0.05951059341430664, 0.059698497772216794, 0.05980051040649414, 0.05970684814453125, 0.059612766265869144, 0.05961004638671875, 0.059717632293701174, 0.059797374725341794, 0.05974556732177734, 0.059568992614746095, 0.05954889678955078, 0.059634273529052734, 0.05976287841796875, 0.059676673889160155, 0.05972518539428711, 0.05967526245117188, 0.0596049919128418, 0.059652000427246096, 0.05975839996337891, 0.05993913650512695, 0.0598773422241211, 0.05976601409912109, 0.0598392333984375, 0.059842559814453126, 0.059789310455322264, 0.05975449752807617, 0.05976886367797852, 0.05987705612182617, 0.05959267044067383, 0.05974867248535156, 0.05974972915649414, 0.05991462326049805, 0.05976927947998047, 0.05966937637329101, 0.05979849624633789, 0.059768318176269535, 0.05986095809936524, 0.059951648712158204, 0.0599444465637207, 0.05988195037841797, 0.05994895935058594, 0.060069408416748044, 0.05995993423461914, 0.0598546257019043, 0.059892990112304687, 0.05980460739135742, 0.059824127197265625, 0.0600384635925293, 0.059953857421875, 0.05994905471801758, 0.059936767578125, 0.05992038345336914, 0.059969600677490235, 0.06006070327758789, 0.05967929458618164, 0.05942108917236328, 0.059463680267333986, 0.05932783889770508, 0.059670238494873046, 0.05943392181396484, 0.05954134368896485, 0.05956012725830078, 0.059463680267333986, 0.059485248565673825, 0.05960796737670898, 0.05954492950439453, 0.059787105560302735, 0.059568958282470705, 0.05960908889770508, 0.05976268768310547, 0.05991628646850586, 0.059715007781982424, 0.05957894515991211, 0.0595882568359375, 0.060268894195556644, 0.05975449752807617, 0.059694206237792966, 0.05961308670043945, 0.05967766571044922, 0.05971484756469726, 0.059665153503417966, 0.059639263153076175, 0.05975296020507812, 0.05969715118408203, 0.05964799880981445, 0.059731998443603516, 0.06026649475097656, 0.059757537841796875, 0.0597694091796875, 0.05982979202270508, 0.05970143890380859, 0.059681503295898435, 0.059676193237304685, 0.05970915222167969, 0.059738174438476566, 0.059998912811279295, 0.05997772979736328, 0.05976883316040039, 0.059807743072509766, 0.059966880798339846, 0.059857246398925784, 0.05999590301513672, 0.05999257659912109, 0.0599920654296875, 0.05997568130493164, 0.0600494384765625, 0.060016609191894534, 0.059756542205810545, 0.059908096313476565, 0.06019651031494141, 0.060072288513183594, 0.05974425506591797, 0.059829822540283205, 0.05988191986083984, 0.059844062805175784, 0.05997212982177735, 0.06016211318969727, 0.059686878204345706, 0.05937097549438477, 0.05931222534179687, 0.05946182250976562, 0.05946799850463867, 0.05981769561767578, 0.05942300796508789, 0.0594920654296875, 0.059447582244873044, 0.05942230224609375, 0.05961564636230469, 0.059469825744628904, 
0.05956185531616211, 0.05951919937133789, 0.059568031311035156, 0.05975244903564453, 0.059686912536621096, 0.059660289764404295, 0.059676673889160155, 0.05959385681152344, 0.05963776016235352, 0.059572639465332033, 0.05949488067626953, 0.059432193756103514, 0.059433727264404296, 0.05950054550170898, 0.059600704193115236, 0.05963702392578125, 0.05968783950805664, 0.05970473480224609, 0.05959270477294922, 0.05963193511962891, 0.05986681747436524, 0.059789920806884764, 0.05992652893066406, 0.05981824111938477, 0.05978291320800781, 0.05971353530883789, 0.05967871856689453, 0.05967814254760742, 0.059802177429199216, 0.05966563034057617, 0.05980649566650391, 0.05969311904907226, 0.05981587219238281, 0.060022785186767576, 0.059931934356689455, 0.05989638519287109, 0.059754112243652346, 0.05982262420654297, 0.059891712188720705, 0.05977907180786133, 0.059873279571533204, 0.05979052734375, 0.06003180694580078, 0.05977097702026367, 0.059840415954589846, 0.0597212142944336, 0.05986089706420898, 0.059687744140625, 0.05975769424438476, 0.05972588729858398, 0.060084030151367186, 0.05970851135253906, 0.059464607238769535, 0.059487648010253906, 0.05948886489868164, 0.05951216125488281, 0.05954627227783203, 0.059399585723876956, 0.059525760650634765, 0.059539039611816405, 0.059476222991943356, 0.05954060745239258, 0.059529216766357425, 0.05987225723266602, 0.05960815811157227, 0.05972796630859375, 0.0599375991821289, 0.06000844955444336, 0.05978521728515625, 0.05972172927856445, 0.05978112030029297, 0.05962131118774414, 0.05964191818237305, 0.05964761734008789, 0.05951667022705078, 0.05954947280883789, 0.059546047210693356, 0.05962998580932617, 0.05967843246459961, 0.059729503631591796, 0.05965689468383789, 0.0599733772277832, 0.0603875846862793, 0.05978112030029297, 0.05973606491088867, 0.05978316879272461, 0.05969062423706055, 0.05990028762817383, 0.059718910217285155, 0.05966310501098633, 0.05958467102050781, 0.059727710723876955, 0.05957632064819336, 0.059661823272705077, 0.059738624572753904, 0.059698238372802734, 0.059980735778808594, 0.05990524673461914, 0.0599150390625, 0.05985696029663086, 0.0597154541015625, 0.05985823822021484, 0.059916545867919925, 0.0598873291015625, 0.0600700798034668, 0.06040636825561523, 0.05989785766601562, 0.05997292709350586, 0.059877086639404296, 0.059808319091796874, 0.05980201721191406, 0.06004121780395508, 0.05993577575683594, 0.060243968963623044, 0.05971558380126953, 0.05948320007324219, 0.05952767944335938, 0.05944527816772461, 0.06004336166381836, 0.05946774291992187, 0.05939440155029297, 0.0594595832824707, 0.05962464141845703, 0.05956486511230469, 0.05964543914794922, 0.05962799835205078, 0.05975830459594727, 0.05976710510253906, 0.05967792129516602, 0.0598043212890625, 0.059911392211914063, 0.05988454437255859, 0.05978694534301758, 0.05996976089477539, 0.059614654541015624, 0.05948412704467773, 0.05959740829467773, 0.05965619277954102, 0.05957017517089844, 0.05979520034790039, 0.059715839385986326, 0.0595599365234375, 0.05964799880981445, 0.059658241271972653, 0.05974211120605469, 0.05975868988037109, 0.059979774475097655, 0.05989577484130859, 0.059850273132324217, 0.05986067199707031, 0.05975676727294922, 0.059695518493652344, 0.059635902404785154, 0.05988739013671875, 0.059813312530517575, 0.05960326385498047, 0.059816417694091795, 0.05978083038330078, 0.05985686492919922, 0.059840065002441406, 0.05985971069335937, 0.05995542526245117, 0.059954975128173826, 0.05997564697265625, 0.06003919982910156, 0.05976646423339844, 0.059869503021240236, 
0.059881153106689455, 0.059768928527832034, 0.05968217468261719, 0.0597471694946289, 0.05974835205078125, 0.05976678466796875, 0.05977088165283203, 0.05986716842651367, 0.059813697814941405, 0.060256542205810545, 0.059643905639648435, 0.05966761779785156, 0.05943996810913086, 0.05940019226074219, 0.05942691040039062, 0.05936528015136719, 0.059379711151123046, 0.05935932922363281, 0.059394081115722655, 0.05941644668579102, 0.05961068725585938, 0.059421119689941404, 0.059660289764404295, 0.05969900894165039, 0.0597977294921875, 0.059772865295410156, 0.059912223815917966, 0.059848705291748044, 0.05970534515380859, 0.059504863739013675, 0.05963478469848633, 0.05946755218505859, 0.05949257659912109, 0.059628223419189455, 0.0595333137512207, 0.05957017517089844, 0.0596049919128418, 0.05957820892333984, 0.0597751350402832, 0.06011084747314453, 0.05986304092407226, 0.06013132858276367, 0.05996255874633789, 0.05984543991088867, 0.059950206756591795, 0.05987145614624023, 0.05966096115112305, 0.05961321640014648, 0.05967244720458984, 0.05965427017211914, 0.059799518585205075, 0.05972124862670898, 0.059859390258789065, 0.05976038360595703, 0.05963174438476562, 0.05971574401855469, 0.05971148681640625, 0.059643905639648435, 0.059791358947753906, 0.05990115356445312, 0.06002355194091797, 0.05997091293334961, 0.060359359741210934, 0.05994496154785156, 0.059799774169921875, 0.059748126983642576, 0.059780319213867186, 0.05975529479980469, 0.0599733772277832, 0.059797630310058594, 0.059889087677001955, 0.05981254577636719]",tokens/s,16.73404571837661,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,868.335616,2698.903552,0.0,2296.38144,2202.20672,s,1,7.71149169921875,7.71149169921875,0.0,7.71149169921875,7.71149169921875,7.71149169921875,7.71149169921875,[7.71149169921875],,kWh,5.186690570811454e-06,5.647598881478235e-07,1.983057141996536e-06,7.734507600955813e-06,,MB,1193.009152,2799.566848,0.0,2386.558976,2267.495936,s,10,1.9913778076171877,0.19913778076171879,0.0012788419322838599,0.19962133026123047,0.20036922607421875,0.200714306640625,0.20099037109375,"[0.20029254150390624, 0.19691830444335937, 0.19945689392089844, 0.1997857666015625, 0.20105938720703126, 0.1985720672607422, 0.20004588317871094, 0.1980786895751953, 0.19980911254882813, 0.1973591613769531]",tokens/s,1285.5420956323728,kWh,6.009325540476528e-06,6.626912571862145e-07,3.993325190122317e-06,1.0665341987785059e-05,tokens/kWh,24002980.89767726,MB,1241.088,2799.566848,0.0,2386.558976,2267.498496,s,10,14.162250244140624,1.4162250244140624,0.00469332052118712,1.4185830078125,1.4202748291015623,1.4210951782226562,1.4217514575195314,"[1.41077294921875, 1.409296875, 1.418581787109375, 1.419492431640625, 1.4199619140625, 1.41492724609375, 1.420092529296875, 1.42191552734375, 1.408624755859375, 
1.418584228515625]",tokens/s,44.484456152061796,kWh,4.1357217170774606e-05,4.560883780275874e-06,2.3221974359877643e-05,6.914007531092813e-05,tokens/kWh,911193.6849458762,,s,630,14.155974164962766,0.022469800261845665,0.00029686123320211803,0.022425248146057127,0.02271115779876709,0.022880822467803954,0.023675017528533945,"[0.02303455924987793, 0.022343103408813476, 0.022587423324584962, 0.02248454475402832, 0.022522687911987305, 0.022565120697021483, 0.022464128494262697, 0.02244758415222168, 0.023765920639038086, 0.022552480697631837, 0.022337247848510742, 0.022391328811645506, 0.02236390495300293, 0.02233763122558594, 0.022312896728515625, 0.0223253116607666, 0.022276031494140626, 0.02234979248046875, 0.022378559112548827, 0.02229212760925293, 0.022415647506713866, 0.02229043197631836, 0.02248908805847168, 0.02209791946411133, 0.022316768646240236, 0.022231327056884766, 0.022228992462158204, 0.02218828773498535, 0.02221612739562988, 0.02217193603515625, 0.022134431838989256, 0.022169984817504883, 0.022271743774414064, 0.022464128494262697, 0.022535839080810548, 0.022135776519775392, 0.022188032150268554, 0.022240928649902344, 0.022900480270385742, 0.02240777587890625, 0.02216499137878418, 0.022729215621948243, 0.022386688232421875, 0.022216064453125, 0.022645376205444337, 0.02223411178588867, 0.022303743362426756, 0.02246063995361328, 0.022279680252075194, 0.022298912048339843, 0.02234783935546875, 0.022314271926879882, 0.022421823501586915, 0.022321632385253906, 0.02241481590270996, 0.022392480850219727, 0.02230575942993164, 0.022214496612548828, 0.02221174430847168, 0.022251583099365233, 0.022278047561645507, 0.022298719406127928, 0.02243452835083008, 0.0224880313873291, 0.02272447967529297, 0.022769472122192384, 0.022228511810302734, 0.022287103652954103, 0.022280160903930663, 0.022382368087768556, 0.02256496047973633, 0.02214236831665039, 0.022233184814453126, 0.022336128234863282, 0.022416799545288087, 0.02237049674987793, 0.02231875228881836, 0.022402015686035157, 0.022382368087768556, 0.022382591247558595, 0.02230886459350586, 0.02225766372680664, 0.022340736389160155, 0.02231999969482422, 0.02209382438659668, 0.02208563232421875, 0.022224767684936524, 0.022181119918823242, 0.022338336944580078, 0.022212703704833983, 0.02233344078063965, 0.022242975234985352, 0.022034784317016602, 0.02206924819946289, 0.022190080642700196, 0.02231907272338867, 0.02214067268371582, 0.022296672821044923, 0.022251167297363282, 0.02316057586669922, 0.02243459129333496, 0.022310815811157226, 0.02321776008605957, 0.02254470443725586, 0.022335424423217773, 0.022268159866333008, 0.022409215927124023, 0.022339584350585938, 0.02230179214477539, 0.022309568405151366, 0.022347999572753907, 0.02221696090698242, 0.022281984329223632, 0.02229248046875, 0.02229209518432617, 0.02237273597717285, 0.022134336471557617, 0.022200767517089843, 0.022351871490478514, 0.022478847503662108, 0.022343008041381836, 0.022571680068969726, 0.022571008682250978, 0.022577152252197266, 0.022596960067749024, 0.02242019271850586, 0.022483968734741212, 0.022566911697387695, 0.022576799392700197, 0.022401376724243163, 0.022530336380004883, 0.02244924736022949, 0.02240166473388672, 0.022427072525024416, 0.022491327285766603, 0.022456703186035157, 0.022527999877929687, 0.02242095947265625, 0.022331071853637696, 0.022540224075317382, 0.022490016937255858, 0.022414688110351563, 0.022391328811645506, 0.022493568420410157, 0.02272230339050293, 0.022502496719360353, 0.02244291114807129, 0.022487039566040038, 0.02235500717163086, 
0.0223176326751709, 0.022796672821044923, 0.022995040893554686, 0.023162399291992188, 0.02272483253479004, 0.022659231185913085, 0.022517375946044922, 0.022487232208251953, 0.02251590347290039, 0.02256844711303711, 0.02265513610839844, 0.022577791213989257, 0.02250067138671875, 0.02255708885192871, 0.02246147155761719, 0.02241017532348633, 0.022448160171508788, 0.022489343643188477, 0.02251136016845703, 0.0225218563079834, 0.022566911697387695, 0.02247212791442871, 0.02247520065307617, 0.02252579116821289, 0.022565055847167968, 0.022528255462646484, 0.022540128707885743, 0.022437887191772463, 0.022435392379760742, 0.022302944183349608, 0.022919391632080077, 0.022318527221679686, 0.02219475173950195, 0.02275315284729004, 0.022401151657104493, 0.02232524871826172, 0.022568960189819336, 0.02229769515991211, 0.02229136085510254, 0.02224742317199707, 0.022640159606933594, 0.022527616500854494, 0.02231942367553711, 0.02238108825683594, 0.022600704193115235, 0.02220953559875488, 0.022145023345947267, 0.02217103958129883, 0.023005216598510743, 0.02219865608215332, 0.022253984451293944, 0.02233750343322754, 0.022273120880126954, 0.022188352584838866, 0.022233503341674805, 0.022373632431030275, 0.022268672943115235, 0.022425600051879883, 0.022386688232421875, 0.024972448348999022, 0.025092191696166992, 0.022479616165161132, 0.022337535858154296, 0.02231609535217285, 0.022350048065185545, 0.022199008941650392, 0.022312959671020507, 0.02235990333557129, 0.022578815460205078, 0.022491680145263673, 0.022392831802368163, 0.022484384536743163, 0.022585472106933593, 0.022420095443725585, 0.02253100776672363, 0.02290780830383301, 0.022331680297851562, 0.022412960052490234, 0.022402368545532226, 0.022533056259155273, 0.024024896621704102, 0.022548479080200197, 0.022321151733398437, 0.02229859161376953, 0.02220649528503418, 0.022116352081298828, 0.02232524871826172, 0.022297792434692383, 0.02234783935546875, 0.022518623352050782, 0.022560672760009767, 0.022368255615234374, 0.022431167602539062, 0.02223801612854004, 0.022249216079711913, 0.02274239921569824, 0.022651519775390625, 0.02308723258972168, 0.022277536392211913, 0.022319583892822265, 0.022361728668212892, 0.022464351654052736, 0.02269264030456543, 0.02287811279296875, 0.022864383697509767, 0.022964128494262694, 0.022376575469970704, 0.022304031372070314, 0.02255308723449707, 0.022284255981445313, 0.02246668815612793, 0.02228175926208496, 0.02241584014892578, 0.02252191925048828, 0.022368127822875977, 0.022408960342407226, 0.0223621768951416, 0.022710527420043945, 0.02265292739868164, 0.022626367568969727, 0.02250921630859375, 0.022499296188354494, 0.022346048355102538, 0.022544384002685547, 0.02266726493835449, 0.022366207122802736, 0.022562816619873048, 0.022240896224975586, 0.02227187156677246, 0.022333951950073243, 0.022609535217285155, 0.022434175491333006, 0.02248054313659668, 0.022440288543701174, 0.023364992141723634, 0.022852191925048827, 0.022560800552368164, 0.022748800277709962, 0.022573247909545898, 0.022843584060668946, 0.022380544662475587, 0.022525951385498046, 0.022451744079589844, 0.022538368225097655, 0.022575456619262694, 0.022595584869384764, 0.02234982490539551, 0.022384767532348634, 0.022437759399414062, 0.022373567581176756, 0.022368255615234374, 0.022293312072753906, 0.02219935989379883, 0.022502496719360353, 0.022394720077514647, 0.02270582389831543, 0.02264486312866211, 0.022612096786499024, 0.022550752639770508, 0.02269094467163086, 0.022491647720336915, 0.022640863418579103, 0.022560543060302734, 0.022491039276123045, 
0.022359935760498047, 0.022863775253295898, 0.022794271469116213, 0.022450815200805663, 0.022442272186279297, 0.022409215927124023, 0.022398080825805664, 0.02250022315979004, 0.022517759323120116, 0.022699487686157226, 0.022351423263549806, 0.022582239151000976, 0.022462528228759767, 0.022578176498413087, 0.022229215621948243, 0.02230713653564453, 0.022554399490356446, 0.02263104057312012, 0.022570592880249023, 0.02236662483215332, 0.022818080902099608, 0.022672096252441407, 0.022431039810180665, 0.0221942081451416, 0.02225014305114746, 0.022384384155273437, 0.022288671493530275, 0.022515039443969726, 0.022215295791625976, 0.021952512741088868, 0.022160736083984375, 0.022164127349853516, 0.022161279678344727, 0.02238489532470703, 0.022327167510986328, 0.022328575134277343, 0.02227180862426758, 0.022338752746582032, 0.02242108726501465, 0.022263519287109373, 0.022431808471679686, 0.022360448837280274, 0.022208160400390625, 0.02227849578857422, 0.022427167892456055, 0.022335968017578124, 0.02224127960205078, 0.02224332809448242, 0.022284000396728516, 0.022616352081298828, 0.022175647735595702, 0.022126527786254884, 0.02228780746459961, 0.02262499237060547, 0.02229452705383301, 0.02226806449890137, 0.0226628475189209, 0.022620319366455078, 0.022394880294799805, 0.022635616302490235, 0.022320032119750977, 0.022357471466064455, 0.02267353630065918, 0.02277177619934082, 0.025272544860839845, 0.022632448196411133, 0.02230659294128418, 0.022427871704101564, 0.02225971221923828, 0.023480512619018554, 0.02256057548522949, 0.022548479080200197, 0.023313695907592774, 0.023298816680908205, 0.022648799896240236, 0.022691839218139647, 0.022256895065307616, 0.022372480392456054, 0.022512256622314455, 0.022495231628417968, 0.022515359878540038, 0.022368864059448244, 0.02234956741333008, 0.022259359359741212, 0.02231532859802246, 0.022210432052612306, 0.02226806449890137, 0.02228416061401367, 0.022386816024780272, 0.022390495300292967, 0.02241155242919922, 0.02241935920715332, 0.023754463195800782, 0.022544544219970705, 0.022327520370483397, 0.022521888732910156, 0.022474912643432616, 0.02241107177734375, 0.022539775848388673, 0.02275391960144043, 0.02284476852416992, 0.023022111892700196, 0.022437183380126954, 0.022554912567138673, 0.022407583236694336, 0.022503423690795898, 0.022463584899902345, 0.022373407363891602, 0.022243200302124025, 0.022304767608642577, 0.022183935165405275, 0.022271999359130858, 0.022210559844970702, 0.022424896240234374, 0.02245907211303711, 0.022450176239013672, 0.02260540771484375, 0.022688287734985352, 0.022490016937255858, 0.02268608093261719, 0.022497119903564452, 0.02246339225769043, 0.02261180877685547, 0.0227346248626709, 0.022490400314331055, 0.02252854347229004, 0.022624128341674803, 0.022565408706665038, 0.022657024383544923, 0.022495391845703126, 0.02233283233642578, 0.02229702377319336, 0.022534143447875975, 0.022341600418090822, 0.022537887573242186, 0.02228236770629883, 0.022968576431274413, 0.02251571273803711, 0.02247065544128418, 0.02268547248840332, 0.02269206428527832, 0.022587392807006838, 0.022722560882568358, 0.02266726493835449, 0.022719680786132814, 0.02303673553466797, 0.022672992706298828, 0.022471071243286133, 0.022478847503662108, 0.022840768814086913, 0.02250809669494629, 0.02248908805847168, 0.022437599182128905, 0.022579488754272462, 0.022392831802368163, 0.02225584030151367, 0.02253004837036133, 0.022480224609375, 0.022405567169189452, 0.02262544059753418, 0.0227675838470459, 0.02242425537109375, 0.022464704513549805, 0.02246246337890625, 
0.022351871490478514, 0.022474496841430665, 0.02247929573059082, 0.022660928726196287, 0.022536191940307617, 0.02266867256164551, 0.022364416122436524, 0.02249964714050293, 0.022644800186157228, 0.02253004837036133, 0.022460128784179686, 0.022609216690063477, 0.022483871459960936, 0.022917184829711914, 0.022697984695434572, 0.022564128875732423, 0.022813568115234373, 0.022644479751586913, 0.022595680236816407, 0.02266316795349121, 0.022529247283935547, 0.02293552017211914, 0.022534944534301757, 0.02246806335449219, 0.022476703643798827, 0.022393503189086915, 0.02245427131652832, 0.02252390480041504, 0.02214908790588379, 0.022208160400390625, 0.022213247299194334, 0.022179840087890625, 0.022192256927490234, 0.022349279403686525, 0.022572736740112304, 0.022285024642944337, 0.022385887145996094, 0.022338336944580078, 0.022181631088256836, 0.022261983871459962, 0.02237001609802246, 0.022442304611206054, 0.02231920051574707, 0.02249648094177246, 0.022245983123779296, 0.022427743911743164, 0.02240719985961914, 0.022427616119384767, 0.022489152908325195, 0.022650304794311522, 0.022202239990234376, 0.022321792602539064, 0.022441984176635742, 0.022375904083251952, 0.022397024154663086, 0.02244054412841797, 0.022560352325439452, 0.023028064727783203, 0.022863775253295898, 0.02250752067565918, 0.022357471466064455, 0.022280736923217772, 0.022365440368652345, 0.02226576042175293, 0.022055776596069335, 0.02249907112121582, 0.02227984046936035, 0.022280799865722657, 0.022343103408813476, 0.02245088005065918, 0.022400896072387697, 0.02243708801269531, 0.02226793670654297, 0.02228652763366699, 0.022125024795532227, 0.022320608139038085, 0.022725248336791994, 0.02212588882446289, 0.022538944244384764, 0.022214656829833986, 0.02215260887145996, 0.02241187286376953, 0.02221414375305176, 0.022216320037841797, 0.02213702392578125, 0.022358720779418945, 0.0221200008392334, 0.022260160446166993, 0.022261760711669923, 0.02208745574951172, 0.022571008682250978, 0.022358015060424806, 0.02247475242614746, 0.022378335952758788, 0.022467744827270507, 0.022923391342163087, 0.022272895812988282, 0.022169727325439453, 0.022415008544921875, 0.022604000091552733, 0.02230793571472168, 0.022750400543212892, 0.022379743576049806, 0.02245622444152832, 0.0224935359954834, 0.022343616485595703, 0.022314592361450194, 0.022883039474487304, 0.022486560821533202, 0.022409696578979493, 0.02230259132385254, 0.022560127258300783, 0.022313119888305664, 0.022299232482910155, 0.0222608642578125, 0.022455167770385743, 0.02240278434753418, 0.022325632095336913, 0.022343807220458985, 0.02250934410095215, 0.022417407989501953, 0.022757375717163086, 0.022517759323120116, 0.02241535949707031, 0.022444032669067384, 0.022457632064819336, 0.0224672966003418, 0.022482847213745116, 0.022468704223632813, 0.022755392074584962, 0.022482112884521486, 0.022532640457153322, 0.022409439086914062, 0.022939327239990235, 0.02233718490600586, 0.022291040420532225, 0.022228960037231446, 0.022476383209228516, 0.02271683120727539, 0.022323295593261717, 0.0224849910736084, 0.022470016479492188, 0.022479488372802735, 0.022619935989379884, 0.024002111434936524, 0.022565120697021483, 0.023007232666015624, 0.02250912094116211, 0.022410079956054686, 0.022503423690795898, 0.02255619239807129, 0.022624671936035155, 0.0225546875]",tokens/s,44.504178423785426,, 
bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.238592,6223.233024,0.0,5827.985408,5712.718848,s,1,7.54695166015625,7.54695166015625,0.0,7.54695166015625,7.54695166015625,7.54695166015625,7.54695166015625,[7.54695166015625],,kWh,1.0930174412499126e-05,1.1981091724824766e-06,4.701948206002615e-06,1.683023179098422e-05,,MB,1111.724032,6451.822592,0.0,6046.089216,5989.425664,s,10,5.286087219238281,0.5286087219238281,0.002321793649104793,0.5290133361816407,0.5308089782714844,0.531027963256836,0.5312031512451172,"[0.5225958251953124, 0.52860546875, 0.5275892333984376, 0.5301387939453125, 0.5295997314453125, 0.5275242309570313, 0.5293984375, 0.5286282348632813, 0.5307603149414063, 0.5312469482421875]",tokens/s,484.29015523676753,kWh,1.5522173169080033e-05,1.7117404982930236e-06,1.0341382542105052e-05,2.757529620947811e-05,tokens/kWh,9283671.807376934,MB,1137.668096,6514.737152,0.0,6109.003776,6090.851328,s,10,20.916714111328126,2.0916714111328125,0.006449218288308434,2.09066162109375,2.1005560058593753,2.101516650390625,2.102285166015625,"[2.08726904296875, 2.081265380859375, 2.084882080078125, 2.09225341796875, 2.08906982421875, 2.088210693359375, 2.096529296875, 2.09441455078125, 2.102477294921875, 2.100342529296875]",tokens/s,30.119453593277495,kWh,6.068127666216896e-05,6.6933406774990235e-06,4.018646343569547e-05,0.00010756108077536346,tokens/kWh,585713.7130443371,,s,630,20.91351463317871,0.03319605497329954,0.0003447476206661443,0.03314238357543945,0.033563927841186524,0.03374415397644043,0.03473555046081543,"[0.0346644172668457, 0.03373174285888672, 0.03311088180541992, 0.032956447601318356, 0.03288854217529297, 0.03291366577148438, 0.03288684844970703, 0.032904319763183594, 0.03283145523071289, 0.033006431579589844, 0.033435329437255856, 0.03340934371948242, 0.03290457534790039, 0.032905887603759766, 0.03291545486450195, 0.03283990478515625, 0.03289583969116211, 0.0328480339050293, 0.03286297607421875, 0.03293398284912109, 0.03310291290283203, 0.03321331024169922, 0.03304035186767578, 0.033058815002441407, 0.03306905746459961, 0.033058559417724606, 0.0330590705871582, 0.03303395080566406, 0.0330937614440918, 0.033147041320800784, 0.03329228973388672, 0.033277950286865234, 0.03304857635498047, 0.03302918243408203, 0.03296761703491211, 0.033058815002441407, 0.03318374252319336, 0.0332485122680664, 0.03301862335205078, 0.033132190704345706, 0.033091934204101565, 0.03320217514038086, 0.033159168243408206, 0.03303593444824219, 0.03306710433959961, 0.033245311737060544, 0.03321219253540039, 0.033144416809082033, 0.033140830993652344, 0.033102527618408206, 0.03308540725708008, 0.03312639999389649, 0.03320217514038086, 0.03317708969116211, 0.0331514892578125, 0.03316659164428711, 0.03310464096069336, 0.03313180923461914, 0.03322544097900391, 0.0334552001953125, 0.03326249694824219, 0.03326092910766602, 0.03325750350952148, 0.03412057495117188, 0.03347763061523437, 
0.03413993453979492, 0.03299964904785156, 0.032835582733154296, 0.03278652954101562, 0.03289075088500976, 0.03295849609375, 0.032958335876464843, 0.03279430389404297, 0.032841793060302736, 0.03278041458129883, 0.0328111686706543, 0.03270207977294922, 0.032647647857666016, 0.032704479217529295, 0.03279289627075195, 0.03269398498535156, 0.03277004623413086, 0.032815006256103514, 0.03281110382080078, 0.03290697479248047, 0.032792865753173826, 0.03286227035522461, 0.03290924835205078, 0.03297484970092773, 0.032958240509033204, 0.03303433609008789, 0.03300979232788086, 0.03334707260131836, 0.033323360443115235, 0.03318742370605469, 0.033185760498046876, 0.03328224182128906, 0.033251777648925784, 0.03302601623535156, 0.03299123382568359, 0.03292559814453125, 0.032845920562744144, 0.03288063812255859, 0.03284172821044922, 0.03291664123535156, 0.032885601043701175, 0.03296390533447266, 0.0329911994934082, 0.033027935028076175, 0.032981246948242185, 0.03294476699829101, 0.03302134323120117, 0.03292745590209961, 0.03291545486450195, 0.03314777755737305, 0.03300131225585937, 0.03313036727905273, 0.03328841781616211, 0.03325049591064453, 0.033121150970458986, 0.03308240127563476, 0.03323344039916992, 0.033579456329345704, 0.033261566162109374, 0.033186912536621094, 0.03325337600708008, 0.03483504104614258, 0.03366851043701172, 0.033057376861572264, 0.03282876968383789, 0.03278876876831055, 0.032866687774658204, 0.0327086067199707, 0.03273523330688476, 0.03268150329589844, 0.0326517448425293, 0.03269222259521484, 0.03263488006591797, 0.03274947357177734, 0.03284182357788086, 0.032866302490234374, 0.03283679962158203, 0.03276227188110352, 0.032766368865966795, 0.03275980758666992, 0.032785633087158206, 0.03287033462524414, 0.032783199310302734, 0.032958110809326174, 0.033443519592285156, 0.0329152946472168, 0.03288761520385742, 0.03294617462158203, 0.0329986572265625, 0.03314163208007812, 0.033107616424560546, 0.03309769439697265, 0.03311008071899414, 0.033199905395507816, 0.03368387222290039, 0.033027294158935544, 0.033142623901367185, 0.033164222717285155, 0.03310182571411133, 0.033157119750976564, 0.03319807815551758, 0.03292940902709961, 0.03304486465454102, 0.03298009490966797, 0.03306489562988281, 0.033063201904296874, 0.032997440338134766, 0.033065567016601564, 0.03308745574951172, 0.03315100860595703, 0.03362815856933594, 0.03298303985595703, 0.03314688110351562, 0.033392574310302736, 0.03329017639160156, 0.03316044616699219, 0.03317644882202148, 0.03328409576416016, 0.03356991958618164, 0.03341721725463867, 0.03358972930908203, 0.033339710235595704, 0.03355420684814453, 0.03321001434326172, 0.034756607055664065, 0.033670238494873043, 0.03304262542724609, 0.03287424087524414, 0.03284435272216797, 0.032865985870361325, 0.03284860610961914, 0.033023998260498046, 0.03377910232543945, 0.03296112060546875, 0.03295846557617187, 0.03299942398071289, 0.03294972610473633, 0.03289961624145508, 0.03284915161132813, 0.03289891052246094, 0.03287542343139648, 0.0331038703918457, 0.03307136154174805, 0.033083393096923826, 0.03305036926269531, 0.032903167724609376, 0.03285942459106445, 0.03283222579956055, 0.03294345474243164, 0.03314748764038086, 0.033062976837158205, 0.03313049697875976, 0.033232383728027344, 0.033417598724365234, 0.03349331283569336, 0.03329625701904297, 0.03362575912475586, 0.03313059234619141, 0.03301007843017578, 0.03293683242797851, 0.0330269775390625, 0.03323494338989258, 0.033130016326904294, 0.033098369598388674, 0.03313443374633789, 0.03319718551635742, 0.03323788833618164, 
0.033345535278320314, 0.03339571380615235, 0.033205249786376956, 0.03314467239379883, 0.03329244613647461, 0.03317750549316406, 0.03317891311645508, 0.03314771270751953, 0.03317724609375, 0.0336448974609375, 0.0333496322631836, 0.033263614654541016, 0.03339433670043945, 0.03337027359008789, 0.03347270584106445, 0.03363772964477539, 0.03380438232421875, 0.033538623809814455, 0.033495040893554685, 0.0334194221496582, 0.034786399841308595, 0.0339851188659668, 0.033372478485107424, 0.03309568023681641, 0.03301375961303711, 0.03279673767089844, 0.033261505126953125, 0.03282944107055664, 0.03278031921386719, 0.03283305740356445, 0.03292745590209961, 0.03279536056518555, 0.0328089599609375, 0.03299327850341797, 0.03306291198730469, 0.03294617462158203, 0.03289702224731445, 0.03279872131347656, 0.032763904571533206, 0.03296041488647461, 0.03303004837036133, 0.033175167083740235, 0.033101406097412106, 0.03356361770629883, 0.033253246307373044, 0.0330937614440918, 0.032892929077148435, 0.03299532699584961, 0.03361740875244141, 0.03319039916992188, 0.03499359893798828, 0.0331454086303711, 0.03320627212524414, 0.03334572982788086, 0.03323782348632812, 0.03299430465698242, 0.032997184753417966, 0.03290361785888672, 0.032906017303466796, 0.032850528717041014, 0.032833919525146485, 0.032890911102294924, 0.03291952133178711, 0.03292364883422851, 0.0329411849975586, 0.03311094284057617, 0.0332369613647461, 0.03330035018920898, 0.03319411087036133, 0.033062400817871096, 0.03306108856201172, 0.03304476928710937, 0.03313071823120117, 0.03314467239379883, 0.03315008163452148, 0.033395294189453126, 0.0331286735534668, 0.03320832061767578, 0.03345427322387695, 0.033400863647460935, 0.03343337631225586, 0.03335699081420898, 0.033232769012451174, 0.034336734771728515, 0.033591327667236326, 0.03292127990722656, 0.032930110931396486, 0.03280691146850586, 0.0331893424987793, 0.03295929718017578, 0.03282304000854492, 0.03278041458129883, 0.03273043060302734, 0.0327685432434082, 0.03267900848388672, 0.03280579376220703, 0.03274342346191406, 0.03281305694580078, 0.03268972778320312, 0.03284566497802734, 0.03278908920288086, 0.03286969757080078, 0.03294892883300781, 0.032882686614990234, 0.03295795059204101, 0.03296307373046875, 0.03303219223022461, 0.03301686477661133, 0.03299407958984375, 0.033005470275878905, 0.03311030578613281, 0.03322995376586914, 0.03330342483520508, 0.033371326446533206, 0.03308217620849609, 0.033076351165771484, 0.03305971145629883, 0.0333251838684082, 0.03315727996826172, 0.033125312805175784, 0.033118305206298826, 0.033239742279052735, 0.0332327995300293, 0.03335382461547851, 0.03323260879516601, 0.0331280632019043, 0.0330667839050293, 0.033315296173095706, 0.03325788879394531, 0.03325155258178711, 0.03319305419921875, 0.03341110229492188, 0.03338499069213867, 0.03320230484008789, 0.03342326354980469, 0.03337839889526367, 0.03309088134765625, 0.03315987014770508, 0.03323075103759766, 0.03348489761352539, 0.033277950286865234, 0.03352492904663086, 0.0332927360534668, 0.033720703125, 0.033380352020263675, 0.033819904327392576, 0.03460748672485352, 0.0335906867980957, 0.033067615509033206, 0.03298918533325195, 0.03309590530395508, 0.033021728515625, 0.03294131088256836, 0.03297148895263672, 0.03291120147705078, 0.032970943450927735, 0.03298099136352539, 0.03295353698730469, 0.03295743942260742, 0.03300070571899414, 0.032879169464111326, 0.03287859344482422, 0.03301299285888672, 0.03294486236572266, 0.03323635101318359, 0.03301033782958984, 0.03294585418701172, 0.03303456115722656, 
0.033019134521484375, 0.03312460708618164, 0.03317097473144531, 0.033003616333007815, 0.03300748825073242, 0.033062015533447266, 0.033216350555419924, 0.033552417755126955, 0.03336739349365234, 0.03354000091552734, 0.03331078338623047, 0.033333953857421876, 0.03337814331054687, 0.034025630950927734, 0.03321855926513672, 0.03313423919677734, 0.033083518981933596, 0.03327772903442383, 0.03452972793579102, 0.033226303100585934, 0.0331569595336914, 0.03317583847045898, 0.03330847930908203, 0.033219070434570314, 0.03390195083618164, 0.033979007720947266, 0.03343155288696289, 0.03343900680541992, 0.03319881439208985, 0.033181697845458984, 0.033268993377685546, 0.033344257354736326, 0.0332410888671875, 0.033588958740234376, 0.033322975158691405, 0.0332817268371582, 0.03349798583984375, 0.033328575134277345, 0.033382015228271486, 0.03340924835205078, 0.03392777633666992, 0.035089534759521486, 0.03408512115478515, 0.03329500961303711, 0.033158878326416015, 0.03313488006591797, 0.03288883209228516, 0.032917503356933595, 0.032919551849365236, 0.0329150390625, 0.03288310241699219, 0.03299532699584961, 0.033068416595458984, 0.03313257598876953, 0.033122081756591794, 0.03312022399902344, 0.03299728012084961, 0.03302406311035156, 0.033046817779541014, 0.033043041229248046, 0.033121952056884764, 0.033046558380126954, 0.032906719207763675, 0.032965473175048825, 0.03297484970092773, 0.03308700942993164, 0.03296099090576172, 0.03305814361572266, 0.0331168327331543, 0.03309097671508789, 0.03307785415649414, 0.03375059127807617, 0.033184192657470704, 0.033208511352539063, 0.03359955215454102, 0.033259456634521484, 0.03313782501220703, 0.03305539321899414, 0.033087646484375, 0.03297264099121094, 0.033132545471191405, 0.03312188720703125, 0.03308585739135742, 0.03317964935302734, 0.03309363174438477, 0.033124351501464845, 0.03314604949951172, 0.033194080352783206, 0.03323958587646485, 0.03342729568481445, 0.03336431884765625, 0.03326342391967774, 0.03319561767578125, 0.03322531127929688, 0.033236927032470706, 0.033386878967285157, 0.03318751907348633, 0.03322265625, 0.03344998550415039, 0.03339878463745117, 0.03411558532714844, 0.03395779037475586, 0.03424854278564453, 0.03361753463745117, 0.03513507080078125, 0.0338436164855957, 0.03355263900756836, 0.0332710075378418, 0.03310441589355469, 0.03313840103149414, 0.03324860763549805, 0.03308745574951172, 0.03301196670532226, 0.03307136154174805, 0.033692127227783204, 0.03321395111083984, 0.03307980728149414, 0.033355777740478515, 0.033191776275634764, 0.03306089782714844, 0.03326959991455078, 0.03310316848754883, 0.03320876693725586, 0.033161758422851566, 0.03307855987548828, 0.03309641647338867, 0.03321241760253906, 0.03364044952392578, 0.033142143249511716, 0.033245918273925784, 0.033255329132080076, 0.03332207870483399, 0.03332150268554687, 0.033656993865966794, 0.03338671875, 0.03347478485107422, 0.03352143859863281, 0.03347455978393555, 0.033306625366210936, 0.033261600494384765, 0.033381534576416017, 0.03324601745605469, 0.033102977752685545, 0.03315299224853516, 0.03331961441040039, 0.03344406509399414, 0.03333232116699219, 0.03336431884765625, 0.03333587265014649, 0.03325337600708008, 0.03329600143432617, 0.03331216049194336, 0.03319087982177735, 0.03312633514404297, 0.03323052978515625, 0.03359577560424805, 0.033304576873779294, 0.033363006591796876, 0.034683998107910154, 0.03353785705566406, 0.033157024383544925, 0.033277057647705076, 0.033653759002685545, 0.033465438842773435, 0.0336530876159668, 0.03373072052001953, 0.033401153564453126, 
0.03485955047607422, 0.03357500839233398, 0.033286048889160154, 0.03316454315185547, 0.0330043830871582, 0.03305628967285156, 0.03305305480957031, 0.032881759643554685, 0.03303926467895508, 0.03318560028076172, 0.03313273620605469, 0.032985088348388675, 0.03300742340087891, 0.033027359008789066, 0.032956832885742186, 0.03291350555419922, 0.03288025665283203, 0.032946975708007815, 0.03299711990356445, 0.03320857620239258, 0.033189888000488284, 0.03327350234985352, 0.03318790435791016, 0.03309372711181641, 0.03320969772338867, 0.033164127349853516, 0.03320627212524414, 0.033274078369140626, 0.03347135925292969, 0.033491008758544924, 0.033501697540283204, 0.03369152069091797, 0.03331343841552734, 0.03340268707275391, 0.03335907363891601, 0.03317225646972656, 0.03317379379272461, 0.033508190155029295, 0.03332185745239258, 0.03330223846435547, 0.03336220932006836, 0.03324860763549805, 0.03318236923217773, 0.03330192184448242, 0.03348080062866211, 0.034129886627197265, 0.03324067306518555, 0.03355244827270508, 0.033483905792236326, 0.03344563293457031, 0.033271808624267575, 0.03326736068725586, 0.03365923309326172, 0.0332677116394043, 0.03356671905517578, 0.0333656005859375, 0.033736286163330076, 0.033407905578613284, 0.033771327972412106, 0.03390047836303711, 0.03364988708496094, 0.033455039978027346, 0.03375619125366211]",tokens/s,30.12406145261316,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 87125 has 14.69 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 1.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.268032,1903.099904,0.0,1507.852288,1469.840384,s,1,7.54557958984375,7.54557958984375,0.0,7.54557958984375,7.54557958984375,7.54557958984375,7.54557958984375,[7.54557958984375],,kWh,9.880303533331396e-06,1.08249113978262e-06,4.201947805995698e-06,1.5164742479109715e-05,,MB,1141.8624,1942.945792,0.0,1537.212416,1426.272256,s,10,0.9013761444091797,0.09013761444091797,0.0016001539587170693,0.08954978942871095,0.09113024673461914,0.09279881935119628,0.09413367744445801,"[0.09446739196777344, 0.0894210205078125, 0.08925401306152343, 0.08892195129394531, 0.08967855834960937, 0.09071209716796876, 0.09034918212890625, 0.08910758209228516, 0.09075945281982421, 0.08870489501953124]",tokens/s,2840.1017886689133,kWh,3.001324800595198e-06,3.309910265364032e-07,1.9376659492245198e-06,5.269981776356121e-06,tokens/kWh,48577018.074056566,MB,1163.390976,1953.431552,0.0,1547.698176,1426.274816,s,10,15.022645507812502,1.5022645507812498,0.01031252672261329,1.50402294921875,1.5139361694335938,1.5142273620605469,1.5144603161621093,"[1.5039227294921875, 1.480812255859375, 1.4960413818359375, 1.490408935546875, 1.5138714599609375, 1.5145185546875, 1.512547119140625, 1.506765869140625, 1.5041231689453125, 1.499634033203125]",tokens/s,41.93668816004276,kWh,4.3225946373989707e-05,4.767480243062044e-06,2.0830685598775315e-05,6.882411221582706e-05,tokens/kWh,915376.8638880063,,s,630,15.020117973327645,0.023841457100520057,0.00040457175319861285,0.023801039695739747,0.024200953865051267,0.02438948554992676,0.02525592248916626,"[0.024852064132690428, 0.02422825622558594, 0.02429481506347656, 0.024156768798828124, 0.024006656646728516, 0.023957504272460937, 0.024294912338256838, 0.024455680847167968, 0.02365763282775879, 0.023536415100097657, 0.02394291114807129, 0.023818559646606445, 0.023776319503784178, 0.0238351993560791, 0.02362406349182129, 0.023688383102416992, 0.023827264785766602, 0.024055328369140625, 0.023734272003173826, 0.02416819190979004, 0.023736127853393553, 0.02375369644165039, 0.023872608184814452, 0.026456415176391603, 0.02418943977355957, 0.024086528778076172, 0.02422915267944336, 0.026542816162109375, 0.023611488342285155, 0.023537664413452147, 0.023500064849853515, 0.023700096130371093, 0.023640064239501952, 0.02365235137939453, 0.02338128089904785, 0.023447872161865235, 0.02354422378540039, 0.023480319976806642, 0.023801023483276368, 0.023806783676147462, 0.02365644836425781, 0.023564287185668945, 0.02348646354675293, 0.023533567428588868, 0.023565343856811524, 0.023419008255004883, 0.02376585578918457, 0.023390111923217775, 0.025808992385864257, 0.023408063888549803, 0.023433792114257813, 0.023416831970214845, 0.023480319976806642, 
0.02348441505432129, 0.023752704620361328, 0.023813119888305666, 0.02369193649291992, 0.023557567596435548, 0.023382720947265626, 0.02354198455810547, 0.023463935852050782, 0.02376412773132324, 0.02343612861633301, 0.024520671844482422, 0.0238985595703125, 0.024315488815307616, 0.02368284797668457, 0.023501184463500975, 0.023414047241210937, 0.023410879135131835, 0.023374624252319336, 0.02328335952758789, 0.023163232803344726, 0.023373376846313475, 0.023466432571411133, 0.02323865509033203, 0.023355392456054686, 0.023635551452636717, 0.023787935256958007, 0.023557376861572266, 0.023819007873535157, 0.02378447914123535, 0.0237291202545166, 0.02350694465637207, 0.02350809669494629, 0.023184255599975587, 0.02323187255859375, 0.023411199569702147, 0.023406047821044922, 0.023384735107421874, 0.023341056823730468, 0.0236910400390625, 0.023423200607299806, 0.0230830078125, 0.023504896163940428, 0.023980031967163085, 0.02371174430847168, 0.02355200004577637, 0.0231910400390625, 0.023260799407958985, 0.023427967071533204, 0.02324662399291992, 0.023134431838989257, 0.023355392456054686, 0.023549312591552733, 0.023511680603027343, 0.023525375366210938, 0.023721536636352538, 0.023814592361450195, 0.023623680114746092, 0.023513088226318358, 0.02369126319885254, 0.023572479248046875, 0.023447519302368165, 0.023271455764770507, 0.023130111694335938, 0.02314035224914551, 0.023244096755981446, 0.02318582344055176, 0.023439647674560547, 0.023447551727294923, 0.023470048904418946, 0.023547103881835937, 0.023543775558471678, 0.023669599533081054, 0.023670783996582033, 0.024637439727783202, 0.023605247497558594, 0.023558143615722657, 0.023611391067504883, 0.023580671310424805, 0.023615488052368162, 0.023574399948120116, 0.02502822494506836, 0.024299072265625, 0.023546016693115235, 0.023716032028198244, 0.023717727661132813, 0.023950271606445313, 0.02384671974182129, 0.023476224899291992, 0.023442527770996095, 0.02343619155883789, 0.02434048080444336, 0.023770591735839845, 0.023618303298950195, 0.023850784301757813, 0.023573759078979493, 0.023341312408447265, 0.023451776504516603, 0.02421798324584961, 0.023754751205444336, 0.02405311965942383, 0.023831008911132812, 0.023714975357055666, 0.02383091163635254, 0.023883424758911132, 0.023797887802124024, 0.023755615234375, 0.02386124801635742, 0.02396675109863281, 0.02373516845703125, 0.02371183967590332, 0.023813663482666017, 0.023732704162597658, 0.02359059143066406, 0.023607616424560548, 0.02348646354675293, 0.024089696884155274, 0.0238703670501709, 0.023610431671142577, 0.023579584121704102, 0.023779327392578126, 0.02372812843322754, 0.023740415573120118, 0.023631872177124022, 0.023785472869873047, 0.02352332878112793, 0.02352895927429199, 0.023427391052246095, 0.023637760162353517, 0.02370787239074707, 0.02410927963256836, 0.023607295989990236, 0.02349465560913086, 0.02360316848754883, 0.023409696578979493, 0.02344403266906738, 0.02356268882751465, 0.02451046371459961, 0.02348646354675293, 0.02364601516723633, 0.02332896041870117, 0.023513088226318358, 0.023595008850097656, 0.023392255783081056, 0.023474111557006835, 0.02335955238342285, 0.023558143615722657, 0.02341993522644043, 0.023361759185791017, 0.0233110408782959, 0.02361555290222168, 0.023412736892700195, 0.023357440948486328, 0.023371776580810546, 0.02367660713195801, 0.023428800582885743, 0.0237574405670166, 0.023330816268920897, 0.023387584686279297, 0.023500383377075194, 0.02341747283935547, 0.023462015151977537, 0.023574176788330077, 0.02367340850830078, 0.023562240600585937, 0.023963455200195313, 
0.02351532745361328, 0.023694559097290038, 0.024437311172485352, 0.023382495880126954, 0.02346143913269043, 0.023541952133178713, 0.02384048080444336, 0.02358278465270996, 0.023361759185791017, 0.023644128799438478, 0.0233984317779541, 0.023512800216674803, 0.023385536193847655, 0.023598207473754882, 0.023793376922607423, 0.023789567947387694, 0.023418880462646483, 0.023448991775512695, 0.02467286491394043, 0.023975936889648438, 0.02371075248718262, 0.02362246322631836, 0.0238573112487793, 0.02373222351074219, 0.02360483169555664, 0.02414224052429199, 0.024260608673095704, 0.02524985694885254, 0.023766016006469725, 0.02343212890625, 0.023394304275512694, 0.023553024291992186, 0.024290559768676757, 0.023668479919433594, 0.024482336044311524, 0.023914016723632813, 0.027047840118408203, 0.023898880004882814, 0.023870559692382814, 0.02419580841064453, 0.024005983352661135, 0.02396022415161133, 0.023855104446411132, 0.024151744842529296, 0.023976255416870117, 0.024004608154296874, 0.023877504348754883, 0.024145183563232423, 0.023827295303344726, 0.02377628707885742, 0.02399945640563965, 0.02387331199645996, 0.02398579216003418, 0.02385686492919922, 0.02392563247680664, 0.02392278480529785, 0.023838623046875, 0.02445516777038574, 0.02374790382385254, 0.023749311447143553, 0.02394316864013672, 0.023844192504882813, 0.023968416213989256, 0.023911680221557617, 0.023823104858398437, 0.02391244888305664, 0.02400592041015625, 0.023876319885253905, 0.023961599349975587, 0.02394726371765137, 0.024170303344726564, 0.024234176635742188, 0.02388172721862793, 0.023807327270507814, 0.023994720458984375, 0.0238799991607666, 0.023715328216552735, 0.023926336288452147, 0.023968095779418944, 0.023877792358398438, 0.024109504699707032, 0.024223552703857423, 0.02459872055053711, 0.023932928085327147, 0.023901695251464843, 0.024283008575439455, 0.024006784439086912, 0.023795679092407228, 0.02388956832885742, 0.024142496109008788, 0.023959775924682618, 0.023911840438842775, 0.023972448348999024, 0.023941024780273438, 0.023838432312011718, 0.024108608245849608, 0.023964672088623046, 0.024815231323242187, 0.02404390335083008, 0.02390425682067871, 0.02394726371765137, 0.02401241683959961, 0.02406630325317383, 0.024004608154296874, 0.024166528701782226, 0.023903263092041015, 0.02394620704650879, 0.024391040802001954, 0.024068735122680665, 0.024619007110595705, 0.02387334442138672, 0.023967296600341796, 0.02399091148376465, 0.023862464904785156, 0.023937536239624024, 0.023828096389770508, 0.023898015975952147, 0.02416924858093262, 0.023879680633544922, 0.024196256637573244, 0.023876287460327147, 0.023916704177856445, 0.02386147117614746, 0.02435465621948242, 0.024108991622924805, 0.025121984481811525, 0.024613536834716798, 0.02463759994506836, 0.024009952545166014, 0.023895872116088866, 0.02431059265136719, 0.02419910430908203, 0.02419728088378906, 0.023920703887939453, 0.02392064094543457, 0.02393087959289551, 0.024018495559692384, 0.023988672256469726, 0.02383839988708496, 0.023943264007568358, 0.02383673667907715, 0.024028448104858397, 0.023714656829833983, 0.02396112060546875, 0.02376156806945801, 0.024032127380371093, 0.02378031921386719, 0.023965120315551758, 0.023955680847167968, 0.023797344207763672, 0.023781152725219728, 0.024282175064086912, 0.023967552185058593, 0.023963743209838868, 0.024039520263671874, 0.024047456741333007, 0.02406118392944336, 0.023698240280151366, 0.023760896682739258, 0.023672096252441405, 0.024681407928466795, 0.024115232467651366, 0.023858848571777343, 0.02416579246520996, 
0.023919551849365235, 0.024352767944335937, 0.02417020797729492, 0.02393641662597656, 0.023804800033569336, 0.02378982353210449, 0.02382179260253906, 0.02501251220703125, 0.026570751190185548, 0.024061952590942383, 0.024307296752929686, 0.025258399963378905, 0.024263776779174805, 0.02410179138183594, 0.023623680114746092, 0.02373222351074219, 0.023875583648681642, 0.023836671829223634, 0.023827615737915038, 0.02402390480041504, 0.02388582420349121, 0.023736160278320314, 0.023898271560668944, 0.024002464294433593, 0.02389948844909668, 0.024437503814697267, 0.024023040771484375, 0.024176704406738282, 0.02396972846984863, 0.024006496429443358, 0.023765439987182616, 0.023778112411499023, 0.02389289665222168, 0.023818239212036133, 0.023859199523925782, 0.023777280807495117, 0.023965696334838867, 0.023721567153930666, 0.023912031173706053, 0.023712223052978515, 0.02369366455078125, 0.024250207901000978, 0.023914560317993164, 0.02408563232421875, 0.023845439910888673, 0.02373878479003906, 0.023910400390625, 0.02393427276611328, 0.023683712005615233, 0.023912511825561523, 0.023856416702270507, 0.023716447830200195, 0.023648384094238282, 0.02368297576904297, 0.023778879165649414, 0.02411369514465332, 0.0236723518371582, 0.023711328506469728, 0.02374336051940918, 0.024385087966918944, 0.024039968490600586, 0.02404902458190918, 0.023900800704956055, 0.023787071228027343, 0.0243306884765625, 0.023918048858642578, 0.02387334442138672, 0.02386403274536133, 0.02363363265991211, 0.02376252746582031, 0.023724767684936525, 0.02370351982116699, 0.02353971290588379, 0.02388787269592285, 0.024100223541259767, 0.023925535202026366, 0.023867231369018554, 0.024225791931152343, 0.02379132843017578, 0.023914335250854492, 0.023699264526367187, 0.02392947196960449, 0.02382361602783203, 0.023841535568237305, 0.02379724884033203, 0.023675392150878907, 0.023879680633544922, 0.023834047317504884, 0.023862112045288087, 0.02382204818725586, 0.023778400421142577, 0.024135679244995118, 0.024091552734375, 0.0241395206451416, 0.023902463912963866, 0.023770559310913087, 0.02389846420288086, 0.02373244857788086, 0.023746559143066406, 0.02369068717956543, 0.02456550407409668, 0.02382931137084961, 0.023764768600463868, 0.02385532760620117, 0.023801055908203125, 0.023825183868408203, 0.024217599868774413, 0.02380307197570801, 0.02385001564025879, 0.024059680938720702, 0.024037376403808593, 0.023756799697875978, 0.023996416091918944, 0.023778432846069335, 0.02398624038696289, 0.024126367568969728, 0.023967647552490236, 0.023852096557617188, 0.02424233627319336, 0.02382863998413086, 0.02396019172668457, 0.024086528778076172, 0.024357471466064453, 0.024000511169433594, 0.024387584686279298, 0.024080671310424805, 0.02372371292114258, 0.023669824600219727, 0.023708639144897462, 0.02393087959289551, 0.02371993637084961, 0.02388991928100586, 0.023814144134521483, 0.023762016296386718, 0.02376563262939453, 0.023764511108398438, 0.02354457664489746, 0.023879680633544922, 0.023705408096313475, 0.023664831161499023, 0.02354956817626953, 0.023871231079101562, 0.024068735122680665, 0.023864543914794922, 0.023745023727416992, 0.023709983825683595, 0.023799871444702147, 0.026411136627197265, 0.024299167633056642, 0.024096927642822265, 0.02387331199645996, 0.023748735427856445, 0.02396995162963867, 0.024084415435791016, 0.02379292869567871, 0.0238189754486084, 0.02380326461791992, 0.024281728744506837, 0.02356393623352051, 0.023928159713745116, 0.024118207931518556, 0.023997600555419923, 0.023585695266723633, 0.023752511978149413, 
0.023719232559204103, 0.023589759826660155, 0.02395955276489258, 0.02369126319885254, 0.02364825630187988, 0.02368230438232422, 0.0237739200592041, 0.023377952575683595, 0.02385696029663086, 0.024391807556152344, 0.023620864868164063, 0.023446432113647463, 0.023719072341918945, 0.023997184753417968, 0.023813343048095702, 0.023834815979003908, 0.023660671234130858, 0.02369174385070801, 0.023635456085205078, 0.023947776794433592, 0.023732095718383788, 0.024270847320556642, 0.0237455997467041, 0.023673791885375977, 0.023611391067504883, 0.023603200912475586, 0.024223264694213868, 0.02384124755859375, 0.023747583389282227, 0.023540351867675783, 0.023878015518188477, 0.023856639862060547, 0.023692991256713865, 0.023608287811279296, 0.02364419174194336, 0.02389094352722168, 0.02349888038635254, 0.02364076805114746, 0.023780832290649413, 0.024172479629516602, 0.02366329574584961, 0.023625247955322264, 0.02356617546081543, 0.023954015731811523, 0.02391152000427246, 0.02376380729675293, 0.023777280807495117, 0.023817695617675782, 0.02358255958557129, 0.02390399932861328, 0.023786432266235353, 0.02373017692565918, 0.023841856002807617, 0.02381737518310547, 0.02405353546142578, 0.023707008361816405, 0.02385174369812012, 0.02389731216430664, 0.023720640182495117, 0.023650144577026366, 0.02403299140930176, 0.023998912811279298, 0.02388528060913086, 0.023849664688110353, 0.02370908737182617, 0.023589311599731447, 0.023934112548828126, 0.023666719436645507, 0.023733055114746094, 0.025133056640625, 0.02352332878112793, 0.02332467269897461, 0.02354380798339844, 0.024034400939941407, 0.023745439529418946, 0.023605247497558594, 0.023530752182006835, 0.024808191299438478, 0.023941152572631835, 0.023577632904052733, 0.023743423461914062, 0.02357801628112793, 0.02376969528198242, 0.023597055435180665]",tokens/s,41.94374512362278,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.673536,4675.534848,0.0,4280.287232,4115.121152,s,1,7.6016826171875,7.6016826171875,0.0,7.6016826171875,7.6016826171875,7.6016826171875,7.6016826171875,[7.6016826171875],,kWh,1.0649561300040963e-05,1.1669500490514966e-06,4.535559183990734e-06,1.6352070533083194e-05,,MB,1141.620736,4981.71904,0.0,4575.985664,4408.408064,s,10,3.0703495788574213,0.3070349578857422,0.0031113508746365526,0.30735542297363283,0.30992298278808594,0.3105379837036133,0.3110299844360352,"[0.30563629150390625, 0.3078035583496094, 0.30978631591796874, 0.305809814453125, 0.31115298461914065, 0.3053193359375, 0.3092148132324219, 0.30690728759765623, 0.29955322265625, 0.30916595458984375]",tokens/s,833.7812793788322,kWh,9.109229113762307e-06,1.0042019392834388e-06,6.0174627264245135e-06,1.6130893779470258e-05,tokens/kWh,15870168.355197433,MB,1163.149312,4981.71904,0.0,4575.985664,4408.410624,s,10,15.703744506835939,1.570374450683594,0.010764770164829358,1.5709315795898438,1.5774057006835938,1.5861736145019532,1.5931879455566407,"[1.5635303955078126, 
1.5949415283203126, 1.575457275390625, 1.567982177734375, 1.57343115234375, 1.5576832275390624, 1.5684320068359374, 1.5746732177734375, 1.5737548828125, 1.553858642578125]",tokens/s,40.117820289661296,kWh,4.525882735623948e-05,4.99226833244792e-06,2.9779704800174935e-05,8.003080048886235e-05,tokens/kWh,787196.9243737294,,s,630,15.701173805236804,0.024922498103550503,0.0004272402564130739,0.024874271392822264,0.02528845100402832,0.025565788555145262,0.026496753330230712,"[0.026102687835693358, 0.025196224212646483, 0.024997535705566405, 0.024463615417480468, 0.025049472808837892, 0.02510438346862793, 0.025030656814575194, 0.02476032066345215, 0.024825344085693358, 0.02540390396118164, 0.02489958381652832, 0.024928255081176756, 0.024862016677856445, 0.024836799621582032, 0.024797183990478516, 0.025020639419555665, 0.026484415054321288, 0.024778112411499024, 0.024988384246826173, 0.024788991928100586, 0.024749536514282227, 0.024975904464721678, 0.02467840003967285, 0.024720415115356446, 0.024675296783447265, 0.02478451156616211, 0.0245863037109375, 0.024650047302246094, 0.02476851272583008, 0.024532991409301756, 0.024600576400756836, 0.024489311218261717, 0.024676416397094728, 0.024684383392333985, 0.024685312271118164, 0.02476851272583008, 0.024755680084228515, 0.02483987236022949, 0.024685407638549806, 0.024723455429077147, 0.024803327560424804, 0.024874431610107422, 0.02490220832824707, 0.024626592636108398, 0.024721088409423827, 0.02501696014404297, 0.024639232635498047, 0.024637983322143554, 0.02453708839416504, 0.024753503799438477, 0.02453708839416504, 0.02462905693054199, 0.02465878486633301, 0.02449407958984375, 0.024461311340332033, 0.02452889633178711, 0.024820831298828124, 0.024779680252075196, 0.024526687622070314, 0.024524192810058593, 0.0245533447265625, 0.024827775955200197, 0.025082176208496093, 0.025601760864257812, 0.025077600479125977, 0.025168256759643556, 0.025092096328735353, 0.025173759460449217, 0.025379072189331053, 0.02611199951171875, 0.02509823989868164, 0.024986976623535155, 0.026501792907714844, 0.025174016952514647, 0.02531283187866211, 0.025022911071777343, 0.02510643196105957, 0.025440256118774415, 0.025280511856079102, 0.025208831787109375, 0.02533990478515625, 0.025112031936645508, 0.025060928344726563, 0.02517705535888672, 0.025235679626464842, 0.026005279541015624, 0.025058752059936525, 0.02528108787536621, 0.02527846336364746, 0.025186239242553712, 0.025094207763671876, 0.025026559829711914, 0.025834720611572267, 0.028717248916625977, 0.025162080764770507, 0.025092063903808595, 0.025484895706176756, 0.02531808090209961, 0.025070720672607422, 0.025049983978271486, 0.02565555191040039, 0.024929376602172853, 0.025104320526123047, 0.02495382308959961, 0.025081600189208984, 0.02486800003051758, 0.02741744041442871, 0.025100160598754882, 0.025208255767822266, 0.025230016708374024, 0.02483404731750488, 0.02494259262084961, 0.02655561637878418, 0.025391584396362306, 0.025010496139526366, 0.024936447143554686, 0.025272319793701172, 0.025309183120727538, 0.0249487361907959, 0.024983903884887696, 0.025122079849243164, 0.02492185592651367, 0.02502038383483887, 0.024807231903076172, 0.024941408157348632, 0.024813568115234375, 0.02556159973144531, 0.024815807342529295, 0.025108480453491212, 0.02476201629638672, 0.024852832794189452, 0.025132352828979493, 0.025187007904052733, 0.02479859161376953, 0.02477324867248535, 0.024879104614257814, 0.02485043144226074, 0.024979455947875977, 0.0246778564453125, 0.024840736389160158, 0.024905920028686523, 0.02506528091430664, 
0.024967231750488282, 0.02499772834777832, 0.025595935821533203, 0.02516543960571289, 0.025006528854370116, 0.024720800399780272, 0.024848352432250975, 0.02477120018005371, 0.024868864059448242, 0.02488115119934082, 0.024781984329223634, 0.02485536003112793, 0.024868896484375, 0.024856447219848633, 0.024764543533325196, 0.025228511810302733, 0.02533452796936035, 0.024896896362304688, 0.02492483139038086, 0.02471731185913086, 0.024895488739013674, 0.02493235206604004, 0.024669792175292967, 0.02498192024230957, 0.0248353271484375, 0.024867456436157228, 0.024909952163696288, 0.024764415740966796, 0.02496512031555176, 0.02529280090332031, 0.025018367767333984, 0.024927520751953126, 0.024864896774291993, 0.025127519607543947, 0.024861696243286133, 0.024695808410644532, 0.024676000595092774, 0.024912191390991212, 0.024989984512329103, 0.02495257568359375, 0.026462207794189452, 0.02704310417175293, 0.025580255508422852, 0.025200096130371094, 0.024760160446166992, 0.02485318374633789, 0.02488934326171875, 0.025630720138549806, 0.025101696014404297, 0.02508198356628418, 0.02480179214477539, 0.02466160011291504, 0.024838144302368165, 0.02480684852600098, 0.024687583923339845, 0.024453119277954103, 0.02474777603149414, 0.02509971237182617, 0.024995935440063476, 0.02474985694885254, 0.02482681655883789, 0.024737951278686523, 0.025045984268188475, 0.024750495910644533, 0.024869344711303712, 0.024811807632446288, 0.025155296325683595, 0.024747711181640625, 0.02469875144958496, 0.024842687606811523, 0.02551398468017578, 0.025205888748168946, 0.02493529510498047, 0.024768192291259764, 0.024743776321411132, 0.024828384399414063, 0.024922111511230468, 0.02475212860107422, 0.0249051513671875, 0.0246625919342041, 0.024785919189453123, 0.024638463973999023, 0.024786815643310547, 0.025782400131225586, 0.025222591400146484, 0.024950944900512695, 0.02475200080871582, 0.02473628807067871, 0.025057279586791992, 0.024915136337280274, 0.024777536392211915, 0.02476995277404785, 0.024771167755126954, 0.02488528060913086, 0.024692703247070312, 0.02451251220703125, 0.025055007934570314, 0.025018592834472657, 0.02471651268005371, 0.024728160858154297, 0.024671903610229494, 0.024715808868408202, 0.02510643196105957, 0.024880607604980468, 0.025005983352661132, 0.02481558418273926, 0.02496169662475586, 0.024729600906372072, 0.024724863052368165, 0.025129600524902342, 0.025726688385009765, 0.025105791091918947, 0.02500495910644531, 0.024823808670043947, 0.024723295211791993, 0.024918176651000976, 0.025014272689819338, 0.02489753532409668, 0.024782848358154298, 0.02474809646606445, 0.024735679626464845, 0.024827903747558593, 0.02471900749206543, 0.024858879089355468, 0.02515567970275879, 0.024952831268310546, 0.024961023330688475, 0.02480931282043457, 0.02479283142089844, 0.025078176498413086, 0.024954879760742187, 0.024763904571533202, 0.02482431983947754, 0.024799232482910157, 0.025038175582885742, 0.024758495330810548, 0.02511644744873047, 0.026067615509033203, 0.025126911163330077, 0.025062976837158205, 0.024733951568603516, 0.025063583374023438, 0.02494063949584961, 0.02482975959777832, 0.024748159408569337, 0.02488096046447754, 0.024774848937988283, 0.024870336532592772, 0.024797760009765624, 0.024991743087768553, 0.025287967681884765, 0.02530748748779297, 0.024847808837890624, 0.02485958480834961, 0.02489952087402344, 0.02494441604614258, 0.024877344131469727, 0.02489753532409668, 0.0254748477935791, 0.024923648834228516, 0.024926464080810548, 0.02488368034362793, 0.025202592849731444, 0.025151584625244142, 
0.024704191207885744, 0.025014591217041016, 0.02481203269958496, 0.025081439971923827, 0.025279232025146484, 0.025085248947143556, 0.024858976364135744, 0.02487411117553711, 0.025213823318481446, 0.025489887237548827, 0.02481564712524414, 0.024809663772583007, 0.02466377639770508, 0.02482614326477051, 0.024738815307617186, 0.024490400314331053, 0.024510879516601563, 0.024731840133666992, 0.02461033630371094, 0.02467238426208496, 0.024725727081298828, 0.024763967514038084, 0.02464614486694336, 0.02443199920654297, 0.024465375900268555, 0.024623327255249024, 0.024692960739135742, 0.024436511993408204, 0.024893951416015626, 0.02728246307373047, 0.02475926399230957, 0.02447100830078125, 0.024568447113037108, 0.02450979232788086, 0.024838720321655273, 0.024409311294555664, 0.02447849655151367, 0.024905439376831054, 0.025569215774536132, 0.024965471267700195, 0.024682079315185547, 0.02456220817565918, 0.024739072799682616, 0.024527488708496095, 0.024380672454833986, 0.025199359893798828, 0.024696832656860353, 0.02468659210205078, 0.02437443161010742, 0.024912736892700196, 0.02471673583984375, 0.024672672271728514, 0.024545024871826172, 0.024434431076049805, 0.024951007843017577, 0.02498147201538086, 0.024844768524169922, 0.02477414321899414, 0.02460723114013672, 0.024713279724121094, 0.024472864151000976, 0.024849056243896484, 0.02474630355834961, 0.02487468719482422, 0.024475616455078127, 0.0243917121887207, 0.024475648880004884, 0.024614431381225585, 0.02432044792175293, 0.024348352432250978, 0.024633695602416992, 0.024465055465698243, 0.02547884750366211, 0.024681856155395508, 0.02468118476867676, 0.024413759231567383, 0.024253215789794922, 0.025208671569824218, 0.024379392623901368, 0.02430735969543457, 0.02432035255432129, 0.02427449607849121, 0.024344255447387695, 0.02431667137145996, 0.02409267234802246, 0.02410851287841797, 0.024443424224853516, 0.02451878356933594, 0.02455779266357422, 0.02455311965942383, 0.024497600555419923, 0.02435744094848633, 0.024377344131469726, 0.0245166072845459, 0.02454732894897461, 0.024450624465942383, 0.024590784072875977, 0.02469478416442871, 0.024866559982299804, 0.024920320510864256, 0.025042943954467774, 0.02532307243347168, 0.025065919876098634, 0.02529859161376953, 0.025080160140991212, 0.025182207107543944, 0.02574131202697754, 0.025229312896728515, 0.024907039642333983, 0.025221855163574218, 0.02509414482116699, 0.025332927703857422, 0.02503763198852539, 0.0252969913482666, 0.025105663299560547, 0.025259807586669923, 0.025178720474243164, 0.02539507293701172, 0.025026208877563478, 0.02499580764770508, 0.025058080673217773, 0.02500806427001953, 0.025158815383911133, 0.024957056045532226, 0.024912448883056642, 0.025775711059570314, 0.02559449577331543, 0.02534604835510254, 0.025188352584838865, 0.02511257553100586, 0.024952192306518555, 0.02512249565124512, 0.024984384536743166, 0.02547110366821289, 0.02495078468322754, 0.02573311996459961, 0.02510438346862793, 0.024982816696166994, 0.025018304824829102, 0.0251297607421875, 0.024991743087768553, 0.024993791580200195, 0.02484809684753418, 0.024926496505737306, 0.02512009620666504, 0.024891040802001954, 0.024787967681884765, 0.02516377639770508, 0.025157312393188476, 0.02498796844482422, 0.025059328079223633, 0.02479497528076172, 0.025075872421264647, 0.024937952041625976, 0.025044607162475585, 0.02483228874206543, 0.02491827201843262, 0.024852319717407225, 0.024936479568481447, 0.024752639770507814, 0.02457747268676758, 0.02515385627746582, 0.025128351211547852, 0.024994848251342773, 
0.02500966453552246, 0.024864288330078126, 0.025039264678955078, 0.02497747230529785, 0.02494211196899414, 0.024867231369018555, 0.024985984802246095, 0.024811103820800783, 0.02484003257751465, 0.024891807556152345, 0.02502467155456543, 0.025081823348999024, 0.025304895401000976, 0.024956863403320314, 0.024762655258178713, 0.02482713508605957, 0.024888032913208007, 0.024864927291870117, 0.02474380874633789, 0.02486262321472168, 0.025010271072387694, 0.02507161521911621, 0.024821760177612305, 0.024944032669067383, 0.02484694480895996, 0.02502182388305664, 0.025830015182495118, 0.02495692825317383, 0.025053152084350584, 0.025466911315917967, 0.02503987121582031, 0.024826400756835936, 0.02504038429260254, 0.025039392471313475, 0.025901952743530274, 0.025040319442749023, 0.02484486389160156, 0.0249467830657959, 0.024981407165527342, 0.02522915267944336, 0.027841856002807617, 0.025258848190307617, 0.025022464752197264, 0.025313247680664064, 0.024809503555297853, 0.02530860710144043, 0.024846912384033203, 0.024901216506958007, 0.024909568786621095, 0.024836767196655275, 0.0247127685546875, 0.024854976654052733, 0.0248090877532959, 0.024789375305175783, 0.02532099151611328, 0.025024991989135742, 0.02510438346862793, 0.02495692825317383, 0.02495078468322754, 0.024860448837280273, 0.024867040634155273, 0.02503887939453125, 0.024852447509765625, 0.02490096092224121, 0.02482441520690918, 0.024846399307250976, 0.024912927627563478, 0.024914016723632814, 0.025157983779907227, 0.025118976593017577, 0.024872928619384765, 0.024856895446777345, 0.025040447235107424, 0.024844736099243165, 0.02526518440246582, 0.025076416015625, 0.024860416412353516, 0.024975168228149415, 0.02530352020263672, 0.024951040267944338, 0.024993791580200195, 0.02516713523864746, 0.024832735061645506, 0.024532991409301756, 0.024606016159057616, 0.024582048416137696, 0.024684576034545897, 0.024572479248046876, 0.024503936767578127, 0.024566335678100584, 0.025038848876953124, 0.024696352005004883, 0.02490825653076172, 0.024750080108642578, 0.024944639205932616, 0.024710559844970705, 0.0245827522277832, 0.02560406494140625, 0.0246343994140625, 0.024334592819213866, 0.024294111251831056, 0.024363008499145508, 0.024772287368774414, 0.02504025650024414, 0.02467715263366699, 0.025089311599731445, 0.024744831085205077, 0.024397823333740236, 0.024467552185058594, 0.024422592163085937, 0.024391359329223632, 0.024465248107910155, 0.02428767967224121, 0.02412928009033203, 0.024260608673095704, 0.024224767684936522, 0.02447257614135742, 0.024227840423583984, 0.024276992797851563, 0.024223743438720705, 0.02434662437438965, 0.024319616317749024, 0.024489471435546875, 0.024296319961547852, 0.024321792602539062, 0.024270368576049806, 0.024369407653808593, 0.024230367660522462, 0.024231071472167968, 0.024239999771118164, 0.024346879959106445, 0.024429279327392577, 0.024449024200439453, 0.02481155204772949, 0.024452896118164064, 0.02439151954650879, 0.024455360412597656, 0.02438159942626953, 0.025869983673095703, 0.02511907196044922, 0.024579904556274415, 0.024358495712280274, 0.024795743942260744, 0.024793088912963866, 0.024440832138061523, 0.024411455154418945, 0.02466217613220215, 0.02484217643737793, 0.024576608657836913, 0.024827327728271485, 0.02606867218017578, 0.026467199325561522, 0.02553152084350586, 0.025037120819091797, 0.024994144439697264, 0.025038656234741212, 0.025051136016845704, 0.025047391891479493, 0.025286720275878905, 0.02515558433532715]",tokens/s,40.12438864856562,, 
bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.669824,569.311232,0.0,174.063616,172.57984,s,1,7.17195458984375,7.17195458984375,0.0,7.17195458984375,7.17195458984375,7.17195458984375,7.17195458984375,[7.17195458984375],,kWh,4.49210194999902e-06,4.882781367663077e-07,1.981946030002457e-06,6.962326116767785e-06,,MB,1108.475904,640.6144,0.0,234.881024,215.589888,s,25,0.27508755302429205,0.011003502120971677,0.00015233908185133062,0.010978240013122559,0.011162656211853028,0.011172537803649903,0.011461617774963379,"[0.01155247974395752, 0.010974176406860351, 0.011006848335266113, 0.010918208122253418, 0.011093888282775879, 0.011083488464355468, 0.010961407661437989, 0.010906815528869629, 0.010854496002197265, 0.010844511985778809, 0.010817952156066894, 0.010858304023742675, 0.011097536087036133, 0.011025247573852539, 0.011046751976013184, 0.01100153636932373, 0.011167136192321778, 0.011155936241149902, 0.011173888206481934, 0.010980640411376953, 0.010856415748596191, 0.010905407905578613, 0.010951775550842285, 0.010978240013122559, 0.010874464035034179]",tokens/s,23265.320184933415,kWh,3.5411350660398743e-07,3.90525435950372e-08,2.336999577257331e-07,6.268660079247578e-07,tokens/kWh,408380733.3045366,MB,1135.460352,642.711552,0.0,236.978176,215.592448,s,25,9.833397613525392,0.39333590454101564,0.02841497875139694,0.38773126220703125,0.3964486999511719,0.3983803649902344,0.498144299316406,"[0.3969692077636719, 0.5295372924804688, 0.39566793823242186, 0.3912528991699219, 0.39300204467773436, 0.388822998046875, 0.38113458251953125, 0.37845510864257814, 0.3840166931152344, 0.37589962768554686, 0.3793076171875, 0.39152328491210936, 0.3906401062011719, 0.3927539978027344, 0.39408251953125, 0.392140869140625, 0.398733154296875, 0.38739495849609373, 0.3850337829589844, 0.38190274047851563, 0.38393341064453124, 0.38520547485351564, 0.38467071533203123, 0.3835853271484375, 0.38773126220703125]",tokens/s,160.16844450931782,kWh,1.0956496214870327e-05,1.2083142877942698e-06,4.543194889428047e-06,1.670800539209264e-05,tokens/kWh,3770647.574115332,,s,1575,9.820896668434136,0.006235489948212153,0.0033837550131982762,0.0061016960144042965,0.006408895969390869,0.006480585527420044,0.006702362804412841,"[0.006441760063171387, 0.006477503776550293, 0.007278592109680176, 0.006242335796356201, 0.006197023868560791, 0.006123712062835693, 0.00619536018371582, 0.0062871999740600586, 0.0062791681289672855, 0.006250271797180176, 0.006262400150299072, 0.00640777587890625, 0.006451871871948242, 0.006435488224029541, 0.0064787201881408695, 0.006379903793334961, 0.006322336196899414, 0.006346879959106445, 0.006325600147247315, 0.006332896232604981, 0.006299583911895752, 0.0063175358772277835, 0.006257599830627442, 0.006154208183288574, 0.0061010241508483885, 0.006008416175842285, 0.00629750394821167, 0.006022655963897705, 0.006118144035339355, 0.006304160118103027, 0.006664127826690674, 
0.006571743965148926, 0.006684864044189453, 0.00659660816192627, 0.006536287784576416, 0.006404416084289551, 0.006496863842010498, 0.006506591796875, 0.006361055850982666, 0.006279104232788086, 0.006217728137969971, 0.006187007904052734, 0.00610537576675415, 0.006141664028167725, 0.006139488220214844, 0.0062119998931884765, 0.006354015827178955, 0.006132031917572021, 0.006070847988128662, 0.006041632175445556, 0.006061952114105224, 0.006266335964202881, 0.0062523198127746584, 0.006249343872070313, 0.006047743797302246, 0.006020480155944824, 0.006121088027954102, 0.006068895816802979, 0.006124063968658448, 0.0061190400123596195, 0.006299839973449707, 0.006440864086151123, 0.006282815933227539, 0.006154208183288574, 0.006459455966949463, 0.006365056037902832, 0.006270336151123047, 0.006281504154205322, 0.0063179202079772945, 0.006151008129119873, 0.006214687824249268, 0.006289663791656494, 0.00641868782043457, 0.00624396800994873, 0.006178719997406006, 0.006220736026763916, 0.006245855808258057, 0.14023097229003906, 0.006438432216644287, 0.006478464126586914, 0.006367008209228516, 0.006177023887634277, 0.006741055965423584, 0.006666848182678223, 0.006194975852966309, 0.006236991882324219, 0.006126751899719238, 0.006145696163177491, 0.006063039779663086, 0.0061320638656616215, 0.006153183937072754, 0.0061016960144042965, 0.006318079948425293, 0.0062912960052490235, 0.006263199806213379, 0.006190336227416992, 0.006142079830169678, 0.006067615985870361, 0.0063088321685791015, 0.006425695896148682, 0.006314911842346191, 0.006272895812988281, 0.006056159973144531, 0.00615558385848999, 0.006147903919219971, 0.0063373122215271, 0.0063508481979370115, 0.006336512088775635, 0.006254591941833496, 0.00695091199874878, 0.006221824169158936, 0.006158239841461181, 0.006170720100402832, 0.006177087783813476, 0.006147520065307617, 0.006162687778472901, 0.006333727836608887, 0.006486815929412842, 0.006649504184722901, 0.006252831935882568, 0.006184415817260742, 0.006132256031036377, 0.0061831679344177244, 0.0062082881927490235, 0.006208127975463868, 0.006134111881256104, 0.005865471839904785, 0.006037856101989746, 0.006028960227966309, 0.006107135772705078, 0.006444416046142578, 0.00649894380569458, 0.006588704109191894, 0.006657760143280029, 0.006494016170501709, 0.006662591934204102, 0.006512447834014893, 0.006442016124725342, 0.006470560073852539, 0.0063610877990722655, 0.006357279777526856, 0.006414048194885254, 0.006186272144317627, 0.006176896095275879, 0.006113887786865235, 0.006187104225158692, 0.00617193603515625, 0.006304192066192627, 0.0063203201293945315, 0.006230016231536865, 0.006182015895843506, 0.006160352230072021, 0.006350048065185547, 0.0063259520530700684, 0.006202720165252686, 0.006091423988342285, 0.006133312225341797, 0.0062689919471740725, 0.006090464115142823, 0.006234784126281739, 0.006102240085601807, 0.006074975967407226, 0.006121664047241211, 0.006028992176055908, 0.006142591953277588, 0.0065491838455200195, 0.006555808067321777, 0.006338240146636963, 0.006260863780975342, 0.006132832050323486, 0.006200287818908691, 0.006116479873657227, 0.006169439792633057, 0.006230016231536865, 0.006195199966430664, 0.0062259202003479, 0.006518784046173095, 0.0065413122177124024, 0.0064430079460144046, 0.006467584133148193, 0.006606847763061524, 0.006338560104370118, 0.006399328231811523, 0.00623308801651001, 0.006174143791198731, 0.006082784175872802, 0.006190815925598145, 0.0060104641914367676, 0.0060824317932128905, 0.0061421761512756344, 0.00657366418838501, 0.0065446081161499026, 
0.006555808067321777, 0.006583104133605957, 0.006516064167022705, 0.006380127906799316, 0.0063571839332580565, 0.006253856182098389, 0.0062368960380554195, 0.006162240028381348, 0.0061420159339904785, 0.006054944038391114, 0.00614246416091919, 0.006173151969909668, 0.006033408164978027, 0.00603110408782959, 0.0060646400451660155, 0.0060698561668396, 0.006082719802856446, 0.006064127922058105, 0.006088223934173584, 0.006042079925537109, 0.006090752124786377, 0.006047743797302246, 0.006150144100189209, 0.006136127948760986, 0.0060638079643249515, 0.006078271865844726, 0.006041088104248047, 0.006166207790374756, 0.006115615844726563, 0.0062984638214111325, 0.006290847778320312, 0.0061831998825073245, 0.006312287807464599, 0.006327744007110596, 0.006338592052459717, 0.00621401596069336, 0.006223872184753418, 0.006199295997619629, 0.006153952121734619, 0.006160448074340821, 0.006117216110229492, 0.006191167831420898, 0.006327807903289795, 0.006329152107238769, 0.006367392063140869, 0.006283103942871094, 0.006315167903900147, 0.006409056186676025, 0.006300831794738769, 0.0062206401824951175, 0.006278848171234131, 0.006219295978546142, 0.006136608123779297, 0.006045087814331055, 0.006036064147949219, 0.006017024040222168, 0.006071424007415772, 0.006059135913848877, 0.006045440196990967, 0.006107135772705078, 0.006348832130432129, 0.006434815883636475, 0.006508607864379883, 0.006668320178985596, 0.0067721281051635746, 0.006377151966094971, 0.0064234561920166015, 0.0063610877990722655, 0.006407392024993896, 0.006359295845031738, 0.006272575855255127, 0.006247712135314941, 0.006297279834747314, 0.006217728137969971, 0.006209856033325196, 0.006119455814361572, 0.006096127986907959, 0.006143487930297851, 0.006663167953491211, 0.006080416202545166, 0.00609830379486084, 0.006070015907287597, 0.006091231822967529, 0.006020991802215576, 0.006253087997436523, 0.006494400024414063, 0.006410048007965088, 0.006289247989654541, 0.006141503810882568, 0.006091360092163086, 0.006208896160125732, 0.006910528182983398, 0.006057888031005859, 0.006135871887207031, 0.00620358419418335, 0.006245855808258057, 0.00627945613861084, 0.006191264152526856, 0.006190847873687744, 0.006051519870758057, 0.00606166410446167, 0.005978752136230469, 0.006058656215667725, 0.006114816188812256, 0.006309120178222656, 0.006243264198303223, 0.00638972806930542, 0.006241663932800293, 0.006118048191070557, 0.006101088047027588, 0.0061131839752197265, 0.006191103935241699, 0.006060031890869141, 0.0061168642044067386, 0.006089119911193847, 0.006089824199676514, 0.006071296215057373, 0.006104415893554688, 0.006050464153289795, 0.006069952011108398, 0.006017343997955322, 0.006122591972351074, 0.00630617618560791, 0.006260799884796143, 0.0063446397781372075, 0.006395904064178467, 0.00641974401473999, 0.006273727893829345, 0.006219711780548096, 0.006221920013427734, 0.006196320056915283, 0.0062262721061706544, 0.00628495979309082, 0.006218656063079834, 0.0061131839752197265, 0.006133855819702149, 0.006064127922058105, 0.006159391880035401, 0.006024159908294678, 0.006062079906463623, 0.006039135932922364, 0.00635097599029541, 0.006383039951324463, 0.006588287830352783, 0.00664467191696167, 0.006448959827423096, 0.006289792060852051, 0.006215136051177978, 0.006246880054473877, 0.006176544189453125, 0.006204576015472412, 0.006123551845550537, 0.00604256010055542, 0.006075520038604737, 0.006012063980102539, 0.006055232048034668, 0.0060133438110351566, 0.006082464218139649, 0.006002560138702393, 0.006215968132019043, 0.006258624076843262, 
0.006158336162567139, 0.0060860800743103025, 0.006007359981536865, 0.00601907205581665, 0.006003903865814209, 0.0060076799392700195, 0.005959616184234619, 0.006174111843109131, 0.005993055820465088, 0.0060416641235351565, 0.005990623950958252, 0.006008416175842285, 0.005994624137878418, 0.006017183780670166, 0.005955584049224853, 0.006011839866638184, 0.00609168004989624, 0.006328256130218506, 0.006506559848785401, 0.006327807903289795, 0.006261248111724854, 0.006199391841888427, 0.006086688041687011, 0.006033279895782471, 0.006017024040222168, 0.0057554559707641605, 0.005979135990142822, 0.006034463882446289, 0.0059576001167297365, 0.005983967781066895, 0.00596940803527832, 0.005995296001434326, 0.005969823837280274, 0.006023519992828369, 0.0059818878173828124, 0.006021183967590332, 0.005967264175415039, 0.005986911773681641, 0.005952576160430908, 0.006038688182830811, 0.005959455966949463, 0.006041376113891601, 0.006084832191467285, 0.007065279960632324, 0.006388031959533692, 0.006017024040222168, 0.006012928009033203, 0.00597811222076416, 0.006039552211761475, 0.0059935998916625976, 0.006026112079620362, 0.006000639915466309, 0.006014944076538086, 0.005980224132537842, 0.0059732160568237306, 0.005970751762390137, 0.0059697279930114745, 0.006072256088256836, 0.006136000156402588, 0.006228991985321045, 0.005983168125152588, 0.005959743976593017, 0.0061354880332946775, 0.005955327987670898, 0.006042175769805909, 0.005967455863952637, 0.0060208959579467775, 0.005972608089447022, 0.006039231777191162, 0.005978271961212158, 0.006039711952209472, 0.006003712177276611, 0.0061224961280822755, 0.005964191913604736, 0.006037087917327881, 0.0059592962265014645, 0.0060207037925720215, 0.006015103816986084, 0.006032032012939453, 0.006418496131896973, 0.006033567905426025, 0.005982175827026367, 0.0060347518920898435, 0.006017536163330078, 0.0059593281745910645, 0.005968224048614502, 0.006455264091491699, 0.0059987521171569825, 0.005822688102722168, 0.005946527957916259, 0.005976704120635987, 0.006002463817596435, 0.005988416194915772, 0.005937376022338867, 0.006008287906646728, 0.005966303825378418, 0.006005023956298828, 0.00594217586517334, 0.005980991840362549, 0.005929152011871338, 0.005975872039794922, 0.005917695999145508, 0.005995488166809082, 0.005975903987884522, 0.006008831977844238, 0.006013472080230713, 0.006019968032836914, 0.005978911876678467, 0.00601907205581665, 0.005935200214385986, 0.005986112117767334, 0.005942495822906494, 0.006029727935791015, 0.005998112201690674, 0.006013023853302002, 0.006009056091308594, 0.006053567886352539, 0.006023359775543213, 0.006052127838134766, 0.005986783981323242, 0.006036863803863525, 0.00612175989151001, 0.006025568008422851, 0.006076416015625, 0.00601907205581665, 0.0059411201477050785, 0.006060192108154297, 0.0059411201477050785, 0.0059905281066894535, 0.005918687820434571, 0.0060104641914367676, 0.0059415998458862305, 0.005976352214813232, 0.0059489598274230955, 0.006432576179504395, 0.0059584641456604005, 0.006020927906036377, 0.005933023929595947, 0.006121088027954102, 0.005955743789672852, 0.006004511833190918, 0.005968160152435302, 0.006020415782928467, 0.006044415950775146, 0.006006912231445312, 0.006170400142669678, 0.006030496120452881, 0.005959968090057373, 0.005988959789276123, 0.005946400165557861, 0.005996607780456543, 0.005866015911102295, 0.006158720016479493, 0.005962912082672119, 0.007720128059387207, 0.008408767700195312, 0.00780617618560791, 0.007717919826507568, 0.0070100479125976565, 0.005971519947052002, 0.005986656188964844, 
0.005963840007781982, 0.005984255790710449, 0.005922560214996338, 0.005966080188751221, 0.005912576198577881, 0.005987552165985108, 0.00588265609741211, 0.006002463817596435, 0.0059699521064758305, 0.005950784206390381, 0.005971136093139648, 0.005990079879760742, 0.005945087909698487, 0.005932384014129639, 0.005895071983337402, 0.005936927795410156, 0.005925087928771972, 0.0060026879310607914, 0.005938943862915039, 0.005990655899047852, 0.005951583862304688, 0.005928864002227783, 0.0059435200691223145, 0.005924479961395264, 0.0059671678543090825, 0.005953824043273926, 0.0059500160217285155, 0.0059269118309021, 0.005910528182983398, 0.005937151908874512, 0.005947391986846923, 0.005967872142791748, 0.005944736003875732, 0.005964384078979492, 0.005908480167388916, 0.0058951997756958, 0.0059647679328918455, 0.005904160022735596, 0.005949247837066651, 0.005894495964050293, 0.00592083215713501, 0.0059678077697753905, 0.005975488185882568, 0.005884543895721435, 0.00593452787399292, 0.005900864124298096, 0.0059658241271972655, 0.005898528099060059, 0.005941055774688721, 0.005913760185241699, 0.0059275197982788085, 0.005965983867645264, 0.00594870376586914, 0.005664127826690674, 0.005906688213348389, 0.005962111949920654, 0.005951231956481934, 0.0059558401107788084, 0.005912223815917969, 0.0059415998458862305, 0.005984255790710449, 0.006062079906463623, 0.0059205121994018554, 0.005947648048400879, 0.005887519836425781, 0.006045728206634521, 0.005915008068084717, 0.005945280075073243, 0.005939616203308106, 0.005927807807922363, 0.005907360076904297, 0.005953536033630371, 0.005914559841156006, 0.005943168163299561, 0.005936384201049805, 0.005938144207000733, 0.0061801280975341795, 0.005986944198608398, 0.005943168163299561, 0.005965727806091309, 0.005927231788635254, 0.005988255977630615, 0.005937280178070069, 0.00595136022567749, 0.005937119960784912, 0.005950975894927979, 0.0059227199554443355, 0.0059584641456604005, 0.005932864189147949, 0.005969088077545166, 0.006034175872802735, 0.0059515519142150876, 0.005980160236358643, 0.0059550080299377445, 0.005952064037322998, 0.005922815799713135, 0.005988351821899414, 0.005933055877685547, 0.005988639831542969, 0.005928063869476318, 0.00599510383605957, 0.005927167892456055, 0.005991968154907226, 0.005951712131500244, 0.0062979841232299804, 0.005985983848571777, 0.005965760231018066, 0.006059455871582031, 0.005966400146484375, 0.0059269118309021, 0.005943552017211914, 0.0059246401786804195, 0.005973983764648438, 0.0059617919921875, 0.005983295917510986, 0.005919616222381591, 0.005689343929290771, 0.005947455883026123, 0.005965760231018066, 0.005934144020080566, 0.005941247940063477, 0.0059211840629577635, 0.006122079849243164, 0.007403456211090088, 0.006957056045532227, 0.0062156801223754886, 0.0059688959121704105, 0.005952511787414551, 0.005935232162475586, 0.005983520030975342, 0.005941855907440186, 0.005958752155303955, 0.005910431861877442, 0.005935679912567138, 0.00593887996673584, 0.005985023975372314, 0.005918879985809326, 0.005928639888763428, 0.005926559925079346, 0.006015679836273193, 0.006008607864379883, 0.005953567981719971, 0.005951039791107178, 0.005949535846710205, 0.0060635838508605955, 0.005949600219726563, 0.00594374418258667, 0.00595747184753418, 0.005957183837890625, 0.00599948787689209, 0.005953536033630371, 0.005937151908874512, 0.005975135803222656, 0.005939551830291748, 0.005976672172546387, 0.005942431926727295, 0.005974080085754395, 0.005902112007141113, 0.005958623886108398, 0.005910528182983398, 0.006109087944030762, 
0.0060498881340026855, 0.00601043176651001, 0.005929408073425293, 0.005969503879547119, 0.005916063785552979, 0.005978879928588868, 0.005904928207397461, 0.005973728179931641, 0.0059324798583984375, 0.00602784013748169, 0.005980160236358643, 0.006082560062408447, 0.006088160037994385, 0.006142240047454834, 0.006040095806121826, 0.006086368083953858, 0.005998432159423828, 0.006023327827453614, 0.005827936172485351, 0.006211872100830078, 0.006291872024536133, 0.006391583919525146, 0.0063482561111450194, 0.006269248008728028, 0.006226431846618652, 0.006270976066589356, 0.006367072105407715, 0.006162496089935303, 0.006154304027557373, 0.00603872013092041, 0.006025728225708008, 0.005963967800140381, 0.006080671787261963, 0.006012224197387696, 0.006118080139160156, 0.006209536075592041, 0.00617087984085083, 0.006147071838378906, 0.006057888031005859, 0.006024032115936279, 0.0060026879310607914, 0.006038688182830811, 0.006154719829559326, 0.006179200172424316, 0.00610748815536499, 0.006158304214477539, 0.006168288230895996, 0.006375391960144043, 0.006330687999725342, 0.006268608093261719, 0.006367231845855713, 0.00638105583190918, 0.006407904148101807, 0.006279935836791992, 0.0063266558647155765, 0.006227583885192871, 0.0061645121574401855, 0.006158527851104737, 0.006137663841247558, 0.006098944187164307, 0.0060661759376525876, 0.006227456092834473, 0.006087007999420166, 0.0062846078872680665, 0.00647049617767334, 0.006552927970886231, 0.006453120231628418, 0.006269696235656738, 0.006259871959686279, 0.006187136173248291, 0.00618943977355957, 0.006101376056671143, 0.006158336162567139, 0.006090752124786377, 0.006023263931274414, 0.006039167881011963, 0.006266304016113281, 0.006155104160308838, 0.006387455940246582, 0.006463935852050781, 0.006512032032012939, 0.005967423915863037, 0.006191008090972901, 0.006179327964782715, 0.006275360107421875, 0.006350592136383056, 0.006295551776885986, 0.006327616214752197, 0.006430399894714355, 0.006392288208007813, 0.006359392166137695, 0.006389472007751465, 0.006377855777740479, 0.006456895828247071, 0.006438752174377441, 0.006408736228942871, 0.0064139838218688966, 0.00637337589263916, 0.006441472053527832, 0.0063192639350891115, 0.006246975898742676, 0.006361375808715821, 0.006244095802307129, 0.006202688217163086, 0.006146719932556152, 0.006285600185394287, 0.006205440044403076, 0.006162367820739746, 0.006121535778045654, 0.006080512046813965, 0.006096896171569824, 0.006042943954467773, 0.006105279922485352, 0.006131648063659668, 0.006113247871398926, 0.006105984210968017, 0.006102303981781006, 0.006078911781311035, 0.006082464218139649, 0.005999743938446045, 0.006074592113494873, 0.006146111965179443, 0.006275775909423828, 0.006165887832641602, 0.006128255844116211, 0.006019040107727051, 0.006021152019500733, 0.006053887844085694, 0.006060128211975098, 0.006184864044189453, 0.006217728137969971, 0.006178815841674804, 0.006133247852325439, 0.006124032020568848, 0.0061214399337768555, 0.006066048145294189, 0.006052000045776367, 0.006092512130737305, 0.005996672153472901, 0.0061166400909423825, 0.006114528179168701, 0.006098176002502441, 0.006242112159729004, 0.006140768051147461, 0.00602726411819458, 0.006256671905517578, 0.006212992191314697, 0.006238719940185547, 0.006159743785858154, 0.006120255947113037, 0.0061420159339904785, 0.00606601619720459, 0.006109183788299561, 0.006094848155975342, 0.006072319984436035, 0.0060499200820922855, 0.0061626238822937015, 0.006196928024291992, 0.006330592155456543, 0.0063528637886047365, 0.006278560161590576, 
0.006265247821807861, 0.006199295997619629, 0.006172671794891358, 0.006117087841033935, 0.00626470422744751, 0.006071936130523681, 0.006139808177947998, 0.006394112110137939, 0.006197855949401855, 0.006326047897338867, 0.0064924159049987796, 0.006612991809844971, 0.006840415954589844, 0.00640934419631958, 0.006416672229766845, 0.006408703804016113, 0.006343999862670899, 0.006365888118743897, 0.006369279861450195, 0.00623638391494751, 0.0060778241157531734, 0.00602563190460205, 0.005991680145263672, 0.006054656028747558, 0.006199295997619629, 0.00638156795501709, 0.006262784004211426, 0.006258975982666015, 0.006280928134918213, 0.006245888233184814, 0.006208000183105469, 0.006141024112701416, 0.0060834879875183104, 0.006049791812896729, 0.006100992202758789, 0.00603545618057251, 0.006067615985870361, 0.006199903964996338, 0.0064245758056640625, 0.00630998420715332, 0.006158143997192383, 0.006049312114715576, 0.00611084794998169, 0.006308703899383545, 0.00630406379699707, 0.006362912178039551, 0.005897439956665039, 0.00616534423828125, 0.006092735767364502, 0.00611737585067749, 0.006205440044403076, 0.006292736053466797, 0.00623308801651001, 0.006235263824462891, 0.006324160099029541, 0.006265247821807861, 0.006299935817718506, 0.0060677118301391605, 0.006086880207061767, 0.006038976192474365, 0.006107872009277344, 0.006119552135467529, 0.006217567920684814, 0.006217887878417969, 0.006168575763702393, 0.006154240131378174, 0.006098944187164307, 0.006134975910186768, 0.00616099214553833, 0.006314208030700683, 0.006379231929779052, 0.006312096118927002, 0.00620966386795044, 0.006117280006408691, 0.006115327835083008, 0.00610211181640625, 0.006093823909759521, 0.0063318080902099606, 0.006295904159545899, 0.006119103908538819, 0.006462368011474609, 0.006688767910003662, 0.006082208156585694, 0.0062847681045532225, 0.006187263965606689, 0.006215968132019043, 0.006256383895874023, 0.006211679935455322, 0.006125823974609375, 0.006379392147064209, 0.006526783943176269, 0.006590784072875977, 0.006518688201904297, 0.006479135990142822, 0.006424511909484863, 0.006437695980072021, 0.006342016220092774, 0.0063630399703979495, 0.0064617919921875, 0.006400352001190186, 0.006414048194885254, 0.006240543842315674, 0.006280735969543457, 0.006132192134857177, 0.006188159942626953, 0.006201759815216064, 0.006191264152526856, 0.006251071929931641, 0.0061478400230407714, 0.00613478422164917, 0.00637440013885498, 0.006485568046569824, 0.006318111896514892, 0.006315487861633301, 0.0061874880790710445, 0.006168384075164795, 0.006094624042510986, 0.006034624099731445, 0.006061535835266113, 0.006031583786010742, 0.006145023822784424, 0.0062863359451293946, 0.006276768207550049, 0.0061586880683898925, 0.00636627197265625, 0.006429632186889648, 0.0064204797744750975, 0.006291679859161377, 0.006173791885375976, 0.006222527980804443, 0.006141952037811279, 0.006155488014221192, 0.006160639762878418, 0.006238080024719238, 0.006093183994293213, 0.006111519813537598, 0.006350783824920654, 0.0063303041458129886, 0.006332543849945069, 0.0062518720626831056, 0.006242976188659668, 0.0062791681289672855, 0.0061851201057434085, 0.006068064212799072, 0.006004735946655273, 0.006038911819458008, 0.006015615940093994, 0.006028831958770752, 0.006009088039398193, 0.00608892822265625, 0.006134047985076905, 0.006310688018798828, 0.0063639039993286135, 0.0063526082038879396, 0.006220384120941162, 0.006207071781158448, 0.006164768218994141, 0.006038943767547608, 0.006070528030395508, 0.006048416137695313, 0.0060514240264892575, 
0.006055871963500977, 0.006183072090148926, 0.006347104072570801, 0.006454944133758545, 0.006505951881408691, 0.006426464080810547, 0.006367487907409668, 0.00624889612197876, 0.006256608009338379, 0.006230048179626465, 0.006424799919128418, 0.006057983875274659, 0.006498559951782227, 0.006460447788238525, 0.006382304191589356, 0.0064471039772033695, 0.006289408206939697, 0.006297344207763672, 0.006307871818542481, 0.0063879361152648926, 0.0064143362045288085, 0.0062873601913452145, 0.006250688076019287, 0.006131455898284912, 0.006221888065338135, 0.006342175960540771, 0.0063820481300354, 0.006338304042816162, 0.00623638391494751, 0.006258143901824951, 0.006285888195037842, 0.006350336074829102, 0.0065090560913085935, 0.006334464073181153, 0.006316031932830811, 0.00620688009262085, 0.006375487804412842, 0.006523104190826416, 0.006490143775939941, 0.006472224235534668, 0.006325215816497803, 0.006255392074584961, 0.006174975872039795, 0.006133503913879394, 0.006199295997619629, 0.006174719810485839, 0.006164480209350586, 0.006131711959838867, 0.006171807765960693, 0.006201663970947266, 0.0063508481979370115, 0.006567935943603515, 0.006392384052276611, 0.006473760128021241, 0.006891456127166748, 0.006373663902282715, 0.006431903839111328, 0.00642310380935669, 0.006405151844024658, 0.006299967765808105, 0.0061938238143920896, 0.00626204776763916, 0.006171360015869141, 0.006129407882690429, 0.006087135791778564, 0.006098720073699951, 0.0060661759376525876, 0.006061279773712158, 0.006371583938598633, 0.006484511852264404, 0.006563615798950195, 0.006442560195922852, 0.0063883838653564454, 0.006408192157745361, 0.006060031890869141, 0.0062269439697265625, 0.006217055797576905, 0.006143519878387451, 0.0060797438621521, 0.006110079765319824, 0.00604307222366333, 0.0060505599975585935, 0.005973440170288086, 0.005990816116333008, 0.005987552165985108, 0.005990431785583496, 0.005960415840148925, 0.006063519954681396, 0.006291200160980224, 0.006267744064331055, 0.006080512046813965, 0.006150144100189209, 0.005996543884277344, 0.006103040218353272, 0.006000639915466309, 0.00601635217666626, 0.005994175910949707, 0.006044640064239502, 0.006025184154510498, 0.00612559986114502, 0.006193376064300537, 0.006233888149261475, 0.006188416004180909, 0.006079103946685791, 0.006084928035736084, 0.006065855979919434, 0.006127295970916748, 0.0060910720825195315, 0.006051839828491211, 0.006017024040222168, 0.006033184051513672, 0.006013440132141113, 0.006033152103424072, 0.006117216110229492, 0.006416512012481689, 0.006428671836853027, 0.006489952087402343, 0.0065414719581604, 0.0064839677810668945, 0.0064692158699035645, 0.006375840187072754, 0.006318143844604492, 0.006313920021057129, 0.00614739179611206, 0.006134367942810058, 0.006090559959411621, 0.006202976226806641, 0.006089312076568603, 0.006100480079650879, 0.006111839771270752, 0.006103040218353272, 0.006029439926147461, 0.006084479808807373, 0.006189184188842773, 0.006073311805725097, 0.006073247909545898, 0.006238207817077636, 0.006025728225708008, 0.006159872055053711, 0.00615664005279541, 0.0061132159233093265, 0.006126815795898437, 0.00610745620727539, 0.006160255908966065, 0.006083168029785156, 0.00608028793334961, 0.006062528133392334, 0.006122432231903076, 0.0060342721939086914, 0.006114880084991455, 0.006039999961853028, 0.006031360149383545, 0.006002848148345947, 0.006000607967376709, 0.005936351776123047, 0.005968544006347656, 0.005928256034851074, 0.006011360168457032, 0.005955264091491699, 0.005996928215026855, 0.005961343765258789, 
0.006068287849426269, 0.006064223766326904, 0.0060993280410766605, 0.006006175994873047, 0.006093376159667969, 0.006051167964935302, 0.006216383934020996, 0.006110911846160889, 0.006062528133392334, 0.0060126399993896485, 0.005975488185882568, 0.006025951862335205, 0.005996511936187744, 0.006082496166229248, 0.005963456153869629, 0.005976480007171631, 0.006033408164978027, 0.006270495891571045, 0.006408671855926514, 0.006436992168426514, 0.00635481595993042, 0.006428864002227783, 0.006422336101531982, 0.006315743923187256, 0.006238495826721192, 0.006180863857269287, 0.0061996479034423825, 0.006098495960235596, 0.006084703922271729, 0.006024608135223389, 0.006019455909729004, 0.005974239826202392, 0.006000256061553955, 0.005927584171295166, 0.00599183988571167, 0.005986271858215332, 0.00624291181564331, 0.006526048183441162, 0.006341279983520508, 0.005863423824310303, 0.006123519897460937, 0.006124767780303955, 0.00605398416519165, 0.006046400070190429, 0.006004831790924072, 0.00598419189453125, 0.00603439998626709, 0.006181024074554444, 0.0060136961936950685, 0.00599283218383789, 0.005920447826385498, 0.005955967903137207, 0.005933856010437012, 0.005976128101348877, 0.005927264213562012, 0.005923295974731445, 0.005905695915222168, 0.005988863945007325, 0.005998176097869873, 0.006101632118225097, 0.006268032073974609, 0.006323071956634521, 0.0065491518974304195, 0.006379871845245361, 0.0061494078636169435, 0.006073376178741455, 0.0060044159889221195, 0.0059920320510864255, 0.005978528022766113, 0.005963776111602783, 0.005942912101745606, 0.0059498238563537595, 0.00591212797164917, 0.0059433279037475585, 0.005898848056793213, 0.005949567794799804, 0.005912255764007568, 0.005931295871734619, 0.005997568130493164, 0.006108928203582764, 0.006393983840942383, 0.006373888015747071, 0.006304096221923828, 0.006280288219451904, 0.006288127899169922, 0.00618668794631958, 0.006004672050476074, 0.005966368198394775, 0.005976096153259277, 0.005947360038757324, 0.006011072158813476, 0.005924352169036865, 0.006003007888793945, 0.005971968173980713, 0.00606822395324707, 0.005928959846496582, 0.005998623847961426, 0.005969823837280274, 0.0061502718925476076, 0.00624019193649292, 0.0061328959465026854, 0.006015071868896485, 0.0057849278450012205, 0.0059688959121704105, 0.0060002880096435545, 0.005978271961212158, 0.005995744228363037, 0.005912320137023926, 0.0059827518463134765, 0.006021471977233886, 0.006016895771026611, 0.006072447776794433, 0.005980127811431884, 0.005994527816772461, 0.006014976024627685, 0.006030432224273682, 0.005996511936187744, 0.005995456218719482, 0.006004735946655273, 0.006012224197387696, 0.005935808181762695, 0.005937280178070069, 0.00604966402053833, 0.006230016231536865, 0.006453248023986816, 0.006352255821228028, 0.006274847984313965, 0.00631712007522583, 0.006347743988037109, 0.006470143795013428, 0.006451519966125488, 0.006684351921081543, 0.006367775917053222, 0.006267712116241455, 0.006167520046234131, 0.005980160236358643, 0.006047743797302246, 0.006021344184875489, 0.006053696155548096, 0.005959360122680664, 0.006027008056640625, 0.005945888042449951, 0.005984384059906006, 0.005894015789031983, 0.006018176078796387, 0.00593395185470581, 0.005955584049224853, 0.0060269122123718264, 0.006312287807464599, 0.006440095901489258, 0.006320608139038086, 0.006125823974609375, 0.006037888050079346, 0.0059983677864074705, 0.0060412797927856445, 0.006050079822540283, 0.005973311901092529, 0.005982975959777832, 0.006041728019714356, 0.006018847942352295, 0.006092832088470459, 
0.0060702719688415525, 0.00601043176651001, 0.006035583972930908, 0.005996863842010498, 0.00571830415725708, 0.00597760009765625, 0.005939231872558594, 0.0059732160568237306, 0.005960512161254883, 0.005957632064819336, 0.005944767951965332, 0.005994688034057618, 0.006013472080230713, 0.006033184051513672, 0.005986559867858887, 0.006039775848388672, 0.0060433921813964845, 0.006076704025268555, 0.006024064064025879, 0.006093152046203613, 0.006001120090484619, 0.006060031890869141, 0.006020448207855225, 0.006078176021575928, 0.006093791961669922, 0.00602726411819458, 0.005966047763824463, 0.005991487979888916, 0.005982336044311524, 0.005960544109344482, 0.005969664096832275, 0.006103199958801269, 0.006364511966705322, 0.006425087928771973, 0.006424352169036866, 0.006443071842193604, 0.0065168957710266115, 0.006370975971221924, 0.006370687961578369, 0.006353663921356201, 0.0061561279296875, 0.006211904048919678, 0.0061010560989379885, 0.006090752124786377, 0.006010015964508056, 0.00604860782623291, 0.006236159801483154, 0.005990399837493897, 0.00601087999343872, 0.005996672153472901, 0.00602294397354126, 0.006016511917114258, 0.006050399780273438, 0.006316031932830811, 0.006278783798217774, 0.006130080223083496, 0.006166016101837158, 0.006275551795959473, 0.006389472007751465, 0.00618287992477417, 0.006101503849029541, 0.006090496063232422, 0.0061051521301269535, 0.006059967994689942, 0.006326432228088379, 0.006035359859466552, 0.006033472061157227, 0.005752831935882568, 0.0060356159210205075, 0.005936031818389893, 0.005987360000610352, 0.005955488204956055, 0.006002560138702393, 0.005959743976593017, 0.006010367870330811, 0.005978687763214112, 0.00606822395324707, 0.006017087936401367, 0.006042623996734619, 0.006076960086822509, 0.006073823928833008, 0.006103936195373535, 0.0060910720825195315, 0.006091839790344238, 0.006062975883483887, 0.006149312019348144, 0.006130303859710693, 0.006112576007843018, 0.006180736064910888, 0.006131840229034424, 0.0060217280387878415, 0.006031455993652343, 0.005939328193664551, 0.005975935935974121, 0.005959360122680664, 0.006052031993865967, 0.006234208106994629, 0.00655731201171875, 0.00655951976776123, 0.006455872058868408, 0.006457151889801026, 0.0064330239295959475, 0.006379007816314697, 0.006131392002105713, 0.006113696098327637, 0.006167263984680176, 0.00608403205871582, 0.006092160224914551, 0.006030144214630127, 0.006004672050476074, 0.005966207981109619, 0.005994304180145264, 0.005958784103393555, 0.005989183902740478, 0.005955584049224853, 0.005969120025634766, 0.006038368225097656, 0.006219711780548096, 0.0061562881469726565, 0.006203104019165039, 0.006199584007263183, 0.006162432193756104, 0.006068287849426269, 0.0060638079643249515, 0.005968128204345703, 0.0060201921463012694, 0.005993728160858154, 0.00617468786239624, 0.006400864124298095, 0.006084671974182129, 0.005830495834350586, 0.00610313606262207, 0.006026559829711914, 0.006130176067352295, 0.00604150390625, 0.006032896041870117, 0.0060217280387878415, 0.005990784168243408, 0.005957503795623779, 0.005926400184631348, 0.005952000141143799, 0.0060208640098571775, 0.006024608135223389, 0.006037407875061035, 0.006036416053771972, 0.0060085439682006836, 0.0060152640342712406, 0.006039552211761475, 0.006014944076538086, 0.006035520076751709, 0.006027232170104981, 0.006017216205596924, 0.005966976165771485, 0.005962431907653809, 0.005986303806304932, 0.005989408016204834, 0.00602950382232666, 0.005972767829895019, 0.006166528224945069, 0.006420447826385498, 0.0064980158805847165, 
0.00632863998413086, 0.006455296039581298, 0.006471712112426758, 0.006507743835449218, 0.006408959865570068, 0.006246335983276367, 0.006293568134307861, 0.006256319999694824, 0.006134079933166504, 0.006110496044158935, 0.006025951862335205, 0.006008831977844238, 0.006017024040222168, 0.005963007926940918, 0.006025375843048096, 0.005999199867248535, 0.005965151786804199, 0.005992159843444824, 0.0059539518356323245, 0.006113311767578125, 0.006276768207550049, 0.00624073600769043, 0.006053664207458496, 0.0060136961936950685, 0.005996511936187744, 0.006004096031188965, 0.005982592105865479, 0.005983967781066895, 0.006072735786437989, 0.005949440002441406, 0.005951488018035888, 0.00601859188079834, 0.0060785279273986815, 0.006256703853607178, 0.006608831882476807, 0.006103104114532471, 0.005994336128234863, 0.005976416110992431, 0.0060165758132934575, 0.006070816040039063, 0.0060415358543396, 0.006125408172607422, 0.005971583843231201, 0.006031551837921143, 0.0060126399993896485, 0.0060824317932128905, 0.005994016170501709, 0.006110079765319824, 0.006024511814117431, 0.0060750718116760255, 0.006047935962677002, 0.0060661759376525876, 0.006077983856201172, 0.006032800197601319, 0.006023263931274414, 0.0060152320861816405, 0.006079360008239746, 0.006043456077575684, 0.006074048042297363, 0.006452991962432862, 0.006256608009338379, 0.00640880012512207, 0.006524767875671387, 0.006377471923828125, 0.0063805441856384275, 0.006441952228546143, 0.006280352115631103, 0.0062657279968261715, 0.006302815914154053, 0.006370463848114014, 0.0062991042137146, 0.006215968132019043, 0.006140223979949951, 0.006065695762634277, 0.006123680114746094, 0.005989376068115234, 0.005999839782714843, 0.005955359935760498, 0.006092512130737305, 0.005951712131500244, 0.00603276777267456, 0.006220064163208008, 0.006422880172729492, 0.006346176147460938, 0.0061543679237365725, 0.006097536087036132, 0.006162303924560547, 0.006078464031219482, 0.006067999839782715, 0.006039775848388672, 0.006053088188171386, 0.006179615974426269, 0.006032735824584961, 0.006146687984466553, 0.006250527858734131]",tokens/s,160.37232171093808,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,937.94304,6533.61152,0.0,6138.363904,6060.931072,s,1,7.03056591796875,7.03056591796875,0.0,7.03056591796875,7.03056591796875,7.03056591796875,7.03056591796875,[7.03056591796875],,kWh,5.269836891667031e-06,5.705820254481695e-07,2.50000199999989e-06,8.34042091711509e-06,,MB,1265.094656,6556.680192,0.0,6150.946816,5419.87328,s,10,4.751995330810547,0.4751995330810546,0.004148406510364178,0.4741061706542969,0.4794715362548828,0.48165538482666015,0.483402463684082,"[0.47891998291015625, 0.47111331176757815, 0.4728049621582031, 0.4756724853515625, 0.47898623657226563, 0.47389395141601565, 0.473724609375, 0.46872216796875, 0.47431838989257813, 
0.4838392333984375]",tokens/s,538.7210680536047,kWh,1.4066788356943664e-05,1.5512795423604402e-06,9.333168842190487e-06,2.4951236741494588e-05,tokens/kWh,10260012.465605162,MB,1313.3824,6556.680192,0.0,6150.946816,5419.87584,s,10,15.942974975585939,1.594297497558594,0.0036387952473232316,1.5941470336914063,1.5998011474609375,1.6006009521484375,1.6012407958984374,"[1.5903463134765625, 1.589517578125, 1.5938515625, 1.5944425048828126, 1.591487548828125, 1.6014007568359374, 1.5948016357421875, 1.59199658203125, 1.595507080078125, 1.5996234130859375]",tokens/s,39.515836973007985,kWh,4.62588489234756e-05,5.1023211861562766e-06,3.0745529887410417e-05,8.21066999970423e-05,tokens/kWh,767294.2646857008,,s,630,15.940892042160012,0.025303003241523864,0.00042494545731801465,0.025210111618041993,0.025517606925964356,0.025883853435516357,0.026789559669494627,"[0.026491424560546876, 0.02535215950012207, 0.025362464904785158, 0.025143295288085937, 0.025170080184936522, 0.025184095382690428, 0.0253439998626709, 0.02517196846008301, 0.02509823989868164, 0.025260032653808592, 0.025116672515869142, 0.02509823989868164, 0.025081119537353515, 0.02512950325012207, 0.025064800262451174, 0.02516796875, 0.025134880065917967, 0.0249036808013916, 0.02514224052429199, 0.02531123161315918, 0.025194271087646485, 0.02539952087402344, 0.02509414482116699, 0.025145343780517578, 0.025061376571655275, 0.025284608840942382, 0.02533990478515625, 0.026314176559448243, 0.025184064865112304, 0.02515123176574707, 0.02598111915588379, 0.025219903945922852, 0.02513020706176758, 0.025242399215698243, 0.025515264511108398, 0.025279232025146484, 0.025227264404296876, 0.025362016677856446, 0.02518684768676758, 0.025316448211669923, 0.02597667121887207, 0.025056352615356447, 0.02540070343017578, 0.024870912551879884, 0.024844768524169922, 0.025038848876953124, 0.025145343780517578, 0.025016319274902343, 0.02503615951538086, 0.025055871963500977, 0.02528223991394043, 0.025479488372802735, 0.02506342315673828, 0.02509791946411133, 0.025213247299194337, 0.024941888809204102, 0.025211584091186522, 0.02509814453125, 0.02558576011657715, 0.025158720016479494, 0.02512291145324707, 0.025065311431884764, 0.025026655197143553, 0.026490976333618164, 0.025425151824951173, 0.02507792091369629, 0.02506220817565918, 0.02492403221130371, 0.02509404754638672, 0.024972576141357422, 0.024978208541870116, 0.024962528228759766, 0.025291231155395506, 0.025075136184692384, 0.024899263381958008, 0.02503343963623047, 0.02512886428833008, 0.025054624557495117, 0.024989984512329103, 0.02493440055847168, 0.02503059196472168, 0.025325824737548828, 0.02504640007019043, 0.0249574089050293, 0.025134719848632813, 0.024988576889038085, 0.025433504104614257, 0.02544291114807129, 0.02523756790161133, 0.02524153518676758, 0.02511430358886719, 0.025763999938964843, 0.025280672073364256, 0.025212032318115234, 0.025181055068969727, 0.025628288269042968, 0.02520694351196289, 0.02514361572265625, 0.02509555244445801, 0.02539308738708496, 0.025387615203857423, 0.0252620792388916, 0.02521820831298828, 0.02520969581604004, 0.025114559173583986, 0.02524985694885254, 0.025102176666259766, 0.02521308708190918, 0.025220544815063476, 0.02536412811279297, 0.025396127700805664, 0.025436159133911132, 0.025452415466308595, 0.025367904663085937, 0.025278495788574218, 0.02531193542480469, 0.025319488525390624, 0.02532102394104004, 0.025248191833496095, 0.02531123161315918, 0.025161216735839844, 0.025142784118652343, 0.02523257637023926, 0.025171775817871094, 0.025194208145141603, 
0.0253623046875, 0.026443967819213866, 0.025511936187744142, 0.025569280624389647, 0.02532966423034668, 0.025458688735961913, 0.02532966423034668, 0.025399295806884766, 0.025535839080810547, 0.025379487991333008, 0.025360511779785155, 0.0255281925201416, 0.025836864471435548, 0.025456703186035157, 0.02545712089538574, 0.025342111587524415, 0.0252620792388916, 0.02532966423034668, 0.025210880279541017, 0.025253087997436523, 0.025185056686401367, 0.02517945671081543, 0.025329952239990235, 0.025242015838623046, 0.025307199478149415, 0.02524153518676758, 0.025484928131103514, 0.025387392044067383, 0.02533072090148926, 0.025228256225585936, 0.025207839965820312, 0.02538800048828125, 0.02527027130126953, 0.025161727905273438, 0.025058847427368164, 0.02546112060546875, 0.025057504653930664, 0.02510220718383789, 0.025180063247680663, 0.02513315200805664, 0.025187744140625, 0.025325727462768555, 0.02535878372192383, 0.025212928771972655, 0.025193471908569336, 0.025178848266601564, 0.025102624893188475, 0.02536969566345215, 0.025148319244384765, 0.025269407272338867, 0.025684831619262695, 0.025196544647216795, 0.025145343780517578, 0.025253536224365235, 0.025073087692260743, 0.025432384490966797, 0.025003679275512697, 0.025014848709106446, 0.025165824890136718, 0.02502284812927246, 0.024985599517822265, 0.025016319274902343, 0.025171039581298828, 0.025204736709594725, 0.026668224334716797, 0.02544732856750488, 0.02529052734375, 0.025187679290771484, 0.02536128044128418, 0.025206783294677734, 0.025208831787109375, 0.025223167419433593, 0.02515558433532715, 0.025212608337402343, 0.025256256103515624, 0.025255935668945313, 0.025156736373901367, 0.02510323143005371, 0.02510438346862793, 0.02508799934387207, 0.025056768417358398, 0.025057184219360353, 0.025456352233886717, 0.026794784545898436, 0.025262304306030273, 0.025206367492675782, 0.025155872344970704, 0.025188352584838865, 0.025083904266357423, 0.025196544647216795, 0.025194496154785157, 0.02509004783630371, 0.02514841651916504, 0.025992191314697266, 0.025190143585205077, 0.025106176376342774, 0.02533145523071289, 0.025203104019165038, 0.02527471923828125, 0.025315359115600587, 0.025208671569824218, 0.025305215835571288, 0.025267871856689453, 0.025211231231689453, 0.025199712753295897, 0.025185184478759767, 0.025179967880249024, 0.025103551864624023, 0.025956735610961915, 0.025063583374023438, 0.025096160888671875, 0.02523619270324707, 0.025237279891967772, 0.02665385627746582, 0.026317440032958984, 0.025419424057006836, 0.02521468734741211, 0.02515439987182617, 0.025083040237426756, 0.025133920669555665, 0.025109983444213866, 0.025167999267578126, 0.02511827278137207, 0.025068384170532226, 0.025017791748046875, 0.02514531135559082, 0.02514406394958496, 0.02663702392578125, 0.02568502426147461, 0.025254079818725586, 0.0252193603515625, 0.025043455123901368, 0.025370624542236327, 0.025260032653808592, 0.02517955207824707, 0.025200895309448242, 0.02597462463378906, 0.025126815795898438, 0.025082464218139647, 0.025165824890136718, 0.025124864578247072, 0.025093568801879882, 0.02515001678466797, 0.02506937599182129, 0.025135295867919922, 0.02520195198059082, 0.025166015625, 0.025416223526000977, 0.025141248703002928, 0.0265031681060791, 0.025483264923095703, 0.02539107131958008, 0.025221151351928713, 0.025128000259399413, 0.025107295989990234, 0.02505491256713867, 0.025140768051147462, 0.025071775436401367, 0.025107168197631837, 0.025233407974243165, 0.025234848022460937, 0.025139551162719726, 0.025168127059936523, 0.025144479751586915, 
0.02519536018371582, 0.025126943588256834, 0.025152639389038087, 0.02528937530517578, 0.025655487060546874, 0.02520182418823242, 0.025108768463134767, 0.025103967666625978, 0.02519139289855957, 0.025143295288085937, 0.02512441635131836, 0.02531711959838867, 0.025215679168701172, 0.025161727905273438, 0.025091583251953126, 0.025115135192871094, 0.025202688217163087, 0.025157087326049803, 0.02514588737487793, 0.025214624404907227, 0.025362783432006836, 0.025144575119018554, 0.02551795196533203, 0.025188703536987305, 0.025192991256713867, 0.02513920021057129, 0.02677676773071289, 0.02557145690917969, 0.025342655181884766, 0.025390815734863282, 0.025340480804443358, 0.025280223846435548, 0.02535424041748047, 0.02540867233276367, 0.02523798370361328, 0.025298336029052734, 0.025143455505371094, 0.025181055068969727, 0.025225120544433592, 0.025172000885009767, 0.025894912719726562, 0.025206335067749025, 0.02514784049987793, 0.025953567504882813, 0.02520956802368164, 0.02522064018249512, 0.025111007690429687, 0.025198591232299804, 0.025380863189697265, 0.025472959518432616, 0.025129024505615234, 0.025206783294677734, 0.025126495361328126, 0.025098655700683595, 0.025290496826171877, 0.025155839920043947, 0.025165504455566406, 0.02507321548461914, 0.02505571174621582, 0.02503494453430176, 0.025051231384277343, 0.025214975357055663, 0.025204736709594725, 0.02530303955078125, 0.025111616134643553, 0.025265087127685548, 0.025176063537597656, 0.02517747116088867, 0.025082496643066405, 0.025159231185913088, 0.0250467529296875, 0.02526598358154297, 0.025211103439331056, 0.02523411178588867, 0.025407487869262696, 0.02529689598083496, 0.02509619140625, 0.025151487350463866, 0.025222751617431642, 0.025217439651489256, 0.025870336532592773, 0.032484737396240235, 0.02551612854003906, 0.025241567611694337, 0.026204736709594726, 0.025092096328735353, 0.025176000595092774, 0.02535615921020508, 0.02549135971069336, 0.02657561683654785, 0.025503904342651366, 0.025578752517700195, 0.025743967056274415, 0.02542297554016113, 0.025275264739990234, 0.025186016082763673, 0.025321311950683593, 0.025302560806274414, 0.025338176727294923, 0.025530464172363283, 0.025243839263916015, 0.02536857604980469, 0.026101856231689452, 0.025821407318115233, 0.02536476707458496, 0.025345760345458983, 0.025187776565551757, 0.025086271286010743, 0.025121023178100586, 0.025306175231933594, 0.02544735908508301, 0.025513439178466796, 0.02551171112060547, 0.025432607650756837, 0.02519785690307617, 0.02524460792541504, 0.025210527420043944, 0.02521504020690918, 0.025198879241943358, 0.025210527420043944, 0.025166175842285157, 0.025200639724731445, 0.025167423248291014, 0.025166271209716796, 0.025198591232299804, 0.02548512077331543, 0.025276128768920898, 0.0251232967376709, 0.025078847885131837, 0.02517888069152832, 0.025578943252563477, 0.025221887588500976, 0.025182207107543944, 0.025155296325683595, 0.02503878402709961, 0.025147743225097656, 0.025092096328735353, 0.025284608840942382, 0.0251691837310791, 0.025116416931152345, 0.02514019203186035, 0.02525388717651367, 0.025380863189697265, 0.025208831787109375, 0.025124864578247072, 0.02519424057006836, 0.025192415237426758, 0.025044384002685546, 0.025084800720214843, 0.025432064056396485, 0.02537388801574707, 0.025228096008300782, 0.02667519950866699, 0.02565878486633301, 0.025211488723754883, 0.025198591232299804, 0.025218080520629883, 0.025115615844726564, 0.025249792098999024, 0.025169919967651368, 0.025162975311279298, 0.02506831932067871, 0.025168991088867186, 0.025033632278442384, 
0.025241600036621094, 0.02513491249084473, 0.025088191986083985, 0.02510256004333496, 0.025138847351074217, 0.025055360794067384, 0.02528623962402344, 0.02520323181152344, 0.025152511596679687, 0.02516057586669922, 0.025177248001098634, 0.025053119659423827, 0.025193376541137694, 0.02509187126159668, 0.02528483200073242, 0.025279712677001954, 0.025860128402709962, 0.025232128143310547, 0.025404895782470703, 0.025294719696044923, 0.025182912826538086, 0.025221088409423827, 0.02546073532104492, 0.025323488235473632, 0.0252109432220459, 0.025255775451660155, 0.02527039909362793, 0.02516387176513672, 0.02584364891052246, 0.02530508804321289, 0.025364383697509766, 0.02518364715576172, 0.025242271423339843, 0.02512076759338379, 0.025272319793701172, 0.025221120834350585, 0.025208511352539063, 0.025183807373046874, 0.025314016342163084, 0.025315359115600587, 0.02532966423034668, 0.025302560806274414, 0.02533340835571289, 0.025205568313598634, 0.02515350341796875, 0.025331743240356447, 0.025236768722534178, 0.025298944473266603, 0.02521353530883789, 0.02512089538574219, 0.02521062469482422, 0.026804704666137696, 0.025528160095214844, 0.02536412811279297, 0.02532908821105957, 0.025168895721435547, 0.025212928771972655, 0.027256607055664062, 0.025661216735839844, 0.025605791091918944, 0.025766687393188478, 0.025517087936401367, 0.025239744186401368, 0.025242399215698243, 0.02520412826538086, 0.025129119873046876, 0.02522972869873047, 0.025212928771972655, 0.025393056869506835, 0.025188480377197266, 0.02523036766052246, 0.02519910430908203, 0.025184703826904298, 0.025167903900146484, 0.025063295364379883, 0.025204864501953125, 0.025214975357055663, 0.0253687686920166, 0.02527008056640625, 0.025251840591430662, 0.02523516845703125, 0.02510054397583008, 0.025221151351928713, 0.025103839874267578, 0.02516636848449707, 0.025042943954467774, 0.025456640243530275, 0.02613657569885254, 0.02515558433532715, 0.02532761573791504, 0.025341951370239257, 0.02523257637023926, 0.02517491149902344, 0.02525103950500488, 0.02515567970275879, 0.025014911651611328, 0.025204736709594725, 0.025061376571655275, 0.02516489601135254, 0.025820064544677734, 0.02532352066040039, 0.02550169563293457, 0.025333791732788085, 0.025243263244628906, 0.02505116844177246, 0.025123136520385742, 0.02513715171813965, 0.025087520599365233, 0.02526265525817871, 0.025110591888427736, 0.02523494338989258, 0.025101696014404297, 0.02514384078979492, 0.025073631286621094, 0.02667519950866699, 0.025620479583740235, 0.02530508804321289, 0.025393152236938478, 0.02525388717651367, 0.025210880279541017, 0.025411584854125976, 0.025309024810791017, 0.027152544021606446, 0.02581679916381836, 0.025331167221069335, 0.025123647689819336, 0.025169727325439453, 0.025180063247680663, 0.025248031616210937, 0.025249792098999024, 0.025208831787109375, 0.025977951049804687, 0.025055360794067384, 0.025316127777099608, 0.025101408004760743, 0.02526710319519043, 0.02511257553100586, 0.025159391403198242, 0.02507804870605469, 0.02534918403625488, 0.027118528366088867, 0.02547318458557129, 0.0255629768371582, 0.025350143432617187, 0.025185632705688476, 0.02527052879333496, 0.025225631713867186, 0.025517568588256836, 0.02543052864074707, 0.025173343658447266, 0.025100992202758788, 0.02545212745666504, 0.02509657669067383, 0.02510236740112305, 0.02509129524230957, 0.025184511184692383, 0.025119232177734374, 0.025126527786254883, 0.025079296112060546, 0.025534784317016602, 0.02512544059753418, 0.025134592056274413, 0.025278079986572267, 0.025240447998046874, 
0.025968639373779297, 0.026963712692260743, 0.025296735763549804, 0.02538470458984375, 0.025391775131225584, 0.02515478324890137, 0.025139999389648438, 0.02555459213256836, 0.02506947135925293, 0.025082239151000975, 0.02506915283203125, 0.025167776107788087, 0.025118976593017577]",tokens/s,39.52100035141028,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 843, in __init__ self.transformer = FalconModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 650, in __init__ self.h = nn.ModuleList([FalconDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 650, in <listcomp> self.h = nn.ModuleList([FalconDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 420, in __init__ self.mlp = FalconMLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 405, in __init__ self.dense_4h_to_h = FalconLinear(4 * hidden_size, hidden_size, bias=config.bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 512.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 214.12 MiB is free. Process 200033 has 14.53 GiB memory in use. Of the allocated memory 14.41 GiB is allocated by PyTorch, and 1.37 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 990, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 565, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 140, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 137763 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,782.413824,741.277696,0.0,346.03008,335.0016,s,1,7.04750732421875,7.04750732421875,0.0,7.04750732421875,7.04750732421875,7.04750732421875,7.04750732421875,[7.04750732421875],,kWh,2.1352690916804325e-06,2.2859520841081572e-07,1.0325008259967627e-06,3.396365126088011e-06,,MB,1113.2928,764.346368,0.0,358.612992,302.626816,s,21,0.5210687370300293,0.02481279700142997,0.0006611941273767262,0.024646720886230468,0.024784448623657227,0.024796543121337892,0.027166284942626955,"[0.02775872039794922, 0.024570175170898437, 0.024735679626464845, 0.024618560791015626, 0.024701824188232423, 0.024796543121337892, 0.024646720886230468, 0.024646591186523438, 0.024784448623657227, 0.024670400619506837, 0.024628608703613282, 0.024597312927246092, 0.024624095916748048, 0.024617055892944335, 0.02470003128051758, 0.02471356773376465, 0.024626623153686522, 0.024702688217163087, 0.024634464263916016, 0.024665472030639648, 0.024629152297973633]",tokens/s,10317.256856824592,kWh,8.470441486111197e-07,9.33701046825079e-08,5.591300769333392e-07,1.499544330226967e-06,tokens/kWh,170718527.51512358,MB,1138.54464,779.026432,0.0,373.293056,302.629376,s,21,9.710362335205078,0.46239820643833707,0.0026130249103120417,0.46193701171875,0.46614492797851564,0.4669422912597656,0.4686030334472656,"[0.46901821899414065, 0.4669422912597656, 0.4628614501953125, 0.46303500366210937, 0.46614492797851564, 0.462698974609375, 0.46578253173828127, 0.46192800903320314, 0.4624483947753906, 0.46095526123046876, 0.46193701171875, 0.4588849792480469, 0.4605008544921875, 0.4591085205078125, 0.45975241088867186, 0.4604742431640625, 0.4616591491699219, 0.45940585327148437, 0.46350448608398437, 0.4609849853515625, 0.46233477783203125]",tokens/s,136.24620321359603,kWh,1.330715044999928e-05,1.4675907316296727e-06,6.077078935733302e-06,2.085182011736225e-05,tokens/kWh,3021318.9853648846,,s,1323,9.70028831863405,0.007332039545452784,0.00013567678003000158,0.0073137922286987305,0.007405478477478028,0.00746493124961853,0.007900964536666867,"[0.007321599960327148, 0.007458816051483155, 0.007464384078979492, 0.007450751781463623, 0.007417888164520264, 0.007374944210052491, 0.00791971206665039, 0.007330016136169433, 0.007371808052062988, 0.007412479877471924, 0.007480639934539795, 0.00739737606048584, 0.007377823829650879, 0.0072969279289245605, 0.0072971200942993164, 0.0073619837760925295, 0.007322175979614258, 0.007415808200836181, 0.007343711853027343, 0.007299424171447754, 0.007313055992126465, 0.0074629120826721195, 0.00733190393447876, 0.007366752147674561, 0.007343616008758545, 0.007331808090209961, 0.007334688186645508, 0.007426047801971435, 0.007348320007324219, 0.007491775989532471, 
0.007419616222381592, 0.00738918399810791, 0.0073373122215271, 0.007330463886260986, 0.007384736061096192, 0.0073734397888183595, 0.0073499841690063475, 0.007387231826782226, 0.007294144153594971, 0.007273183822631836, 0.007350240230560303, 0.007391583919525146, 0.007491168022155762, 0.007391327857971191, 0.007350272178649903, 0.007290880203247071, 0.007595424175262451, 0.008765376091003418, 0.009026207923889161, 0.0074035201072692874, 0.007334239959716797, 0.00762224006652832, 0.007505216121673584, 0.007385248184204101, 0.007383039951324463, 0.007392191886901855, 0.007370175838470459, 0.007259840011596679, 0.007280831813812256, 0.0074878082275390626, 0.007350560188293457, 0.007371935844421387, 0.007334784030914306, 0.007037375926971436, 0.007291135787963868, 0.007297056198120117, 0.007330944061279297, 0.007381247997283936, 0.007400832176208496, 0.007348320007324219, 0.007316351890563965, 0.007401472091674805, 0.007339360237121582, 0.007398047924041748, 0.007444255828857422, 0.007335391998291016, 0.00728764820098877, 0.007244863986968994, 0.00733846378326416, 0.007360896110534668, 0.007331840038299561, 0.007360511779785156, 0.007333951950073242, 0.007276607990264893, 0.007320864200592041, 0.007303775787353516, 0.007311359882354736, 0.0072887039184570315, 0.007288959980010987, 0.0072704958915710445, 0.007372799873352051, 0.0073769278526306156, 0.007998816013336182, 0.007430399894714355, 0.00751961612701416, 0.0074019842147827145, 0.008697855949401855, 0.00882259178161621, 0.007452832221984863, 0.0073957757949829105, 0.0073359360694885255, 0.007363776206970215, 0.007340960025787354, 0.007405119895935059, 0.007448416233062744, 0.007402143955230713, 0.007308640003204346, 0.007245696067810058, 0.007328383922576905, 0.0073292160034179685, 0.007309887886047363, 0.007279647827148438, 0.007290175914764404, 0.007327392101287842, 0.007458687782287598, 0.007440512180328369, 0.007303167819976806, 0.007370751857757568, 0.007315423965454102, 0.007261280059814453, 0.007557536125183105, 0.007438432216644287, 0.0074223999977111816, 0.00740556812286377, 0.007364607810974121, 0.007264256000518799, 0.007036928176879883, 0.00726201581954956, 0.007380159854888916, 0.007341055870056152, 0.007352352142333984, 0.007358431816101074, 0.00758131217956543, 0.007330336093902588, 0.007329631805419922, 0.007401472091674805, 0.00739737606048584, 0.007426144123077393, 0.007493535995483399, 0.007358784198760986, 0.00729807996749878, 0.007305600166320801, 0.0074423041343688965, 0.007430880069732666, 0.007374527931213379, 0.007366655826568603, 0.0073768959045410155, 0.00729641580581665, 0.007371359825134277, 0.007417471885681153, 0.007317887783050537, 0.00733081579208374, 0.007271743774414062, 0.007291935920715332, 0.007350944042205811, 0.007366015911102295, 0.00731609582901001, 0.007423999786376953, 0.007280640125274658, 0.007305471897125244, 0.007315199851989746, 0.007315743923187256, 0.007317215919494629, 0.007356416225433349, 0.00728227186203003, 0.007346591949462891, 0.00734822416305542, 0.0073211522102355955, 0.007364640235900879, 0.007303904056549073, 0.007271455764770508, 0.007287807941436767, 0.0073144960403442386, 0.00731606388092041, 0.0073361282348632815, 0.007323455810546875, 0.007264256000518799, 0.00730947208404541, 0.007411200046539307, 0.007332191944122314, 0.007342080116271973, 0.00732374382019043, 0.00730511999130249, 0.007294591903686524, 0.007334271907806397, 0.007309663772583008, 0.0073376321792602535, 0.007268511772155762, 0.007392288208007813, 0.007016928195953369, 0.0073564801216125485, 
0.0072724480628967286, 0.008041760444641114, 0.007333792209625244, 0.007359360218048096, 0.007319488048553467, 0.007256063938140869, 0.007302656173706055, 0.007338047981262207, 0.007381631851196289, 0.007318496227264404, 0.007443295955657959, 0.007323647975921631, 0.0074336638450622555, 0.007567935943603515, 0.0074356160163879395, 0.007369152069091797, 0.007379456043243408, 0.007304768085479737, 0.00723740816116333, 0.007276576042175293, 0.007364160060882569, 0.007283487796783447, 0.007506144046783447, 0.007370528221130371, 0.007292640209197998, 0.007299359798431397, 0.007393280029296875, 0.007307263851165771, 0.007395071983337402, 0.007360928058624268, 0.007248960018157959, 0.007259263992309571, 0.0072681279182434085, 0.007286655902862549, 0.007380671977996826, 0.007294655799865723, 0.007234176158905029, 0.007354368209838867, 0.007321599960327148, 0.007342080116271973, 0.007342080116271973, 0.007302239894866943, 0.007338912010192871, 0.00733622407913208, 0.007339744091033935, 0.00738099193572998, 0.007350560188293457, 0.0072696318626403805, 0.007268703937530517, 0.007288735866546631, 0.0072893438339233394, 0.007304351806640625, 0.007479872226715088, 0.0073690562248229985, 0.007319200038909912, 0.007313536167144775, 0.0073229122161865235, 0.00730950403213501, 0.0072278079986572265, 0.0074299521446228025, 0.0073146882057189945, 0.007042975902557373, 0.007395359992980957, 0.007395391941070557, 0.007398623943328858, 0.007410463809967041, 0.0073359360694885255, 0.007612415790557861, 0.007259359836578369, 0.007343935966491699, 0.007319551944732666, 0.007314559936523438, 0.007364031791687012, 0.007663551807403564, 0.007315392017364502, 0.0073183999061584475, 0.007418591976165772, 0.007394559860229492, 0.007337759971618652, 0.007790624141693115, 0.008145919799804687, 0.007834496021270752, 0.008226367950439453, 0.007410111904144287, 0.007382976055145264, 0.007375264167785644, 0.007380640029907227, 0.007481599807739258, 0.007419424057006836, 0.007359871864318848, 0.007340479850769043, 0.0073480639457702635, 0.0072988481521606445, 0.007406367778778076, 0.007450784206390381, 0.007350240230560303, 0.00740064001083374, 0.007323679924011231, 0.007314080238342285, 0.007280640125274658, 0.007316736221313477, 0.007316224098205567, 0.007280640125274658, 0.007282368183135986, 0.007241631984710693, 0.007301536083221436, 0.007337376117706299, 0.0072914881706237795, 0.007318687915802002, 0.007258975982666015, 0.00730079984664917, 0.007408031940460205, 0.007364287853240967, 0.007348447799682617, 0.007367839813232422, 0.007245759963989257, 0.007332767963409424, 0.0074670081138610836, 0.007349472045898437, 0.00733673620223999, 0.007310368061065674, 0.007242239952087402, 0.00728553581237793, 0.007397215843200683, 0.007160223960876465, 0.007385087966918945, 0.0072993278503417965, 0.007341184139251709, 0.007346816062927246, 0.007270400047302246, 0.007347968101501465, 0.007333471775054932, 0.0073564801216125485, 0.0073508801460266116, 0.007253824234008789, 0.007301152229309082, 0.007346015930175781, 0.007379263877868652, 0.00738099193572998, 0.007426047801971435, 0.007294911861419678, 0.007358528137207031, 0.007299071788787842, 0.007284863948822022, 0.007317376136779785, 0.007329055786132813, 0.00739961576461792, 0.0073096961975097656, 0.0073053760528564455, 0.007343711853027343, 0.007294591903686524, 0.007346975803375244, 0.007344128131866455, 0.007298624038696289, 0.007352767944335937, 0.007346176147460938, 0.007351840019226074, 0.007342912197113037, 0.007286431789398193, 0.007301216125488281, 0.007364319801330566, 
0.007530303955078125, 0.00741209602355957, 0.007454912185668946, 0.007391392230987549, 0.0074584641456604, 0.007395423889160156, 0.007406879901885986, 0.007397535800933838, 0.007360799789428711, 0.007357696056365967, 0.007265376091003418, 0.0072436161041259765, 0.00728227186203003, 0.007315360069274903, 0.007418015956878662, 0.0073014721870422366, 0.007245823860168457, 0.007274496078491211, 0.007286911964416504, 0.007314432144165039, 0.0073491201400756835, 0.007414783954620361, 0.007249023914337158, 0.007361440181732177, 0.0073021121025085445, 0.0072951998710632324, 0.007276639938354492, 0.0074496960639953615, 0.0073794879913330075, 0.007335328102111817, 0.0073062081336975095, 0.00730953598022461, 0.007360288143157959, 0.007325215816497803, 0.007379072189331055, 0.007362912178039551, 0.0072765440940856935, 0.007292992115020752, 0.0073645439147949215, 0.007655488014221191, 0.007366591930389404, 0.007355487823486328, 0.0072895679473876955, 0.00725548791885376, 0.007379007816314697, 0.007291007995605469, 0.007291200160980224, 0.007309567928314209, 0.007270400047302246, 0.007288832187652588, 0.007329311847686768, 0.007387616157531738, 0.007538496017456054, 0.00743228816986084, 0.007292191982269287, 0.007392159938812256, 0.00733190393447876, 0.007589727878570557, 0.008124416351318359, 0.008281375885009766, 0.008227359771728516, 0.007449984073638916, 0.007387968063354492, 0.007374176025390625, 0.007328703880310059, 0.007326879978179932, 0.007361055850982666, 0.007378975868225098, 0.007368703842163086, 0.007378943920135498, 0.007332159996032715, 0.007306943893432618, 0.007290272235870361, 0.007326496124267578, 0.007390048027038574, 0.007287519931793213, 0.007350272178649903, 0.007287231922149659, 0.007295904159545899, 0.00730614423751831, 0.007329247951507569, 0.007325600147247315, 0.007301663875579834, 0.007251520156860352, 0.007238175868988037, 0.007323040008544922, 0.0072967357635498045, 0.007344736099243164, 0.007267776012420654, 0.00705456018447876, 0.0072930240631103515, 0.007339807987213134, 0.007473375797271729, 0.007307648181915283, 0.007334303855895996, 0.0072715520858764645, 0.00726636791229248, 0.007283008098602295, 0.007313727855682373, 0.007290976047515869, 0.007473152160644531, 0.0072540159225463864, 0.007216383934020996, 0.007266272068023682, 0.007557472229003906, 0.007322400093078613, 0.007306528091430664, 0.007246208190917969, 0.0073619518280029295, 0.008227680206298828, 0.00754252815246582, 0.007325535774230957, 0.007311520099639893, 0.007301119804382325, 0.007287968158721924, 0.007243648052215577, 0.007230112075805664, 0.007329951763153076, 0.007325856208801269, 0.007356256008148193, 0.007481855869293213, 0.007384736061096192, 0.007364607810974121, 0.007413119792938233, 0.007368383884429931, 0.007373760223388672, 0.007333216190338135, 0.0073816637992858885, 0.007241312026977539, 0.007311135768890381, 0.007287775993347168, 0.007304031848907471, 0.007266848087310791, 0.0072371201515197756, 0.007263008117675781, 0.00726800012588501, 0.0072726402282714845, 0.007253280162811279, 0.0072345280647277835, 0.007226655960083008, 0.0072585601806640625, 0.007317376136779785, 0.0073136320114135745, 0.007286880016326904, 0.007245312213897705, 0.007275008201599121, 0.007258111953735351, 0.007428095817565918, 0.00732912015914917, 0.007254687786102295, 0.007217023849487305, 0.0073012480735778805, 0.006948863983154297, 0.007316031932830811, 0.007288512229919434, 0.00730841588973999, 0.007392320156097412, 0.007323455810546875, 0.007315264225006103, 0.007315680027008057, 0.007354335784912109, 
0.007360000133514405, 0.0073118720054626465, 0.007258111953735351, 0.007345248222351075, 0.0073276801109313965, 0.007341023921966553, 0.007350272178649903, 0.007437535762786865, 0.0076500802040100095, 0.007684447765350342, 0.007245471954345703, 0.007296288013458252, 0.007356575965881348, 0.0073137922286987305, 0.0073851518630981446, 0.007481472015380859, 0.007284736156463623, 0.007333856105804443, 0.007317759990692139, 0.007322879791259765, 0.007323808193206787, 0.007272096157073975, 0.007321792125701904, 0.007334688186645508, 0.0073844480514526365, 0.0073647680282592776, 0.007352543830871582, 0.007307263851165771, 0.007546879768371582, 0.007370751857757568, 0.007320831775665284, 0.00738486385345459, 0.0072991042137146, 0.007308224201202393, 0.007395328044891358, 0.007321599960327148, 0.007363808155059814, 0.007291679859161377, 0.007283872127532959, 0.007260928153991699, 0.0073619837760925295, 0.0072997441291809085, 0.007325407981872559, 0.0072967357635498045, 0.007279104232788086, 0.007211071968078613, 0.007233535766601563, 0.007254111766815186, 0.007300159931182861, 0.007257952213287354, 0.007328767776489258, 0.007303167819976806, 0.007328991889953613, 0.0074271678924560545, 0.007000415802001953, 0.007299392223358154, 0.007573887825012207, 0.007312511920928955, 0.0072854719161987306, 0.0072887039184570315, 0.007604127883911133, 0.007291391849517822, 0.007296512126922608, 0.007289055824279785, 0.007227456092834473, 0.0072715840339660645, 0.007284512042999267, 0.007293087959289551, 0.007291744232177735, 0.007267839908599853, 0.007324160099029541, 0.007319551944732666, 0.007331840038299561, 0.0073134078979492185, 0.007387263774871826, 0.007274367809295654, 0.007259712219238281, 0.007305503845214844, 0.007304575920104981, 0.007322591781616211, 0.007280479907989502, 0.00723964786529541, 0.007284863948822022, 0.00729856014251709, 0.007381375789642334, 0.007513728141784668, 0.007276927947998047, 0.007259200096130371, 0.0072995519638061526, 0.007342400074005127, 0.007308767795562744, 0.007324351787567139, 0.007318719863891602, 0.007283679962158203, 0.007264095783233643, 0.007333888053894043, 0.007395328044891358, 0.007333663940429688, 0.007274720191955566, 0.007204351902008056, 0.007326208114624024, 0.007321599960327148, 0.007299071788787842, 0.007303328037261963, 0.007266464233398437, 0.007253888130187988, 0.007368512153625488, 0.007300511837005615, 0.007306879997253418, 0.007319968223571777, 0.007284351825714111, 0.007275455951690673, 0.007306464195251465, 0.007332640171051025, 0.007311359882354736, 0.007360703945159912, 0.007280064105987549, 0.006944767951965332, 0.007309375762939453, 0.007337920188903809, 0.00729702377319336, 0.007329504013061524, 0.007340320110321045, 0.007285056114196778, 0.007270080089569092, 0.007313024044036865, 0.007374752044677735, 0.00733135986328125, 0.007320511817932129, 0.007231328010559082, 0.007431583881378174, 0.007344096183776855, 0.0073359360694885255, 0.0073612799644470215, 0.007298367977142334, 0.007230175971984863, 0.007298975944519043, 0.0073974719047546385, 0.007366655826568603, 0.007291232109069824, 0.007266304016113281, 0.007702400207519531, 0.007789792060852051, 0.007716544151306152, 0.007355008125305176, 0.007377312183380127, 0.007388864040374756, 0.007356736183166504, 0.007339680194854736, 0.007218656063079834, 0.007310048103332519, 0.007311359882354736, 0.007345791816711426, 0.007344128131866455, 0.007291039943695068, 0.00729315185546875, 0.007315392017364502, 0.00734006404876709, 0.007387423992156982, 0.007362304210662841, 0.0072737598419189455, 
0.0072873601913452145, 0.007297183990478516, 0.007319551944732666, 0.007300447940826416, 0.0073274879455566405, 0.00723964786529541, 0.00722441577911377, 0.007294367790222168, 0.007276991844177246, 0.007257215976715088, 0.0072204480171203615, 0.007251711845397949, 0.007268256187438965, 0.007351871967315674, 0.007291456222534179, 0.007274335861206055, 0.007271520137786865, 0.007279551982879638, 0.007280352115631103, 0.007026336193084717, 0.007270271778106689, 0.007311423778533935, 0.007276895999908447, 0.007231232166290283, 0.007291456222534179, 0.007272192001342773, 0.00731766414642334, 0.007276607990264893, 0.007249855995178222, 0.007288928031921387, 0.00732089614868164, 0.007280096054077149, 0.007291872024536133, 0.007268352031707764, 0.007211008071899414, 0.0072988801002502445, 0.0072969279289245605, 0.00728927993774414, 0.007235583782196045, 0.0072436161041259765, 0.007253856182098389, 0.007270559787750244, 0.007266560077667236, 0.007374591827392578, 0.007304959774017334, 0.007225791931152344, 0.007255871772766113, 0.007341440200805664, 0.00730617618560791, 0.007287936210632324, 0.007237919807434082, 0.007245471954345703, 0.0072930240631103515, 0.007309567928314209, 0.007321887969970703, 0.0073034238815307614, 0.007226816177368164, 0.007249216079711914, 0.007279679775238037, 0.007282336235046386, 0.007270271778106689, 0.007211232185363769, 0.007222559928894043, 0.007292031764984131, 0.00738486385345459, 0.007297247886657715, 0.00723744010925293, 0.007188000202178955, 0.00725651216506958, 0.0072960638999938964, 0.007472064018249512, 0.007297215938568115, 0.007278336048126221, 0.007223519802093506, 0.007304096221923828, 0.007330624103546143, 0.007292640209197998, 0.0073281598091125486, 0.007212416172027588, 0.007229087829589843, 0.0073248958587646485, 0.007292511940002441, 0.007000351905822754, 0.007320511817932129, 0.007258143901824951, 0.007254208087921142, 0.00730998420715332, 0.00733788776397705, 0.007347807884216308, 0.007303584098815918, 0.007368703842163086, 0.007286752223968506, 0.007341792106628418, 0.0073199357986450195, 0.007316991806030273, 0.007309343814849854, 0.007276959896087646, 0.0072596797943115235, 0.007336319923400879, 0.007333759784698486, 0.007450719833374023, 0.007333759784698486, 0.0073119039535522465, 0.007231200218200683, 0.007313344001770019, 0.0073523840904235836, 0.007341599941253662, 0.007289440155029297, 0.0075467519760131834, 0.007246016025543213, 0.007286687850952148, 0.007329696178436279, 0.007312960147857666, 0.0072740478515625, 0.007265376091003418, 0.007312191963195801, 0.007287551879882812, 0.00727791976928711, 0.007293983936309815, 0.007315296173095703, 0.007264639854431152, 0.007267231941223144, 0.00732806396484375, 0.007309792041778565, 0.007346271991729736, 0.0073053760528564455, 0.007286464214324951, 0.007272319793701172, 0.00736678409576416, 0.007316480159759522, 0.007332863807678222, 0.007288767814636231, 0.007213119983673096, 0.007319551944732666, 0.007333888053894043, 0.007360511779785156, 0.00729702377319336, 0.007219327926635742, 0.007257247924804687, 0.007281375885009766, 0.00733081579208374, 0.007296000003814697, 0.007278719902038574, 0.007256288051605225, 0.007255712032318115, 0.006916096210479736, 0.007288832187652588, 0.007290880203247071, 0.007300479888916016, 0.007254559993743897, 0.007213151931762695, 0.007265312194824219, 0.007293920040130616, 0.007291039943695068, 0.007263840198516846, 0.00729318380355835, 0.007255360126495362, 0.007278656005859375, 0.007297215938568115, 0.00724780797958374, 0.0075671358108520504, 
0.007294816017150879, 0.007244671821594238, 0.007282495975494385, 0.007404831886291504, 0.007300000190734863, 0.0072724480628967286, 0.007240767955780029, 0.007328927993774414, 0.007348000049591064, 0.007337984085083008, 0.007272319793701172, 0.007225503921508789, 0.00726416015625, 0.0072705278396606446, 0.007318719863891602, 0.007274975776672363, 0.007260000228881836, 0.0072501440048217776, 0.00732803201675415, 0.007315392017364502, 0.0073134078979492185, 0.007280447959899902, 0.007267615795135498, 0.007214240074157715, 0.007302048206329345, 0.007317791938781739, 0.007376832008361816, 0.007244416236877441, 0.007256383895874023, 0.007241151809692383, 0.007292191982269287, 0.007351232051849365, 0.0073523840904235836, 0.0072715840339660645, 0.00719920015335083, 0.007326047897338867, 0.007304255962371826, 0.007343039989471435, 0.0072724480628967286, 0.0072540159225463864, 0.007258111953735351, 0.007270400047302246, 0.007270175933837891, 0.007262432098388672, 0.007242047786712646, 0.007165952205657959, 0.007249631881713867, 0.0069415678977966305, 0.0072806720733642576, 0.007266304016113281, 0.007309023857116699, 0.007244063854217529, 0.007248032093048096, 0.0073112001419067384, 0.007285952091217041, 0.007323520183563232, 0.007276927947998047, 0.007233151912689209, 0.0072481918334960935, 0.007321983814239502, 0.007315423965454102, 0.0073276481628417964, 0.007256192207336426, 0.0072849922180175785, 0.007268256187438965, 0.007274591922760009, 0.007310336112976074, 0.007256127834320068, 0.007285568237304687, 0.0073034238815307614, 0.007306496143341065, 0.007327968120574951, 0.007315360069274903, 0.007415808200836181, 0.007303679943084717, 0.007419904232025146, 0.007321599960327148, 0.007360511779785156, 0.007342080116271973, 0.007278592109680176, 0.007260159969329834, 0.007253664016723633, 0.007309663772583008, 0.007322976112365723, 0.007337984085083008, 0.007251743793487549, 0.007227935791015625, 0.007311583995819092, 0.007303296089172363, 0.007274496078491211, 0.007243743896484375, 0.007235968112945556, 0.00723740816116333, 0.007286399841308594, 0.007305471897125244, 0.00729702377319336, 0.007417664051055908, 0.0072726402282714845, 0.007262207984924316, 0.007278592109680176, 0.007294816017150879, 0.007280223846435547, 0.0072585282325744625, 0.00729094409942627, 0.007336031913757324, 0.007323488235473633, 0.0074035201072692874, 0.007270559787750244, 0.007285888195037842, 0.00725708818435669, 0.006989727973937989, 0.007406879901885986, 0.007295711994171143, 0.0073175039291381834, 0.0073400321006774905, 0.007231488227844239, 0.007275551795959473, 0.007283679962158203, 0.007327744007110596, 0.007318560123443603, 0.007285088062286377, 0.0072464637756347655, 0.0072379522323608395, 0.0073192319869995115, 0.007301280021667481, 0.0073705921173095705, 0.0072707200050354005, 0.007219232082366943, 0.007372447967529297, 0.007390880107879639, 0.007371103763580323, 0.007325632095336914, 0.007258175849914551, 0.0072308478355407714, 0.007297311782836914, 0.007291232109069824, 0.0077712640762329106, 0.00738156795501709, 0.007342400074005127, 0.007278592109680176, 0.007245696067810058, 0.00728275203704834, 0.007332191944122314, 0.007371744155883789, 0.007297215938568115, 0.007222015857696534, 0.007313024044036865, 0.007354368209838867, 0.007311520099639893, 0.007309055805206298, 0.007233823776245117, 0.00721340799331665, 0.007355264186859131, 0.007269152164459228, 0.007309311866760254, 0.007261375904083252, 0.007277088165283203, 0.0072358717918396, 0.007292031764984131, 0.007306111812591553, 0.007295231819152832, 
0.0072680959701538084, 0.007276000022888184, 0.007309855937957764, 0.007341311931610107, 0.007314176082611084, 0.007346176147460938, 0.007252096176147461, 0.0072887039184570315, 0.007310495853424072, 0.00728707218170166, 0.007308127880096435, 0.007259744167327881, 0.007014336109161377, 0.007266687870025635, 0.007254271984100342, 0.007383039951324463, 0.007354688167572021, 0.007360256195068359, 0.007339968204498291, 0.007243328094482422, 0.007266751766204834, 0.007358463764190673, 0.007393248081207275, 0.007344319820404053, 0.0073480639457702635, 0.007305215835571289, 0.007269760131835937, 0.007352960109710693, 0.007301119804382325, 0.007346176147460938, 0.007286784172058106, 0.0072072319984436035, 0.007345856189727783, 0.007350143909454346, 0.007305280208587646, 0.007292992115020752, 0.007249216079711914, 0.007247968196868897, 0.007341983795166015, 0.007309855937957764, 0.007297183990478516, 0.007329631805419922, 0.007287968158721924, 0.0072594242095947266, 0.007323359966278076, 0.007294976234436036, 0.00767145586013794, 0.007331200122833252, 0.007285280227661133, 0.007283135890960694, 0.007300960063934326, 0.007319327831268311, 0.007303552150726318, 0.007263455867767334, 0.007262815952301025, 0.007263904094696045, 0.007334432125091553, 0.007299071788787842, 0.0072822079658508305, 0.007438303947448731, 0.007332543849945069, 0.007257919788360595, 0.0073218560218811036, 0.00733568000793457, 0.007278656005859375, 0.007337823867797851, 0.007265664100646972, 0.007346784114837647, 0.007339295864105225, 0.00728710412979126, 0.00737113618850708, 0.0072828478813171385, 0.007217152118682861, 0.0073194561004638675, 0.008033984184265137, 0.0069838399887084965, 0.007331999778747558, 0.0072991042137146, 0.007337535858154297, 0.007280416011810302, 0.007277184009552002, 0.00727785587310791, 0.00732419204711914, 0.007335999965667725, 0.007315584182739258, 0.007242976188659668, 0.007308063983917236, 0.007273695945739746, 0.007294079780578613, 0.0072670397758483884, 0.007300064086914062, 0.007258080005645752, 0.007234943866729736, 0.007239327907562256, 0.007349215984344483, 0.007249760150909424, 0.007206783771514892, 0.007249663829803467, 0.007292799949645996, 0.0076005120277404786, 0.007297311782836914, 0.007264256000518799, 0.007315455913543701, 0.007279615879058838, 0.007271423816680909, 0.0073177280426025395, 0.007270112037658692, 0.007288832187652588, 0.007278048038482666, 0.0073283519744873045, 0.007299071788787842, 0.007360064029693604, 0.007301599979400635, 0.0072706880569458005, 0.007219264030456543, 0.007294591903686524, 0.007277952194213867, 0.007286399841308594, 0.0072468481063842774, 0.007262207984924316, 0.007223296165466309, 0.007323040008544922, 0.00726691198348999, 0.007251232147216797, 0.007230495929718018, 0.007226367950439453, 0.0073491201400756835, 0.007296000003814697, 0.007364416122436523, 0.00724399995803833, 0.007215904235839844, 0.007269728183746338, 0.007344799995422363, 0.007308800220489502, 0.00728710412979126, 0.007295231819152832, 0.007214752197265625, 0.00737440013885498, 0.007156544208526612, 0.007325823783874512, 0.0074832639694213865, 0.0073175358772277835, 0.007353439807891846, 0.0073218240737915035, 0.007309567928314209, 0.0073257598876953125, 0.007320191860198975, 0.007341440200805664, 0.00727452802658081, 0.007292672157287598, 0.007285312175750732, 0.007288127899169922, 0.007353024005889892, 0.007349472045898437, 0.007328000068664551, 0.007262752056121826, 0.0073723201751708985, 0.007372831821441651, 0.007522751808166504, 0.007444128036499024, 0.007381247997283936, 
0.007333727836608886, 0.007365151882171631, 0.0074268798828125, 0.007365248203277588, 0.007373248100280762, 0.007354335784912109, 0.007290815830230713, 0.0072353601455688475, 0.007389344215393066, 0.007389567852020264, 0.007400383949279785, 0.007484032154083252, 0.007356063842773437, 0.0073787522315979, 0.00740780782699585, 0.007384768009185791, 0.007397151947021484, 0.007387167930603027, 0.007334815979003906, 0.007371903896331787, 0.007307392120361328, 0.007350336074829102, 0.007353184223175049, 0.007310463905334473, 0.00728707218170166, 0.007217599868774414, 0.007284255981445312, 0.007360864162445069, 0.007405759811401368, 0.00738431978225708, 0.007316160202026367, 0.007496831893920898, 0.00722208023071289, 0.0072888960838317874, 0.007501183986663818, 0.007338624000549316, 0.007353439807891846, 0.007342976093292236, 0.00734006404876709, 0.007369855880737305, 0.006930431842803955, 0.007294879913330078, 0.007301023960113525, 0.007313344001770019, 0.007463168144226074, 0.0073640961647033695, 0.007297440052032471, 0.007309408187866211, 0.007329152107238769, 0.007322239875793457, 0.007407616138458252, 0.007417856216430664, 0.007290880203247071, 0.007343711853027343, 0.007339615821838379, 0.007326528072357178, 0.007301375865936279, 0.007253664016723633, 0.007386655807495117, 0.007313983917236328, 0.007315008163452148, 0.007311071872711182, 0.00728275203704834, 0.0072912960052490235, 0.007255551815032959, 0.007301568031311035, 0.007331711769104004, 0.00730079984664917, 0.007385856151580811, 0.007252255916595459, 0.007316448211669922, 0.007320511817932129, 0.0073029761314392086, 0.0072724480628967286, 0.007256063938140869, 0.007286784172058106, 0.007383039951324463, 0.007364607810974121, 0.007355936050415039, 0.007325632095336914, 0.007313439846038819, 0.007324160099029541, 0.007364607810974121, 0.007341472148895264, 0.007305823802947998, 0.007288127899169922, 0.007268511772155762, 0.007277120113372803, 0.007308640003204346, 0.0073079361915588375, 0.007325664043426514, 0.007299071788787842, 0.007271679878234863, 0.00728275203704834, 0.007306079864501953, 0.007337952136993408, 0.00731279993057251, 0.007287263870239258, 0.007210015773773193, 0.007277535915374756, 0.007314911842346191, 0.007363103866577149, 0.007300352096557617, 0.007080575942993164, 0.007353792190551758, 0.00723964786529541, 0.007326399803161621, 0.007358367919921875, 0.007464992046356201, 0.007366623878479004, 0.007298272132873535, 0.007222335815429687, 0.007355999946594238, 0.007295104026794433, 0.007352511882781982, 0.007311168193817138, 0.00729702377319336, 0.007255231857299805, 0.007295807838439941, 0.00734335994720459, 0.007387904167175293, 0.007413760185241699, 0.007253024101257324, 0.007555456161499024, 0.007348671913146972, 0.007316959857940674, 0.007373631954193115, 0.007346047878265381, 0.007340288162231446, 0.0073784317970275876, 0.007280608177185058, 0.007315743923187256, 0.007307263851165771, 0.007327936172485352, 0.007287871837615967, 0.007177055835723877, 0.007329696178436279, 0.007346176147460938, 0.007337984085083008, 0.007340320110321045, 0.007416672229766845, 0.007291359901428223, 0.00735916805267334, 0.007362271785736084, 0.007377056121826172, 0.007621823787689209, 0.007338272094726562, 0.007297408103942871, 0.0074477758407592775, 0.00739359998703003, 0.007312960147857666, 0.007320479869842529, 0.007288447856903076, 0.007298783779144287, 0.007332511901855469, 0.007346176147460938, 0.007372799873352051, 0.0073373122215271, 0.007329504013061524, 0.007269440174102783, 0.007286655902862549, 0.007295104026794433, 
0.00730508804321289, 0.007331999778747558, 0.0072763838768005375, 0.007243008136749268]",tokens/s,136.3876986479409,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in <listcomp> self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 788, in __init__ self.ffn = DbrxFFN(config=config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 765, in __init__ self.experts = DbrxExperts( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 704, in __init__ self.mlp = DbrxExpertGLU( 
File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 682, in __init__ self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 97900 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,783.159296,1133.44512,0.0,738.197504,715.772928,s,1,7.06663818359375,7.06663818359375,0.0,7.06663818359375,7.06663818359375,7.06663818359375,7.06663818359375,[7.06663818359375],,kWh,3.0683363375070863e-06,3.3134657421837375e-07,9.327785240043074e-07,4.332461435729768e-06,,MB,1147.338752,1173.291008,0.0,767.557632,723.637248,s,11,0.6937036476135254,0.06306396796486595,0.0010367258932583786,0.06303084945678711,0.06340774536132812,0.0646856803894043,0.06570802841186524,"[0.06596361541748047, 0.06277628707885742, 0.062066654205322265, 0.06303247833251953, 0.061806304931640625, 0.06340774536132812, 0.062698974609375, 0.06335663986206054, 0.062423137664794924, 0.06314096069335938, 0.06303084945678711]",tokens/s,4059.3703228858376,kWh,2.0431546406759623e-06,2.251474102397912e-07,1.3615395507692388e-06,3.6298416016849926e-06,tokens/kWh,70526493.46493891,MB,1172.320256,1213.136896,0.0,807.40352,735.775744,s,11,9.432381652832031,0.8574892411665483,0.002954941198204269,0.858698486328125,0.8597750854492188,0.8612584533691406,0.8624451477050782,"[0.8552522583007812, 0.8554146118164062, 0.8523182983398437, 0.8535504150390625, 0.8597698974609375, 0.858861083984375, 0.8591412353515625, 0.8597750854492188, 0.8627418212890625, 0.858698486328125, 0.8568584594726563]",tokens/s,73.47030956830821,kWh,2.4743069268035864e-05,2.7288963556497834e-06,1.1863748807231034e-05,3.933571443091668e-05,tokens/kWh,1601597.9603127257,,s,693,9.426552733421326,0.01360252919685617,0.00023421402319323897,0.013552384376525879,0.013768626976013184,0.013935711860656739,0.014409163513183595,"[0.013556032180786133, 0.01360867214202881, 0.013643168449401855, 0.013607808113098144, 0.013897727966308594, 0.013780896186828612, 0.013569439888000488, 0.013677248001098633, 0.013581727981567383, 0.013580767631530762, 0.013491552352905273, 0.013515104293823242, 0.01357209587097168, 0.013429183959960938, 0.013487199783325195, 0.013463616371154785, 
0.013419424057006836, 0.013576128005981446, 0.013453696250915527, 0.013432095527648926, 0.013537631988525391, 0.013701120376586913, 0.013501536369323731, 0.013560447692871094, 0.013496543884277344, 0.01355577564239502, 0.013422592163085938, 0.013420736312866211, 0.013576000213623048, 0.013495519638061523, 0.013654111862182617, 0.013584575653076171, 0.01352467155456543, 0.014236703872680664, 0.013633312225341797, 0.013686783790588379, 0.013621248245239258, 0.01368892765045166, 0.013557184219360351, 0.013543904304504395, 0.013649920463562011, 0.013537280082702637, 0.013489407539367676, 0.013580927848815918, 0.013553631782531739, 0.01371894359588623, 0.013517151832580567, 0.013500415802001953, 0.01354793643951416, 0.013451264381408692, 0.013613056182861329, 0.013500415802001953, 0.013490367889404296, 0.01363657569885254, 0.013552127838134765, 0.013656415939331056, 0.013567999839782715, 0.013522368431091308, 0.013539775848388672, 0.013377951622009278, 0.013418208122253418, 0.013526304244995117, 0.013456128120422363, 0.013297280311584473, 0.013674976348876954, 0.013577471733093262, 0.013691295623779296, 0.013680992126464843, 0.013742143630981446, 0.01377683162689209, 0.013554783821105957, 0.013631936073303222, 0.013457887649536133, 0.01415708827972412, 0.013691743850708009, 0.01367852783203125, 0.013649855613708496, 0.013482239723205567, 0.013604607582092285, 0.013459263801574707, 0.01344438362121582, 0.013597599983215332, 0.01349238395690918, 0.01378105640411377, 0.013592351913452148, 0.013514368057250977, 0.013615488052368164, 0.01350211238861084, 0.013508992195129394, 0.013545503616333008, 0.013444895744323731, 0.013527199745178223, 0.0135697603225708, 0.013493951797485351, 0.013578847885131836, 0.013469023704528809, 0.013508416175842286, 0.01353600025177002, 0.013377056121826172, 0.013496895790100098, 0.01356326389312744, 0.013900032043457031, 0.013701248168945312, 0.013561599731445313, 0.013674976348876954, 0.01350864028930664, 0.013592864036560058, 0.013526752471923828, 0.013393183708190919, 0.013515487670898437, 0.013495967864990235, 0.013547871589660644, 0.01375382423400879, 0.013484576225280761, 0.013595840454101563, 0.013462016105651856, 0.01343507194519043, 0.013498496055603028, 0.013480159759521484, 0.013495391845703125, 0.013486751556396484, 0.01341875171661377, 0.013522047996520997, 0.013865632057189941, 0.013502559661865234, 0.01352070426940918, 0.013373215675354004, 0.013569279670715332, 0.013509599685668946, 0.01344540786743164, 0.013448384284973145, 0.01351529598236084, 0.013539104461669922, 0.01356208038330078, 0.013615103721618652, 0.013464608192443848, 0.013549856185913086, 0.013511360168457031, 0.013445119857788086, 0.013667360305786132, 0.013450207710266114, 0.013592160224914551, 0.01354793643951416, 0.013403615951538085, 0.013936767578125, 0.013773440361022949, 0.013564991950988769, 0.01358512020111084, 0.01351471996307373, 0.013674304008483887, 0.013461759567260742, 0.013497407913208009, 0.013537983894348144, 0.013449376106262206, 0.013522815704345704, 0.01353916835784912, 0.013471839904785156, 0.01360307216644287, 0.013428735733032226, 0.013398015975952148, 0.013472031593322754, 0.01336515235900879, 0.0136145601272583, 0.01343727970123291, 0.013407808303833008, 0.013479616165161133, 0.013441951751708984, 0.013457247734069824, 0.013542464256286621, 0.013415360450744629, 0.013567999839782715, 0.01381772804260254, 0.013456992149353027, 0.013455967903137207, 0.013420479774475098, 0.01352899169921875, 0.013512800216674805, 0.013367136001586915, 0.013463168144226075, 
0.013469471931457519, 0.013492351531982421, 0.013480223655700684, 0.013484383583068848, 0.01375641632080078, 0.013678624153137206, 0.013485983848571777, 0.0135414400100708, 0.013495871543884277, 0.013496959686279297, 0.013339232444763184, 0.013522720336914063, 0.013553152084350586, 0.013628064155578613, 0.013455424308776855, 0.013434880256652832, 0.01348624038696289, 0.013373279571533203, 0.013475839614868163, 0.01343283176422119, 0.013404159545898438, 0.013578335762023925, 0.01344054412841797, 0.01339395236968994, 0.013486432075500489, 0.0133754243850708, 0.013420607566833496, 0.013500415802001953, 0.01343280029296875, 0.01352883243560791, 0.01345952033996582, 0.013418720245361329, 0.013619199752807617, 0.013436927795410156, 0.013423904418945313, 0.013564031600952148, 0.01359727954864502, 0.014243295669555664, 0.014405920028686523, 0.014350784301757813, 0.013569055557250976, 0.013648672103881836, 0.013567584037780762, 0.013482399940490723, 0.013524991989135742, 0.01345910358428955, 0.013573472023010253, 0.01347481632232666, 0.013389823913574218, 0.013499872207641601, 0.013460000038146973, 0.013514752388000489, 0.013450528144836426, 0.013445440292358399, 0.013495967864990235, 0.013451456069946289, 0.013426431655883789, 0.013529024124145507, 0.013380031585693359, 0.013482239723205567, 0.013893343925476075, 0.013512415885925293, 0.013607423782348632, 0.013541407585144043, 0.01356822395324707, 0.013553791999816895, 0.013488224029541016, 0.013600704193115234, 0.013578399658203125, 0.013479295730590821, 0.013603424072265625, 0.013450976371765136, 0.013508447647094727, 0.013562432289123535, 0.013839360237121581, 0.013609472274780274, 0.01367910385131836, 0.013600895881652832, 0.013494144439697266, 0.013610783576965333, 0.01345967960357666, 0.013525343894958497, 0.013534879684448243, 0.01369324779510498, 0.01360863971710205, 0.015853407859802247, 0.013883551597595216, 0.013639679908752441, 0.013628543853759766, 0.01354377555847168, 0.013548064231872558, 0.013503968238830566, 0.013558303833007813, 0.013504544258117675, 0.013549375534057616, 0.013512864112854003, 0.013500415802001953, 0.013611007690429687, 0.013562208175659179, 0.013480799674987793, 0.013568832397460937, 0.013500415802001953, 0.01345315170288086, 0.013508768081665038, 0.01347379207611084, 0.014075648307800294, 0.013514880180358887, 0.013555839538574218, 0.013854207992553711, 0.013648384094238282, 0.01358614444732666, 0.013479583740234375, 0.013598752021789551, 0.013549280166625977, 0.013503264427185059, 0.013543328285217286, 0.013467840194702149, 0.013452768325805664, 0.013504032135009766, 0.013437952041625977, 0.013496319770812988, 0.013426688194274903, 0.013477343559265137, 0.013676799774169923, 0.013648159980773926, 0.013596672058105469, 0.013549568176269532, 0.01362339210510254, 0.013535200119018554, 0.01354304027557373, 0.013863583564758301, 0.01356275177001953, 0.013450016021728515, 0.013529439926147462, 0.014190239906311035, 0.01471014404296875, 0.013615103721618652, 0.014029984474182128, 0.01356272029876709, 0.013633536338806153, 0.013504159927368165, 0.013507136344909668, 0.013647647857666015, 0.013728032112121582, 0.013745823860168456, 0.01355065631866455, 0.013442048072814941, 0.013684479713439942, 0.013565471649169922, 0.013595487594604492, 0.013486144065856933, 0.013535039901733398, 0.013630559921264648, 0.013678591728210449, 0.013591456413269042, 0.013512895584106446, 0.013418304443359374, 0.013604928016662597, 0.01346275234222412, 0.013662976264953613, 0.013527327537536622, 0.013479616165161133, 0.014118304252624512, 
0.013556320190429688, 0.013535231590270995, 0.013625344276428223, 0.013730079650878906, 0.01365782356262207, 0.013461759567260742, 0.013782303810119629, 0.013610655784606934, 0.01350489616394043, 0.013564191818237304, 0.013535391807556153, 0.01377280044555664, 0.013622976303100586, 0.013629759788513184, 0.013801471710205078, 0.013550623893737793, 0.01363475227355957, 0.013446399688720703, 0.013556511878967285, 0.013501440048217773, 0.013552384376525879, 0.013626879692077636, 0.013484800338745117, 0.013538784027099609, 0.013709152221679687, 0.013726143836975098, 0.013758336067199707, 0.013740480422973633, 0.013620927810668945, 0.013527039527893067, 0.01345638370513916, 0.013455904006958008, 0.013492704391479492, 0.013489727973937988, 0.013608927726745606, 0.014916288375854491, 0.013785152435302735, 0.013815584182739258, 0.013614303588867187, 0.013490559577941895, 0.013904735565185547, 0.015267552375793457, 0.014446463584899902, 0.01364185619354248, 0.01358233642578125, 0.013496319770812988, 0.013557760238647461, 0.013705216407775878, 0.013434623718261719, 0.013664064407348632, 0.013443679809570312, 0.01350380802154541, 0.013660832405090332, 0.013413344383239746, 0.013466879844665528, 0.013475040435791015, 0.013380031585693359, 0.013505760192871094, 0.013535584449768066, 0.01348038387298584, 0.013501536369323731, 0.013593152046203613, 0.013469504356384278, 0.01359280014038086, 0.013457728385925292, 0.013668383598327637, 0.013778911590576173, 0.013677887916564942, 0.013496447563171386, 0.01351689624786377, 0.01353286361694336, 0.013462431907653808, 0.013551487922668456, 0.01433568000793457, 0.013592608451843262, 0.013508864402770996, 0.0134202241897583, 0.013438336372375488, 0.013437376022338868, 0.013462047576904296, 0.014327391624450684, 0.013492639541625977, 0.013769951820373535, 0.01373468780517578, 0.013450559616088867, 0.013687616348266601, 0.01347980785369873, 0.013639967918395996, 0.013571935653686524, 0.013657983779907226, 0.0135098876953125, 0.013538047790527344, 0.013492223739624023, 0.01347379207611084, 0.013479935646057128, 0.013737792015075684, 0.013559743881225586, 0.013537535667419434, 0.01372163200378418, 0.01360636806488037, 0.014370783805847168, 0.013732416152954102, 0.01363702392578125, 0.013486080169677735, 0.013498016357421875, 0.013437888145446778, 0.01345644760131836, 0.013657024383544922, 0.013501472473144531, 0.013460288047790528, 0.014229663848876953, 0.013506112098693847, 0.013533503532409667, 0.013484160423278808, 0.013602463722229004, 0.013711711883544923, 0.01356390380859375, 0.013704992294311524, 0.013576288223266602, 0.013789312362670898, 0.013578240394592284, 0.013844703674316406, 0.013860639572143554, 0.013588831901550294, 0.01363321590423584, 0.01358784008026123, 0.013615167617797852, 0.013594240188598632, 0.01368899154663086, 0.013669119834899903, 0.013576512336730956, 0.013610560417175293, 0.013611136436462402, 0.013538944244384765, 0.013836095809936524, 0.014023232460021972, 0.014206496238708496, 0.013935008049011231, 0.013930303573608398, 0.013758496284484863, 0.013613280296325683, 0.013380800247192383, 0.013679712295532227, 0.013554464340209961, 0.01334992027282715, 0.013492128372192384, 0.013506943702697755, 0.013421695709228515, 0.013550080299377442, 0.013433888435363769, 0.01356278419494629, 0.013715519905090331, 0.013567999839782715, 0.013665375709533691, 0.013612256050109864, 0.01364345645904541, 0.013694560050964356, 0.01349891185760498, 0.01357376003265381, 0.013457216262817383, 0.01339027214050293, 0.013645824432373046, 0.013316672325134277, 
0.013495424270629882, 0.013538175582885743, 0.013456543922424317, 0.013488991737365723, 0.01349836826324463, 0.014232704162597656, 0.013593855857849122, 0.013465279579162598, 0.015788064002990723, 0.015582592010498046, 0.01374028778076172, 0.013781087875366211, 0.013642208099365234, 0.013458463668823242, 0.013510944366455079, 0.013443584442138673, 0.013588383674621582, 0.013643199920654298, 0.013728320121765137, 0.013558879852294922, 0.013515680313110352, 0.013538911819458007, 0.013589216232299804, 0.013446847915649414, 0.013645600318908692, 0.01360256004333496, 0.013805312156677246, 0.013556127548217773, 0.013539392471313477, 0.013538847923278809, 0.013598912239074707, 0.013754912376403809, 0.013739711761474609, 0.013825823783874511, 0.014125599861145019, 0.013623295783996582, 0.01356118392944336, 0.01405404758453369, 0.01374944019317627, 0.013544256210327149, 0.013685888290405273, 0.01356214427947998, 0.013407135963439941, 0.013455231666564942, 0.013483039855957031, 0.013749024391174317, 0.013723775863647461, 0.01359654426574707, 0.013676544189453126, 0.013571328163146972, 0.013763327598571778, 0.013523039817810058, 0.013457088470458985, 0.013598496437072755, 0.013422880172729492, 0.01358358383178711, 0.013542143821716309, 0.014073472023010253, 0.013711872100830079, 0.013549599647521973, 0.013632543563842773, 0.013540032386779786, 0.013492351531982421, 0.013695648193359375, 0.01356227207183838, 0.0137576322555542, 0.013758591651916504, 0.01353983974456787, 0.01366972827911377, 0.013532959938049316, 0.013599360466003418, 0.013473952293395996, 0.013462719917297363, 0.013650848388671874, 0.014149632453918457, 0.013674495697021484, 0.013740032196044923, 0.013854240417480469, 0.013668064117431641, 0.013615776062011718, 0.013684127807617187, 0.01358028793334961, 0.013591232299804688, 0.013491904258728028, 0.013503040313720703, 0.01363327980041504, 0.013512703895568847, 0.013575327873229981, 0.013586784362792969, 0.013636416435241699, 0.013653696060180664, 0.013450688362121582, 0.013603391647338868, 0.013813535690307617, 0.014044768333435058, 0.013981568336486816, 0.013904640197753906, 0.013791359901428223, 0.013729887962341309, 0.013659232139587403, 0.013565952301025391, 0.013736543655395507, 0.013584159851074218, 0.013622591972351075, 0.013671520233154297, 0.013536383628845215, 0.013671199798583984, 0.013549056053161621, 0.013607423782348632, 0.013701120376586913, 0.013537088394165038, 0.013578335762023925, 0.01366256046295166, 0.013428031921386719, 0.013576640129089355, 0.013367391586303711, 0.013402015686035156, 0.01351193618774414, 0.013484864234924317, 0.013475711822509766, 0.013438816070556641, 0.01348761558532715, 0.013702048301696777, 0.013442015647888184, 0.013484895706176758, 0.013399200439453125, 0.013638527870178222, 0.013588095664978027, 0.013707615852355958, 0.013645759582519531, 0.013723775863647461, 0.013538463592529297, 0.013648799896240234, 0.013532608032226563, 0.013521023750305176, 0.013729280471801757, 0.01359177589416504, 0.013604063987731934, 0.013546272277832032, 0.01345529556274414, 0.013507648468017578, 0.013445728302001953, 0.013684864044189454, 0.013615167617797852, 0.013416031837463378, 0.013664735794067382, 0.013746399879455567, 0.013702879905700684, 0.013813695907592773, 0.013629055976867676, 0.013563712120056153, 0.013429311752319336, 0.013528127670288086, 0.013661375999450684, 0.01360051155090332, 0.013623295783996582, 0.013541376113891602, 0.013626655578613282, 0.013630175590515136, 0.013462944030761719, 0.013544032096862793, 0.013438976287841797, 
0.013447296142578126, 0.013491104125976563, 0.013450400352478027, 0.013465503692626953, 0.013468671798706054, 0.013419424057006836, 0.013379584312438965, 0.013355008125305176, 0.013522239685058593, 0.013638208389282227, 0.01351471996307373, 0.014046624183654785, 0.01411689567565918, 0.014015199661254883, 0.01394611167907715, 0.014023712158203125, 0.013598431587219238, 0.013592255592346192, 0.013680480003356933, 0.0135316162109375, 0.013569952011108399, 0.013488256454467773, 0.013467616081237792, 0.013527199745178223, 0.013412192344665527, 0.01340556812286377]",tokens/s,73.51574001628468,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1133, in __init__ self.model = StableLmModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in __init__ [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in <listcomp> 
[StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 688, in __init__ self.self_attn = ATTENTION_CLASSES[config._attn_implementation](config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 339, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.use_qkv_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 14.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 189234 has 14.73 GiB memory in use. Of the allocated memory 14.54 GiB is allocated by PyTorch, and 78.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 743, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 175, in __init__ self.dense = nn.Linear(config.hidden_size, config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 40714 has 14.73 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 20.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,788.422656,763.232256,0.0,360.710144,345.493504,s,1,7.455087890625,7.455087890625,0.0,7.455087890625,7.455087890625,7.455087890625,7.455087890625,[7.455087890625],,kWh,2.675399883317671e-06,2.8781907854014776e-07,9.583341000068968e-07,3.921553061864716e-06,,MB,1143.631872,775.815168,0.0,362.807296,319.011328,s,17,0.39619734382629385,0.023305726107429058,0.0002778715206000846,0.023273664474487303,0.023351884841918944,0.023560076904296873,0.024196738281249998,"[0.02435590362548828, 0.02326211166381836, 0.023286912918090822, 0.023345727920532227, 0.023250080108642577, 0.023263679504394532, 0.02328281593322754, 0.023273664474487303, 0.02332044792175293, 0.02331270408630371, 0.023361120223999023, 0.023291999816894532, 0.02313209533691406, 0.023109439849853516, 0.023046112060546874, 0.023073312759399413, 0.023229215621948244]",tokens/s,10984.424978649178,kWh,7.947728495697403e-07,8.764900303181575e-08,5.257899737717208e-07,1.4082118263732767e-06,tokens/kWh,181790832.32052174,MB,1182.437376,798.88384,0.0,385.875968,319.013888,s,17,9.946066406249999,0.5850627297794118,0.013180063758045698,0.5898274536132813,0.5983458129882813,0.6008399291992188,0.6011444995117188,"[0.5904949951171875, 0.5925106811523437, 0.6007447509765625, 0.5875509033203125, 0.5947955932617187, 0.5966371459960937, 0.5946541137695313, 0.5967465209960937, 0.6012206420898437, 0.5898274536132813, 0.587629150390625, 0.5828248901367188, 0.5670865478515625, 0.5648525390625, 0.5611036987304687, 0.5653825073242188, 0.5720042724609375]",tokens/s,107.6807610420734,kWh,1.6582429173467125e-05,1.8287589048676044e-06,6.637467390816643e-06,2.504865546915138e-05,tokens/kWh,2515105.055342692,,s,1071,9.935685695648191,0.00927701745625415,0.0003153263789410202,0.009294912338256836,0.009535776138305665,0.009632480144500733,0.010184636592864987,"[0.009412128448486328, 0.009339360237121582, 0.009355615615844727, 0.009266624450683594, 0.009380224227905274, 0.009354911804199218, 0.00928547191619873, 0.009658720016479492, 0.009334783554077148, 0.009399968147277832, 0.009398624420166016, 0.009238688468933106, 0.009231583595275879, 0.009239520072937012, 0.009336447715759278, 0.009289759635925293, 0.00934502410888672, 0.009236479759216308, 0.009400320053100587, 0.009411711692810059, 0.009323552131652832, 0.009283391952514649, 0.009606816291809083, 0.00947424030303955, 0.009439423561096191, 0.009483967781066895, 0.009357952117919921, 0.009485440254211426, 0.009525823593139648, 0.009442399978637696, 0.009308992385864258, 0.009285728454589843, 0.009367615699768066, 0.009258943557739258, 0.009333919525146485, 0.009352128028869628, 0.00994099235534668, 0.009289631843566895, 0.009242624282836913, 
0.009407967567443847, 0.00926159954071045, 0.00924073600769043, 0.009938783645629883, 0.009256671905517579, 0.009290016174316407, 0.009352800369262695, 0.009224608421325683, 0.009217791557312012, 0.009247008323669434, 0.009183199882507324, 0.009428735733032226, 0.009230208396911622, 0.009316736221313477, 0.009303327560424804, 0.009203904151916504, 0.009314847946166991, 0.009368960380554198, 0.009396320343017578, 0.00926371192932129, 0.009308320045471191, 0.009397088050842285, 0.009604000091552734, 0.009365535736083985, 0.009393664360046386, 0.009548992156982421, 0.00939964771270752, 0.009400351524353028, 0.009330623626708985, 0.009285823822021484, 0.009469599723815917, 0.009263648033142089, 0.009261119842529296, 0.009381888389587402, 0.009230143547058105, 0.010677599906921386, 0.00918825626373291, 0.009031807899475098, 0.009106592178344726, 0.009114303588867188, 0.009162495613098145, 0.009363807678222657, 0.009297792434692382, 0.009361120223999023, 0.009535776138305665, 0.009271295547485351, 0.009357024192810058, 0.00930399990081787, 0.00918934440612793, 0.009314047813415527, 0.009325119972229004, 0.00943712043762207, 0.009357439994812011, 0.009385984420776367, 0.009303839683532714, 0.00942307186126709, 0.009460063934326171, 0.009344672203063965, 0.00954195213317871, 0.009426624298095702, 0.009392255783081055, 0.009447487831115722, 0.009617119789123536, 0.009408608436584472, 0.009463808059692384, 0.009592831611633301, 0.0092805757522583, 0.009260095596313477, 0.00914739227294922, 0.009344896316528321, 0.009316672325134277, 0.009333632469177245, 0.009238335609436035, 0.009642239570617675, 0.009428223609924317, 0.009396063804626464, 0.009396608352661134, 0.009349408149719239, 0.009461759567260742, 0.0094169282913208, 0.009375519752502441, 0.009465120315551757, 0.009429984092712403, 0.009557727813720704, 0.009715583801269531, 0.009500191688537598, 0.009595135688781738, 0.009504768371582031, 0.009547776222229003, 0.009553600311279296, 0.00969372844696045, 0.009950943946838379, 0.009551936149597168, 0.0096112642288208, 0.00981606388092041, 0.009571743965148925, 0.009708127975463866, 0.009586688041687011, 0.00951411247253418, 0.009573216438293458, 0.009649279594421387, 0.01007040023803711, 0.009615967750549317, 0.009523136138916016, 0.009580224037170411, 0.00989014434814453, 0.00953164768218994, 0.0094236478805542, 0.009521183967590332, 0.009526176452636719, 0.009414560317993164, 0.009504863739013672, 0.010132543563842774, 0.00951308822631836, 0.009536319732666016, 0.009431039810180664, 0.009509951591491698, 0.009523232460021973, 0.009503840446472168, 0.009596416473388672, 0.009541376113891602, 0.009445183753967286, 0.009537856101989746, 0.009701600074768066, 0.009480416297912598, 0.009408479690551757, 0.009526592254638672, 0.009355520248413086, 0.009392704010009766, 0.009365407943725586, 0.009287712097167968, 0.009305567741394042, 0.009209759712219238, 0.009478752136230468, 0.00929587173461914, 0.009406463623046875, 0.009349120140075684, 0.009291808128356933, 0.009367520332336426, 0.009407839775085448, 0.00940662384033203, 0.009375231742858887, 0.009394911766052247, 0.009433376312255859, 0.00953286361694336, 0.009451168060302734, 0.0094967041015625, 0.009589247703552246, 0.009801535606384278, 0.00928816032409668, 0.009277312278747559, 0.009115967750549316, 0.009054752349853515, 0.008982303619384766, 0.009023296356201171, 0.009166720390319825, 0.009480863571166993, 0.00936844825744629, 0.010021663665771485, 0.0100414400100708, 0.00932367992401123, 0.009340928077697755, 0.009216608047485352, 
0.009258560180664063, 0.009155167579650878, 0.00919331169128418, 0.00944758415222168, 0.009318400382995605, 0.009330944061279297, 0.00939583969116211, 0.009461888313293457, 0.009449472427368164, 0.00935321617126465, 0.009533408164978028, 0.009457695960998535, 0.00929792022705078, 0.009379776000976562, 0.009512800216674805, 0.009355680465698242, 0.009434111595153808, 0.009315135955810547, 0.009287327766418456, 0.009363807678222657, 0.009287775993347168, 0.009277215957641602, 0.009342687606811523, 0.00923196792602539, 0.009192255973815918, 0.009283583641052246, 0.00930406379699707, 0.009256959915161133, 0.00931430435180664, 0.009275391578674316, 0.009326016426086426, 0.009345600128173828, 0.009430303573608398, 0.009257696151733398, 0.009338879585266113, 0.009367551803588867, 0.00923852825164795, 0.009226240158081055, 0.009207039833068848, 0.009116000175476074, 0.00909558391571045, 0.009049632072448731, 0.009150943756103515, 0.009279295921325684, 0.009126272201538086, 0.009717439651489258, 0.009441408157348632, 0.009279583930969238, 0.009189215660095215, 0.009273728370666505, 0.009735296249389649, 0.009567104339599609, 0.00922214412689209, 0.009287039756774902, 0.00939628791809082, 0.009494303703308105, 0.009349375724792481, 0.009437215805053712, 0.00934166431427002, 0.009244480133056641, 0.009249792098999024, 0.00931065559387207, 0.009433759689331054, 0.00922111988067627, 0.009270144462585448, 0.009417920112609864, 0.00930611228942871, 0.009226400375366211, 0.009444000244140625, 0.009297183990478516, 0.00926358413696289, 0.009434783935546875, 0.009283519744873047, 0.009123711585998536, 0.009128416061401367, 0.009226752281188964, 0.009267135620117188, 0.009196864128112794, 0.009360128402709961, 0.009219903945922852, 0.009193471908569336, 0.009195072174072266, 0.009283647537231446, 0.009300352096557617, 0.00932044792175293, 0.009340448379516602, 0.010033599853515625, 0.00977513599395752, 0.009926655769348144, 0.010252287864685058, 0.009492735862731933, 0.009824000358581542, 0.009489888191223145, 0.0094235200881958, 0.00950432014465332, 0.00946003246307373, 0.009451775550842286, 0.009660160064697266, 0.00944761562347412, 0.009482048034667968, 0.009460895538330079, 0.009703871726989746, 0.009619872093200683, 0.009508671760559083, 0.009585184097290039, 0.009481023788452149, 0.009481056213378906, 0.009508768081665038, 0.009557472229003907, 0.009603487968444823, 0.00937171173095703, 0.00935324764251709, 0.009345151901245118, 0.009295007705688477, 0.009362367630004883, 0.00936464023590088, 0.009422944068908692, 0.00950761604309082, 0.0093306884765625, 0.00940550422668457, 0.009524160385131836, 0.009469471931457519, 0.009570240020751953, 0.009619999885559082, 0.009494527816772461, 0.009457663536071777, 0.009594559669494629, 0.009500543594360351, 0.00953990364074707, 0.009588864326477051, 0.00943513584136963, 0.009439231872558594, 0.009543647766113281, 0.009395456314086914, 0.009681695938110352, 0.00973209571838379, 0.00963587188720703, 0.009510784149169922, 0.009415040016174317, 0.009594304084777832, 0.00942518424987793, 0.010057727813720703, 0.009469951629638672, 0.009465855598449707, 0.00951699161529541, 0.009325920104980468, 0.009408351898193359, 0.009489248275756836, 0.009492511749267579, 0.009459615707397461, 0.009533535957336426, 0.00961292839050293, 0.009578720092773437, 0.009461695671081544, 0.009775296211242675, 0.009430975914001464, 0.00935536003112793, 0.009280768394470215, 0.009293984413146973, 0.009394495964050294, 0.009428895950317383, 0.009392512321472169, 0.00954918384552002, 
0.00942137622833252, 0.009381983757019043, 0.009312064170837402, 0.00932470417022705, 0.009307423591613769, 0.009240927696228028, 0.009169280052185058, 0.009235615730285645, 0.00923526382446289, 0.009545760154724121, 0.009342432022094727, 0.009408960342407227, 0.00944057559967041, 0.009465536117553712, 0.009539487838745118, 0.00956492805480957, 0.009527456283569337, 0.009481504440307617, 0.009508928298950196, 0.009365535736083985, 0.009412768363952637, 0.009408960342407227, 0.009378687858581543, 0.009376064300537109, 0.009331616401672363, 0.009323391914367676, 0.009312255859375, 0.009449760437011718, 0.009409152030944824, 0.009367136001586914, 0.009282048225402833, 0.009223615646362305, 0.009165375709533692, 0.009190591812133789, 0.009284319877624512, 0.009250240325927734, 0.009529536247253417, 0.009421183586120605, 0.00933683204650879, 0.009314111709594727, 0.009332927703857422, 0.009348192214965821, 0.00952790355682373, 0.00953987216949463, 0.009392288208007813, 0.009273216247558594, 0.009240575790405273, 0.009250816345214843, 0.009170944213867188, 0.00925267219543457, 0.009283040046691894, 0.009272031784057617, 0.009259008407592773, 0.009268223762512207, 0.00932156753540039, 0.009228384017944336, 0.009289536476135254, 0.009184255599975585, 0.009419551849365234, 0.009449600219726562, 0.009457056045532226, 0.009343680381774902, 0.00937564754486084, 0.00925875186920166, 0.009228768348693848, 0.009309760093688965, 0.009343104362487793, 0.009372032165527344, 0.009237343788146973, 0.00985974407196045, 0.00936569595336914, 0.013103232383728028, 0.009487936019897461, 0.010156479835510254, 0.009287903785705567, 0.009323967933654784, 0.009181183815002441, 0.00927948760986328, 0.00924828815460205, 0.009363936424255372, 0.00928767967224121, 0.009169024467468262, 0.009200639724731445, 0.00920400047302246, 0.00952768039703369, 0.009286879539489746, 0.009495360374450684, 0.009349311828613281, 0.009506815910339356, 0.009510623931884765, 0.00945577621459961, 0.009429120063781738, 0.009676799774169922, 0.009364543914794922, 0.009441791534423828, 0.009531935691833497, 0.009461503982543945, 0.009335040092468262, 0.009346976280212402, 0.00937980842590332, 0.009398303985595702, 0.009347071647644043, 0.009364800453186035, 0.009415360450744628, 0.009435168266296387, 0.009421024322509765, 0.009399616241455078, 0.00996406364440918, 0.010440640449523926, 0.011192288398742676, 0.009629119873046875, 0.00957094383239746, 0.009395615577697754, 0.009641599655151368, 0.009546943664550781, 0.009469087600708009, 0.009531968116760254, 0.009422847747802734, 0.009284735679626465, 0.009278335571289063, 0.009412416458129883, 0.009338335990905762, 0.009415391921997071, 0.00940214443206787, 0.009336447715759278, 0.009392736434936523, 0.0093920316696167, 0.0092609281539917, 0.009381376266479492, 0.009652192115783692, 0.009599455833435059, 0.009496864318847656, 0.009387840270996093, 0.009439423561096191, 0.009467904090881347, 0.009359359741210938, 0.009412575721740722, 0.009360896110534669, 0.009381695747375489, 0.009391327857971192, 0.009529151916503907, 0.009401503562927247, 0.009500479698181153, 0.009388192176818848, 0.009385824203491211, 0.010250335693359374, 0.00955395221710205, 0.009463680267333985, 0.00940675163269043, 0.009479904174804687, 0.009462112426757813, 0.009432736396789551, 0.009477824211120605, 0.009658687591552735, 0.00963584041595459, 0.009645919799804687, 0.00947424030303955, 0.009495807647705077, 0.009480575561523437, 0.009449824333190918, 0.009490431785583496, 0.009488384246826171, 0.009430368423461915, 
0.009481951713562012, 0.00946886444091797, 0.009511008262634277, 0.00943513584136963, 0.009527199745178223, 0.009446656227111817, 0.009300415992736816, 0.00926518440246582, 0.009371935844421387, 0.00939417552947998, 0.009586591720581055, 0.009418848037719727, 0.009349120140075684, 0.009422080039978027, 0.009192192077636718, 0.00922214412689209, 0.009432064056396485, 0.009466815948486328, 0.009353280067443848, 0.009365504264831542, 0.009457695960998535, 0.009428192138671875, 0.009430047988891601, 0.00947548770904541, 0.009455936431884766, 0.009367679595947266, 0.009340000152587891, 0.009335871696472167, 0.009469663619995117, 0.009361632347106933, 0.009346847534179687, 0.009389408111572265, 0.009357407569885253, 0.00958521556854248, 0.011084959983825684, 0.011393183708190919, 0.01111302375793457, 0.009417951583862305, 0.009393343925476074, 0.009492159843444824, 0.009312576293945312, 0.009338272094726563, 0.009409184455871583, 0.009350175857543946, 0.009299072265625, 0.009386848449707032, 0.009312288284301758, 0.009296799659729004, 0.009334815979003906, 0.00927945613861084, 0.009312255859375, 0.00934870433807373, 0.009382304191589355, 0.009554047584533692, 0.00962342357635498, 0.009439231872558594, 0.00931987190246582, 0.00933516788482666, 0.009339072227478028, 0.009348608016967774, 0.009351679801940918, 0.009213279724121094, 0.009108415603637695, 0.009371359825134278, 0.009379712104797364, 0.009275520324707031, 0.00923852825164795, 0.009164799690246582, 0.009242591857910156, 0.009272992134094238, 0.009255295753479003, 0.00926915168762207, 0.009293888092041016, 0.00924675178527832, 0.009223551750183105, 0.00928831958770752, 0.009388031959533692, 0.009352352142333984, 0.00928444766998291, 0.009363007545471192, 0.00926255989074707, 0.009317343711853027, 0.00935321617126465, 0.009295647621154784, 0.009327072143554688, 0.009471136093139649, 0.00974028778076172, 0.009407072067260743, 0.009312543869018555, 0.009359071731567383, 0.009423968315124511, 0.009446240425109864, 0.00940447998046875, 0.009355263710021973, 0.009362943649291992, 0.009431551933288575, 0.009409536361694336, 0.009350048065185548, 0.009377375602722168, 0.009418944358825683, 0.009321887969970704, 0.009642656326293946, 0.009390591621398926, 0.009492480278015136, 0.009515135765075684, 0.009537407875061035, 0.00944547176361084, 0.00947804832458496, 0.009430111885070801, 0.009389984130859374, 0.009570752143859863, 0.009254528045654296, 0.009362367630004883, 0.009312543869018555, 0.009369536399841309, 0.009353119850158692, 0.009821279525756836, 0.010741696357727051, 0.00940278434753418, 0.009384544372558593, 0.009291616439819336, 0.009274399757385254, 0.00947913646697998, 0.009377663612365722, 0.009304191589355469, 0.009881600379943848, 0.009378016471862793, 0.009328415870666504, 0.009347295761108399, 0.009209888458251953, 0.009271039962768554, 0.009184639930725097, 0.009210495948791505, 0.009279647827148437, 0.009557855606079102, 0.009188672065734864, 0.009179743766784668, 0.009070976257324219, 0.0090928316116333, 0.009114720344543458, 0.009045984268188477, 0.009290687561035156, 0.009098943710327148, 0.00902560043334961, 0.009006624221801757, 0.009173727989196778, 0.009218048095703125, 0.009181023597717285, 0.00908073616027832, 0.00921129608154297, 0.009210720062255859, 0.009129568099975586, 0.009140671730041504, 0.009139295578002929, 0.009226143836975098, 0.00921455955505371, 0.009183615684509277, 0.009223775863647461, 0.009466272354125976, 0.009283583641052246, 0.009203712463378906, 0.009502495765686035, 0.009128191947937011, 
0.009153887748718262, 0.009228575706481933, 0.009164192199707032, 0.00916374397277832, 0.009193375587463378, 0.009335712432861328, 0.009200575828552247, 0.00900825595855713, 0.0090632963180542, 0.00903987216949463, 0.009030719757080079, 0.009099807739257813, 0.009150560379028321, 0.009278847694396972, 0.00925603199005127, 0.009250783920288086, 0.01138707160949707, 0.009421695709228515, 0.00930515193939209, 0.009178879737854004, 0.009213600158691406, 0.00925487995147705, 0.009142656326293946, 0.009047776222229005, 0.009170559883117676, 0.009214624404907227, 0.00926915168762207, 0.009298111915588379, 0.00920479965209961, 0.009201663970947266, 0.009188096046447753, 0.009336544036865234, 0.009453120231628418, 0.009294912338256836, 0.009311936378479003, 0.009295583724975586, 0.009288288116455079, 0.00937052822113037, 0.00938486385345459, 0.00935110378265381, 0.009387552261352539, 0.009293631553649902, 0.009292448043823242, 0.009271391868591309, 0.009312159538269043, 0.009357248306274414, 0.009387104034423829, 0.009200608253479004, 0.00926035213470459, 0.009138879776000976, 0.00903987216949463, 0.00912112045288086, 0.009101632118225098, 0.009168895721435547, 0.00901974391937256, 0.009127872467041016, 0.009163007736206055, 0.009114784240722656, 0.009273247718811035, 0.00928825569152832, 0.00911788845062256, 0.009107680320739745, 0.009060192108154296, 0.008998847961425782, 0.008884063720703125, 0.009123040199279784, 0.0093089599609375, 0.009086976051330567, 0.008970239639282226, 0.008888256072998047, 0.008980511665344238, 0.0090665283203125, 0.009003007888793945, 0.008937472343444825, 0.009385024070739746, 0.009093279838562012, 0.009075488090515137, 0.009193471908569336, 0.009154335975646972, 0.009238752365112304, 0.009191712379455567, 0.009125663757324218, 0.009068479537963867, 0.008947711944580078, 0.00889475154876709, 0.008895711898803711, 0.008858112335205079, 0.008812800407409668, 0.008806143760681152, 0.008839136123657226, 0.009093376159667968, 0.008885536193847656, 0.008815072059631347, 0.00882915210723877, 0.008819519996643066, 0.008729536056518555, 0.008867903709411621, 0.008843296051025391, 0.008772640228271485, 0.008791328430175781, 0.00885251235961914, 0.008798687934875489, 0.008831104278564454, 0.008990880012512206, 0.009074560165405273, 0.009021408081054687, 0.008929311752319336, 0.00884124755859375, 0.009081952095031738, 0.009044416427612305, 0.008894271850585937, 0.0090251522064209, 0.00903270435333252, 0.009027839660644531, 0.009288928031921388, 0.009124383926391602, 0.009029631614685058, 0.008959263801574706, 0.008927136421203614, 0.00904918384552002, 0.009118656158447265, 0.009030624389648438, 0.008994624137878418, 0.009002016067504883, 0.009062591552734376, 0.008901568412780761, 0.009061311721801758, 0.009108384132385254, 0.008717599868774415, 0.008865983963012695, 0.008813311576843261, 0.008803903579711914, 0.008759488105773925, 0.008917023658752441, 0.008835071563720704, 0.008821791648864746, 0.008821663856506347, 0.008845376014709474, 0.008853504180908203, 0.008771583557128907, 0.008749055862426757, 0.008738816261291504, 0.008771136283874511, 0.008821375846862793, 0.008752320289611816, 0.008718208312988281, 0.008802751541137695, 0.008810784339904785, 0.008859744071960449, 0.009115584373474121, 0.008976384162902832, 0.008980480194091797, 0.008935423851013183, 0.008923359870910644, 0.008881600379943847, 0.008953599929809571, 0.008915552139282227, 0.008945664405822755, 0.008879167556762695, 0.008858559608459472, 0.00882688045501709, 0.008811967849731445, 0.008776608467102051, 
0.008746368408203125, 0.008769503593444824, 0.008833503723144532, 0.008826848030090331, 0.008800127983093261, 0.008787584304809571, 0.008992511749267578, 0.009349056243896484, 0.00923094367980957, 0.009519200325012207, 0.009371839523315429, 0.009295712471008301, 0.00902284812927246, 0.009146656036376953, 0.009164192199707032, 0.00900937557220459, 0.009069248199462891, 0.009052160263061524, 0.009123007774353027, 0.009073472023010254, 0.009484288215637206, 0.009084927558898925, 0.009078816413879394, 0.009145888328552246, 0.009466272354125976, 0.009184288024902345, 0.009040896415710448, 0.00898044776916504, 0.009182080268859864, 0.009207743644714356, 0.009308159828186035, 0.00933683204650879, 0.009211872100830078, 0.009080032348632812, 0.008887104034423828, 0.008825152397155762, 0.008736448287963867, 0.008833024024963379, 0.008816639900207519, 0.00874723243713379, 0.008826047897338866, 0.008716896057128906, 0.008794112205505371, 0.00889840030670166, 0.009113375663757323, 0.008975808143615722, 0.008784288406372071, 0.008806976318359375, 0.00897225570678711, 0.00902348804473877, 0.00908902359008789, 0.00906867218017578, 0.009078656196594238, 0.009048064231872559, 0.009084927558898925, 0.00911366367340088, 0.008992704391479492, 0.008899744033813477, 0.008825695991516114, 0.00890675163269043, 0.00880246353149414, 0.00870793628692627, 0.008765439987182617, 0.008740863800048827, 0.008842816352844238, 0.008848896026611328, 0.008833184242248535, 0.00886473560333252, 0.00883078384399414, 0.008788000106811524, 0.008839136123657226, 0.008824831962585449, 0.008797696113586426, 0.008882687568664551, 0.008823967933654786, 0.008833663940429688, 0.008851872444152833, 0.008816448211669922, 0.008767487525939942, 0.008826111793518067, 0.008794879913330078, 0.008761343955993652, 0.008760736465454102, 0.008766048431396485, 0.008767487525939942, 0.008822784423828126, 0.009024543762207031, 0.009009440422058106, 0.008768192291259766, 0.008744959831237792, 0.00886128044128418, 0.009186623573303222, 0.009056639671325684, 0.009007424354553222, 0.009252863883972168, 0.009016832351684571, 0.008963839530944824, 0.008919487953186036, 0.008865823745727539, 0.008948063850402832, 0.009018655776977538, 0.008960512161254883, 0.00903388786315918, 0.00902143955230713, 0.008912320137023927, 0.008956543922424317, 0.008964159965515136, 0.00888742446899414, 0.00881328010559082, 0.008761247634887696, 0.008806048393249511, 0.00877184009552002, 0.008786144256591797, 0.008769536018371582, 0.008737088203430176, 0.00872118377685547, 0.008872096061706542, 0.008949888229370118, 0.008782464027404785, 0.008749055862426757, 0.008906111717224122, 0.008849056243896484, 0.008839296340942384, 0.009136992454528809, 0.009041472434997559, 0.009003456115722657, 0.008927103996276855, 0.009150591850280762, 0.00907875156402588, 0.009160736083984374, 0.009113311767578125, 0.009179360389709472, 0.009095232009887695, 0.009042207717895507, 0.009090784072875977, 0.009100576400756837, 0.009030559539794921, 0.008990495681762696, 0.008968223571777343, 0.008955904006958008, 0.009071647644042969, 0.009001791954040527, 0.009130144119262695, 0.008910847663879394, 0.008845312118530273, 0.008824831962585449, 0.00887183952331543, 0.008912991523742676, 0.009009152412414552, 0.00898252773284912, 0.00894502353668213, 0.008960543632507324, 0.009027680397033692, 0.009158656120300293, 0.008696063995361328, 0.008929023742675781, 0.00894976043701172, 0.00901039981842041, 0.008994720458984374, 0.008887328147888184, 0.008823840141296387, 0.008902496337890625, 
0.008952799797058106, 0.008985759735107422, 0.008987296104431152, 0.008945664405822755, 0.009006303787231445, 0.008903648376464844, 0.008879776000976563, 0.008999296188354492, 0.009115327835083007, 0.009001248359680176, 0.00917296028137207, 0.009215456008911133, 0.00920246410369873, 0.00926470375061035, 0.009410783767700196, 0.009167936325073242, 0.009017375946044922, 0.009144831657409667, 0.00924617576599121, 0.009157376289367676, 0.009201855659484863, 0.009185343742370606, 0.00923151969909668, 0.009039839744567872, 0.009120127677917481, 0.009118047714233399, 0.009099360466003417, 0.009054304122924805, 0.008974240303039551, 0.00905561637878418, 0.009103103637695313, 0.009094143867492676, 0.009138143539428711, 0.009123744010925293, 0.009225600242614746, 0.009055904388427734, 0.008994879722595215, 0.009003168106079101, 0.008925951957702637, 0.009140352249145507, 0.00922815990447998, 0.009246720314025878, 0.009176608085632323, 0.009145952224731446, 0.009048352241516113, 0.009036640167236328, 0.009004799842834473, 0.009064448356628419, 0.009064064025878907, 0.009006784439086915, 0.009031552314758301, 0.009142623901367188, 0.0089682559967041, 0.009188799858093262, 0.009186304092407227]",tokens/s,107.79326488448557,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.378048,14689.435648,0.0,14294.188032,14284.158464,s,1,7.67521337890625,7.67521337890625,0.0,7.67521337890625,7.67521337890625,7.67521337890625,7.67521337890625,[7.67521337890625],,kWh,1.4913201391709664e-05,1.6374225206308957e-06,7.240561347987562e-06,2.379118526032812e-05,,MB,1108.60288,14993.522688,0.0,14587.789312,14512.892416,s,10,13.831203735351563,1.3831203735351563,0.005871257155461055,1.3818245849609374,1.391693359375,1.3923574829101562,1.3928887817382811,"[1.3721275634765624, 1.3819266357421875, 1.3801014404296874, 1.3817225341796875, 1.37926953125, 1.3808548583984375, 1.3878416748046876, 1.3827921142578126, 1.3915457763671875, 1.3930216064453125]",tokens/s,185.08873479007644,kWh,4.046693066208491e-05,4.4630393216569606e-06,2.6789104764601834e-05,7.17190747483437e-05,tokens/kWh,3569482.747766655,MB,1123.889152,15098.380288,0.0,14692.646912,14646.153216,s,10,43.86718408203125,4.386718408203125,0.0031396886923479163,4.385417236328125,4.38997939453125,4.391894970703125,4.3934274316406245,"[4.3893564453125, 4.383873046875, 4.38734375, 4.38417919921875, 4.38458154296875, 4.38401171875, 4.38422119140625, 4.3862529296875, 4.3895537109375, 4.393810546875]",tokens/s,14.36153273075166,kWh,0.000128290075229163,1.4151362859655802e-05,8.529765157139767e-05,0.00022773908966021648,tokens/kWh,276632.3519339395,,s,630,43.86357635498046,0.06962472437298486,0.0003836243782883348,0.06962299346923828,0.06999763946533202,0.0701092010498047,0.07131899414062501,"[0.07148953247070312, 0.06933856201171874, 0.06915724945068359, 0.06906285095214844, 0.06904985809326172, 0.06947686767578125, 0.06943856048583984, 
0.06923971557617188, 0.06963814544677735, 0.06922444915771485, 0.06935346984863282, 0.06929730987548828, 0.06950313568115235, 0.06970639801025391, 0.06972013092041016, 0.06969136047363281, 0.06957833862304688, 0.06917491149902344, 0.069153564453125, 0.06951094055175781, 0.06960326385498047, 0.06940262603759766, 0.06939826965332031, 0.06927823638916016, 0.06958080291748046, 0.06958284759521484, 0.06937728118896484, 0.06945049285888671, 0.0696258544921875, 0.0697630386352539, 0.06975081634521485, 0.06960521697998047, 0.06968335723876953, 0.0696975326538086, 0.06953369903564453, 0.07010099029541016, 0.069984130859375, 0.06956633758544922, 0.069604736328125, 0.06987436676025391, 0.06936790466308594, 0.0693853759765625, 0.06999468994140624, 0.06988992309570312, 0.06990275573730469, 0.07011484527587891, 0.07140643310546875, 0.06964166259765625, 0.06981903839111328, 0.06985507202148437, 0.06988339233398437, 0.06975350189208984, 0.06977913665771485, 0.0696671371459961, 0.0697891845703125, 0.06981683349609374, 0.06974435424804687, 0.06986959838867188, 0.06980226898193359, 0.06995555114746094, 0.06989209747314454, 0.06975667572021485, 0.06962217712402344, 0.07135222625732422, 0.06936370849609375, 0.06899712371826172, 0.06900297546386719, 0.06910348510742187, 0.06912380981445312, 0.06887699127197265, 0.06901145935058593, 0.069146240234375, 0.06937580871582032, 0.06944620513916015, 0.06926335906982421, 0.06919071960449219, 0.06935561370849609, 0.06974345397949219, 0.06968038177490235, 0.06923545837402344, 0.06927696228027344, 0.06910435485839844, 0.06937190246582031, 0.06935247802734375, 0.06953398132324219, 0.06913504028320312, 0.06951692962646484, 0.06967539215087891, 0.06966585540771485, 0.06957766723632812, 0.06952140808105468, 0.06944153594970703, 0.06963552093505859, 0.06984665679931641, 0.06970841979980469, 0.06940499114990234, 0.06934912109375, 0.06944576263427735, 0.06943142700195312, 0.06958451080322266, 0.06940300750732421, 0.06932275390625, 0.069570556640625, 0.06950819396972656, 0.06963292694091797, 0.06971596527099609, 0.06979366302490235, 0.06987789154052734, 0.069914306640625, 0.06977158355712891, 0.06962947082519531, 0.06972259521484375, 0.06952345275878906, 0.06967295837402344, 0.06967478179931641, 0.06976051330566406, 0.06990643310546875, 0.07005027008056641, 0.06974899291992187, 0.06984422302246093, 0.07026262664794922, 0.0700560302734375, 0.07003014373779297, 0.07040204620361327, 0.06994944000244141, 0.06991667175292969, 0.07123763275146484, 0.06943475341796874, 0.06907564544677734, 0.06919366455078126, 0.06899251556396484, 0.06901401519775391, 0.0690847396850586, 0.06907129669189453, 0.06923209381103515, 0.06926595306396484, 0.06939647674560546, 0.06921398162841796, 0.06941907501220704, 0.06978166198730469, 0.07022502136230468, 0.06957965087890625, 0.06956031799316406, 0.06957875061035156, 0.0694824981689453, 0.06914662170410156, 0.06923878479003906, 0.0692462387084961, 0.06963228607177735, 0.06935750579833984, 0.07023056030273438, 0.06960739135742187, 0.06967622375488282, 0.06983763122558594, 0.06990348815917968, 0.06985552215576171, 0.0697534408569336, 0.06964019012451172, 0.06963404846191407, 0.069316162109375, 0.06940624237060547, 0.06959401702880859, 0.06942924499511718, 0.06940672302246094, 0.06938540649414063, 0.06959801483154297, 0.0694988784790039, 0.06971548461914062, 0.06986799621582031, 0.0699669418334961, 0.06987257385253906, 0.069846435546875, 0.06980172729492187, 0.06991545867919922, 0.06973849487304687, 0.0696627197265625, 0.06974771118164062, 0.0695367660522461, 
0.06954179382324219, 0.0698345947265625, 0.06991487884521484, 0.0698490219116211, 0.07006623840332031, 0.0699901123046875, 0.07022783660888672, 0.06992694091796875, 0.07000511932373046, 0.06994287872314453, 0.06979145812988281, 0.07158169555664062, 0.0693616943359375, 0.06920598602294922, 0.06920396423339843, 0.06907904052734375, 0.06913433837890624, 0.069123779296875, 0.0690643539428711, 0.06907766723632812, 0.06913200378417969, 0.06921449279785157, 0.06922022247314454, 0.06925865936279296, 0.06963385772705079, 0.06965862274169922, 0.06957711791992187, 0.06964889526367188, 0.06946406555175781, 0.06913228607177735, 0.06930786895751953, 0.06925965118408203, 0.06913606262207031, 0.06927613067626953, 0.0692674560546875, 0.06921401977539063, 0.06956050872802734, 0.06967910766601562, 0.06985731506347656, 0.06983881378173828, 0.06957846069335938, 0.07005983734130859, 0.06970825958251953, 0.06968112182617188, 0.06942912292480469, 0.0693814697265625, 0.06960006713867188, 0.06935266876220703, 0.06939523315429688, 0.07006963348388671, 0.06970406341552735, 0.06964864349365234, 0.06967091369628907, 0.06994310760498047, 0.06971206665039062, 0.0698502426147461, 0.06966675567626954, 0.06977632141113281, 0.06981427001953125, 0.06956963348388671, 0.06976092529296875, 0.06970265960693359, 0.06981807708740234, 0.06962579345703125, 0.06952339172363281, 0.06967132568359374, 0.06972402954101563, 0.0699024658203125, 0.06981836700439453, 0.06987558746337891, 0.0698694076538086, 0.07016067504882813, 0.06986547088623046, 0.0697092514038086, 0.07101286315917969, 0.06932505798339844, 0.06958454132080077, 0.06906095886230469, 0.06921193695068359, 0.0694151382446289, 0.06912409973144532, 0.06899468994140626, 0.06902742767333984, 0.06910975646972656, 0.06946431732177734, 0.06959926605224609, 0.06925772857666015, 0.069615234375, 0.06980032348632813, 0.069644287109375, 0.06918527984619141, 0.06913459014892578, 0.06914457702636718, 0.06926239776611329, 0.06924143981933593, 0.06921660614013672, 0.06917027282714844, 0.06918646240234375, 0.06943949127197266, 0.0693446044921875, 0.06938848114013672, 0.06991209411621094, 0.06993196868896484, 0.06977651214599609, 0.06958684539794922, 0.06940499114990234, 0.06962246704101563, 0.06975389099121093, 0.06944242858886719, 0.06974470520019531, 0.06949478149414062, 0.0693511962890625, 0.06948681640625, 0.06940643310546875, 0.06951350402832031, 0.06969139099121094, 0.07001910400390625, 0.06987158203125, 0.06981171417236329, 0.06973900604248047, 0.06990227508544922, 0.0697242202758789, 0.06982450866699219, 0.06977065277099609, 0.06951382446289063, 0.06979379272460938, 0.0696844482421875, 0.06960822296142578, 0.0696627197265625, 0.06997401428222656, 0.07002835083007812, 0.07021231842041016, 0.06982383728027344, 0.06993106842041015, 0.07039046478271484, 0.0699920654296875, 0.0699148178100586, 0.07136966705322266, 0.06942924499511718, 0.06906060791015625, 0.06910361480712891, 0.06933229064941407, 0.06928864288330078, 0.06911385345458984, 0.06926950073242187, 0.0690268783569336, 0.06917113494873046, 0.06912716674804688, 0.06924214172363281, 0.06921062469482422, 0.06965443420410156, 0.06963641357421875, 0.06942896270751953, 0.06951760101318359, 0.06914662170410156, 0.06940057373046875, 0.06948770904541016, 0.06956256103515625, 0.06950166320800781, 0.069168701171875, 0.0692040023803711, 0.06946173095703125, 0.06963629150390625, 0.06932077026367188, 0.06954000091552734, 0.06968144226074219, 0.06971139526367187, 0.06969391632080078, 0.06949874877929688, 0.06948876953125, 0.06937737274169922, 
0.06966521453857422, 0.06973190307617187, 0.06942991638183593, 0.06932473754882812, 0.06936991882324219, 0.06970687866210938, 0.06935142517089844, 0.06942604827880859, 0.0697343978881836, 0.06979763031005859, 0.06985343933105469, 0.06974259185791015, 0.06971600341796876, 0.06965245056152344, 0.06945331573486328, 0.06983238220214844, 0.06994412994384766, 0.06974022674560547, 0.06960364532470703, 0.07042864227294922, 0.0698120346069336, 0.06976448059082031, 0.0698499526977539, 0.0700211181640625, 0.06990233612060547, 0.06990636444091797, 0.06999660491943359, 0.070076416015625, 0.06994944000244141, 0.07112908935546874, 0.06944153594970703, 0.06910771179199218, 0.06920396423339843, 0.06904994964599609, 0.06911138916015624, 0.06896636962890625, 0.06906124877929687, 0.06908902740478516, 0.06913686370849609, 0.06932479858398438, 0.06919891357421876, 0.06932572937011719, 0.06940879821777343, 0.06940467071533203, 0.06931635284423829, 0.06919602966308594, 0.06920191955566406, 0.06977519989013672, 0.06944579315185546, 0.06919782257080079, 0.06916268920898437, 0.069281982421875, 0.06962351989746093, 0.06941903686523437, 0.0693905258178711, 0.06978169250488281, 0.06978678131103516, 0.06952582550048828, 0.06979843139648438, 0.06958258819580078, 0.06958220672607422, 0.06942400360107422, 0.06928793334960938, 0.06970982360839843, 0.06955964660644531, 0.06970774078369141, 0.0696951675415039, 0.07006060791015625, 0.06964268493652344, 0.06992870330810547, 0.06993536376953124, 0.06962588500976563, 0.06974848175048828, 0.06974281311035156, 0.069930908203125, 0.06965257263183594, 0.06965042877197265, 0.06936083221435548, 0.06958573150634766, 0.06957164764404297, 0.06999750518798828, 0.06988777923583984, 0.06984111785888672, 0.06988582611083985, 0.0699208984375, 0.06986281585693359, 0.06980258941650391, 0.06999244689941406, 0.0698936996459961, 0.06990892791748046, 0.07008016204833985, 0.06992316436767577, 0.07142601776123046, 0.06943334197998047, 0.06899472045898437, 0.06901996612548827, 0.06916909027099609, 0.06907810974121094, 0.06906963348388671, 0.06919522857666016, 0.06920054626464844, 0.06921382141113282, 0.06952540588378907, 0.06939907073974609, 0.06955741119384766, 0.06974140930175782, 0.0699513931274414, 0.06965567779541015, 0.069333984375, 0.06922835540771484, 0.06917129516601563, 0.06928115081787109, 0.06926972961425781, 0.06931097412109374, 0.06932246398925782, 0.06922882843017578, 0.06941081237792969, 0.06967446136474609, 0.06946604919433594, 0.069552734375, 0.06974854278564453, 0.06982470703125, 0.06980515289306641, 0.07046377563476562, 0.06951705932617187, 0.06967724609375, 0.06934188842773438, 0.06961357116699218, 0.06940876770019531, 0.06930355072021484, 0.06956639862060547, 0.06951200103759765, 0.06944563293457032, 0.06945996856689453, 0.0700967025756836, 0.07010230255126954, 0.0700384292602539, 0.06978150177001953, 0.06976102447509766, 0.0696556167602539, 0.06966368103027344, 0.0696094741821289, 0.06994329833984375, 0.06966067504882813, 0.06963404846191407, 0.06972608184814454, 0.06978892517089844, 0.06986953735351563, 0.06990873718261718, 0.07002713775634765, 0.06994818878173828, 0.07004774475097657, 0.07004364776611328, 0.06999654388427734, 0.07003472137451172, 0.07095539093017578, 0.06944541168212891, 0.06913184356689453, 0.06909008026123047, 0.06914252471923828, 0.06910361480712891, 0.06931660461425782, 0.06918553924560547, 0.06922147369384765, 0.06920694732666016, 0.06938371276855469, 0.06932720184326172, 0.06940275573730469, 0.06963404846191407, 0.06965846252441406, 0.0699024658203125, 
0.06981145477294921, 0.06921218872070313, 0.06909625244140626, 0.0693656997680664, 0.06919782257080079, 0.06940009307861328, 0.06948912048339843, 0.0693780517578125, 0.06926131439208984, 0.06955830383300782, 0.06982176208496094, 0.06976934051513672, 0.06990617370605469, 0.06990723419189453, 0.06957465362548829, 0.06962995147705078, 0.06959913635253906, 0.06938419342041016, 0.06941295623779296, 0.0693759994506836, 0.06954300689697265, 0.06951209259033203, 0.06935955047607421, 0.06955219268798828, 0.0697548828125, 0.07003103637695313, 0.06999884796142578, 0.06997772979736328, 0.0697041244506836, 0.06974463653564453, 0.06983270263671874, 0.06979923248291016, 0.0697946548461914, 0.06991241455078125, 0.07024832153320312, 0.06991462707519532, 0.06990563201904297, 0.06985932922363282, 0.07044528198242188, 0.07016313934326172, 0.07026467132568359, 0.0701822738647461, 0.07001372528076172, 0.07036431884765625, 0.0700681915283203, 0.06992371368408203, 0.07003529357910156, 0.07164012908935546, 0.06991149139404297, 0.06931251525878906, 0.06922374725341797, 0.06925142669677735, 0.06909372711181641, 0.06908665466308593, 0.06913286590576172, 0.06979174041748047, 0.06960694122314454, 0.06955260467529296, 0.0693759994506836, 0.06929129791259765, 0.06982886505126953, 0.06971849822998047, 0.06948863983154296, 0.06930809783935547, 0.06935078430175781, 0.06947936248779298, 0.06957997131347657, 0.07076537322998047, 0.06935529327392578, 0.06926771545410157, 0.06966614532470704, 0.06944217681884765, 0.0693207015991211, 0.06960511779785156, 0.06945613098144532, 0.06966681671142579, 0.07002480316162109, 0.07021609497070312, 0.06984633636474609, 0.06961017608642578, 0.06972621154785157, 0.06979993438720702, 0.07004080200195313, 0.06964304351806641, 0.069607421875, 0.06955548858642578, 0.06996355438232423, 0.06996399688720703, 0.06979452514648438, 0.06999244689941406, 0.06994124603271484, 0.06991462707519532, 0.0700145263671875, 0.06962630462646484, 0.06962179565429688, 0.07027247619628907, 0.06970829010009766, 0.06970982360839843, 0.06992272186279297, 0.06955836486816407, 0.06988390350341797, 0.06996355438232423, 0.06986774444580078, 0.06993852996826172, 0.06986953735351563, 0.070008544921875, 0.07031084442138671, 0.0700203857421875, 0.0699411849975586, 0.06998713684082031]",tokens/s,14.362713949759069,, bfloat16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.403008,1326.383104,0.0,931.135488,917.648384,s,1,7.63307861328125,7.63307861328125,0.0,7.63307861328125,7.63307861328125,7.63307861328125,7.63307861328125,[7.63307861328125],,kWh,1.0338465666666252e-05,1.13285999688864e-06,3.105280261997412e-06,1.4576605925552304e-05,,MB,1135.603712,1456.406528,0.0,1050.673152,1018.330112,s,10,0.7034482269287109,0.0703448226928711,0.0008076623861901718,0.07017057418823242,0.0707837059020996,0.0716950527191162,0.07242413017272949,"[0.0726063995361328, 0.07038086700439453, 0.06963465881347657, 
0.07012895965576171, 0.06993052673339843, 0.07021218872070313, 0.07058118438720704, 0.06974800109863281, 0.07038400268554687, 0.06984143829345703]",tokens/s,3639.21593942611,kWh,2.329240217592651e-06,2.56763201417403e-07,1.5366215996666443e-06,4.122625018676698e-06,tokens/kWh,62096358.228130154,MB,1162.858496,1473.183744,0.0,1067.450368,1032.767488,s,10,13.302551025390624,1.3302551025390623,0.024940005579764144,1.3159708251953126,1.3628881225585936,1.3655794982910157,1.367732598876953,"[1.3622900390625, 1.3682708740234375, 1.3511224365234376, 1.3586226806640624, 1.3164345703125, 1.3095928955078124, 1.307552978515625, 1.302308349609375, 1.315507080078125, 1.31084912109375]",tokens/s,47.3593372276879,kWh,3.8109842997824186e-05,4.20321723531936e-06,1.767769932733434e-05,5.999075956047789e-05,tokens/kWh,1050161.7325996421,,s,630,13.296805097579947,0.0211060398374285,0.0005385663482820034,0.020997008323669436,0.02172949161529541,0.021857902526855467,0.02261669342041016,"[0.022597631454467772, 0.021526527404785157, 0.02134342384338379, 0.0214182071685791, 0.021647872924804686, 0.02165353584289551, 0.02230284881591797, 0.021929983139038087, 0.02179680061340332, 0.02147532844543457, 0.02156470489501953, 0.021756288528442382, 0.02178492736816406, 0.02179836845397949, 0.021703359603881835, 0.02156732749938965, 0.021453855514526367, 0.021554143905639648, 0.021456480026245117, 0.021507711410522462, 0.021325952529907228, 0.02159174346923828, 0.021931072235107422, 0.02183065605163574, 0.021742431640625, 0.021707103729248046, 0.02154863929748535, 0.021587648391723634, 0.021436864852905274, 0.021700927734375, 0.0222096004486084, 0.02158451271057129, 0.02149177551269531, 0.02155232048034668, 0.021584127426147463, 0.021605056762695314, 0.021738880157470702, 0.0214136962890625, 0.021658239364624025, 0.021723455429077148, 0.02163596725463867, 0.021720191955566407, 0.021691583633422853, 0.02162259292602539, 0.021740224838256834, 0.02165555191040039, 0.02157583999633789, 0.021528032302856444, 0.021591615676879884, 0.021457056045532226, 0.021041183471679686, 0.02081046485900879, 0.02141584014892578, 0.021598207473754884, 0.021302688598632814, 0.021457504272460938, 0.021532447814941406, 0.021798208236694337, 0.021652383804321287, 0.021726272583007813, 0.021573631286621094, 0.02139814376831055, 0.021291328430175782, 0.021510143280029297, 0.02159119987487793, 0.02144470405578613, 0.021570304870605468, 0.02223308753967285, 0.02168012809753418, 0.021338111877441408, 0.02104876708984375, 0.021000288009643556, 0.02151641654968262, 0.021782655715942383, 0.022624479293823243, 0.02234339141845703, 0.021600831985473634, 0.02215228843688965, 0.02167788887023926, 0.02157401657104492, 0.021508895874023437, 0.021968544006347655, 0.021540319442749024, 0.02161292839050293, 0.02156764793395996, 0.021411584854125976, 0.021298944473266603, 0.021227136611938476, 0.02191244888305664, 0.021729280471801758, 0.021763744354248046, 0.02145110321044922, 0.021521856307983398, 0.021525056838989257, 0.021354496002197267, 0.021484735488891602, 0.021832319259643556, 0.021975200653076173, 0.022322656631469727, 0.021938751220703125, 0.022347776412963868, 0.022716415405273437, 0.021846048355102538, 0.021401567459106444, 0.021297407150268555, 0.021462783813476563, 0.021796863555908205, 0.021540864944458008, 0.02166988754272461, 0.021497856140136717, 0.021683328628540038, 0.02194428825378418, 0.02155788803100586, 0.02177872085571289, 0.021589311599731445, 0.021657440185546876, 0.021940288543701173, 0.02181427192687988, 0.021816511154174805, 
0.022708831787109376, 0.021864511489868163, 0.021682111740112305, 0.0216964168548584, 0.02168025588989258, 0.021612512588500978, 0.021380096435546874, 0.021208703994750975, 0.021369375228881837, 0.02177039909362793, 0.021630655288696288, 0.021644607543945312, 0.022151872634887694, 0.021448703765869142, 0.021169919967651368, 0.02114358329772949, 0.021694143295288085, 0.021748191833496095, 0.021297536849975585, 0.021289888381958007, 0.021183263778686522, 0.021206111907958985, 0.02091859245300293, 0.021012351989746093, 0.0214268798828125, 0.021451967239379883, 0.021248863220214843, 0.0211267204284668, 0.0213222713470459, 0.02145849609375, 0.0216210880279541, 0.02145280075073242, 0.021587039947509764, 0.02152579116821289, 0.021505279541015623, 0.02154319953918457, 0.021606496810913086, 0.021522432327270507, 0.021383167266845703, 0.02147532844543457, 0.021537919998168946, 0.021441408157348633, 0.02217513656616211, 0.021218048095703126, 0.021415359497070314, 0.021092800140380858, 0.021483488082885742, 0.021340383529663085, 0.021753631591796874, 0.02155264091491699, 0.021606016159057616, 0.021639711380004884, 0.0217545280456543, 0.021277952194213866, 0.020963903427124023, 0.020832128524780273, 0.021217279434204102, 0.02180499267578125, 0.02173139190673828, 0.021690271377563478, 0.021427871704101563, 0.02144095993041992, 0.02148953628540039, 0.021319807052612303, 0.02125619125366211, 0.02129305648803711, 0.021514240264892577, 0.021602304458618164, 0.02127667236328125, 0.021186559677124024, 0.02140812873840332, 0.02176576042175293, 0.021624191284179688, 0.021646656036376954, 0.021587648391723634, 0.021403648376464843, 0.021587968826293946, 0.02150918388366699, 0.021399648666381835, 0.02099292755126953, 0.02099510383605957, 0.021535648345947265, 0.02160867118835449, 0.02149558448791504, 0.021563135147094726, 0.021566848754882812, 0.021711936950683595, 0.022163040161132814, 0.021571807861328125, 0.021524511337280273, 0.021698816299438477, 0.021481184005737303, 0.02207043266296387, 0.024928895950317383, 0.02184982490539551, 0.021383167266845703, 0.02158233642578125, 0.021397504806518555, 0.021315616607666017, 0.02128700828552246, 0.0214649600982666, 0.02128291130065918, 0.02143577575683594, 0.020889888763427733, 0.021646751403808593, 0.02177680015563965, 0.02154745674133301, 0.02135264015197754, 0.02127177619934082, 0.0215347843170166, 0.021653215408325197, 0.021590656280517578, 0.021583999633789062, 0.021612607955932617, 0.021534719467163087, 0.02271027183532715, 0.021413888931274414, 0.020996095657348633, 0.021243743896484375, 0.02168764877319336, 0.021784959793090822, 0.0214881591796875, 0.0211691837310791, 0.021033504486083984, 0.021164384841918946, 0.021284799575805664, 0.02158006477355957, 0.021384992599487306, 0.021288415908813477, 0.021391904830932618, 0.021606399536132814, 0.021489664077758788, 0.02142617607116699, 0.02091663932800293, 0.021643264770507813, 0.021413888931274414, 0.021665023803710937, 0.022551551818847656, 0.021619808197021483, 0.0214800968170166, 0.02152889633178711, 0.021487295150756838, 0.0211494083404541, 0.02095337677001953, 0.020762624740600585, 0.020727807998657227, 0.020707359313964845, 0.020774879455566407, 0.02126665687561035, 0.021325056076049804, 0.021209983825683593, 0.021097152709960938, 0.020837503433227537, 0.020583904266357422, 0.020625791549682616, 0.020488000869750975, 0.02047609519958496, 0.02048723220825195, 0.020454336166381835, 0.020557695388793946, 0.020559999465942384, 0.020563968658447264, 0.020561759948730468, 0.020437152862548828, 
0.02056972885131836, 0.020474239349365233, 0.020502527236938475, 0.020598688125610352, 0.020615455627441406, 0.020645727157592775, 0.02071548843383789, 0.020731903076171874, 0.02085068893432617, 0.020609024047851563, 0.020658176422119142, 0.020764671325683593, 0.020694623947143553, 0.020728031158447267, 0.021235071182250976, 0.02084947204589844, 0.02065328025817871, 0.020948991775512696, 0.02064259147644043, 0.020862464904785157, 0.02090991973876953, 0.02174131202697754, 0.020972000122070313, 0.020951007843017577, 0.020821855545043944, 0.02077743911743164, 0.020721824645996093, 0.020710655212402344, 0.020760799407958986, 0.020799488067626954, 0.020748832702636718, 0.020717567443847656, 0.02096758460998535, 0.021202623367309572, 0.020926624298095702, 0.020901023864746095, 0.020617696762084962, 0.020652416229248047, 0.020700639724731445, 0.020554271697998047, 0.020690944671630858, 0.020692991256713866, 0.020738048553466795, 0.020572160720825194, 0.020590591430664062, 0.020768768310546876, 0.02099612808227539, 0.021086463928222655, 0.02112483215332031, 0.021112831115722656, 0.020860576629638673, 0.020709888458251953, 0.023066272735595705, 0.020792959213256836, 0.020821920394897463, 0.02054742431640625, 0.020437183380126952, 0.02080620765686035, 0.020719680786132812, 0.021372127532958984, 0.020670944213867188, 0.020645343780517578, 0.0210565128326416, 0.020676448822021486, 0.020586496353149415, 0.020554784774780274, 0.02155766487121582, 0.0205963191986084, 0.02075894355773926, 0.020826688766479494, 0.020780799865722656, 0.02061747169494629, 0.02046976089477539, 0.020463615417480468, 0.020597856521606447, 0.02041529655456543, 0.020491424560546874, 0.020487104415893555, 0.020418560028076172, 0.020600128173828124, 0.020888256072998046, 0.020961280822753905, 0.021006080627441408, 0.020869312286376954, 0.020778751373291014, 0.02073017692565918, 0.020817920684814452, 0.02062745666503906, 0.020590591430664062, 0.020485248565673828, 0.020646976470947265, 0.02061497688293457, 0.020662559509277343, 0.02057366371154785, 0.02053116798400879, 0.020390527725219727, 0.020537343978881836, 0.020815872192382814, 0.020912128448486327, 0.021594112396240234, 0.020979007720947265, 0.02063350486755371, 0.02062214469909668, 0.020557792663574218, 0.020448959350585938, 0.020511039733886717, 0.020639999389648438, 0.0205185604095459, 0.020424800872802733, 0.020496383666992187, 0.02066201591491699, 0.02184217643737793, 0.021061632156372072, 0.020510879516601563, 0.020465696334838867, 0.02052012825012207, 0.020386432647705077, 0.02046063995361328, 0.020568992614746092, 0.020609024047851563, 0.02061484718322754, 0.02051718330383301, 0.02067036819458008, 0.0205164794921875, 0.02058243179321289, 0.020588991165161132, 0.020508607864379882, 0.02056608009338379, 0.023451648712158202, 0.020735712051391603, 0.02062735939025879, 0.020516544342041015, 0.02051862335205078, 0.020495328903198242, 0.02046281623840332, 0.02062345504760742, 0.020520832061767576, 0.02062745666503906, 0.02058336067199707, 0.020694143295288087, 0.02105187225341797, 0.020827423095703124, 0.020581344604492187, 0.020508703231811524, 0.021874624252319334, 0.02118662452697754, 0.020649984359741212, 0.020619264602661135, 0.021360095977783204, 0.020636192321777345, 0.020743711471557617, 0.020643455505371094, 0.020519775390625, 0.02053513526916504, 0.02164486312866211, 0.02097417640686035, 0.02055561637878418, 0.020544864654541015, 0.020424671173095703, 0.020645151138305663, 0.020695808410644532, 0.020649984359741212, 0.020510719299316405, 0.02052412796020508, 
0.02068355178833008, 0.020743871688842775, 0.020494783401489258, 0.020450912475585937, 0.020611488342285156, 0.020564096450805664, 0.020563840866088867, 0.020756832122802736, 0.020581695556640626, 0.020576608657836913, 0.02047177505493164, 0.02052092742919922, 0.02062339210510254, 0.020449312210083007, 0.020463615417480468, 0.020404096603393554, 0.020804895401000976, 0.020849760055541993, 0.020788991928100586, 0.020620479583740234, 0.020656095504760743, 0.020458240509033204, 0.020463712692260744, 0.020602880477905275, 0.020561920166015626, 0.020619264602661135, 0.020553407669067384, 0.02051513671875, 0.020653663635253908, 0.020644256591796875, 0.020707328796386718, 0.02069196891784668, 0.0206812801361084, 0.020532800674438478, 0.02058464050292969, 0.020918975830078124, 0.020563968658447264, 0.02050214385986328, 0.020504255294799805, 0.02041516876220703, 0.022337535858154296, 0.020785280227661133, 0.02076201629638672, 0.020655616760253907, 0.020655071258544922, 0.020642879486083985, 0.020847551345825194, 0.021036096572875976, 0.021221855163574218, 0.02116399955749512, 0.020859392166137695, 0.02081558418273926, 0.02051100730895996, 0.020512767791748047, 0.02049843215942383, 0.02058639907836914, 0.020586591720581054, 0.02040652847290039, 0.02061235237121582, 0.020551551818847658, 0.020430912017822267, 0.020560447692871093, 0.020539392471313478, 0.021753984451293944, 0.020602176666259766, 0.020643903732299806, 0.02060873603820801, 0.020570560455322264, 0.020472095489501952, 0.020466943740844727, 0.0204083194732666, 0.020564800262451173, 0.020482303619384766, 0.02060406494140625, 0.020533855438232423, 0.02086092758178711, 0.021056575775146483, 0.020751615524291993, 0.020930143356323243, 0.02113545608520508, 0.021727231979370116, 0.021716543197631836, 0.021051296234130858, 0.021044767379760743, 0.020877504348754884, 0.02225663948059082, 0.021180416107177736, 0.020877119064331054, 0.020776960372924806, 0.020705087661743164, 0.020738239288330077, 0.02072812843322754, 0.02070319938659668, 0.020997888565063478, 0.02068191909790039, 0.020789087295532225, 0.020587455749511718, 0.020760576248168947, 0.02087500762939453, 0.020519168853759765, 0.02087731170654297, 0.02064588737487793, 0.02083839988708496, 0.021583520889282226, 0.021025119781494142, 0.02166374397277832, 0.02077401542663574, 0.02108095932006836, 0.020578304290771485, 0.020857887268066408, 0.02072675132751465, 0.02066431999206543, 0.02059644889831543, 0.02176950454711914, 0.021230527877807617, 0.021084224700927735, 0.02123366355895996, 0.021002464294433594, 0.020777055740356445, 0.0208155517578125, 0.02128892707824707, 0.02117635154724121, 0.021127168655395507, 0.021153087615966796, 0.021492576599121092, 0.021489503860473633, 0.021152095794677736, 0.020925151824951173, 0.02084566307067871, 0.021243999481201172, 0.021345983505249022, 0.021276191711425783, 0.02126902389526367, 0.02118396759033203, 0.02096623992919922, 0.020848320007324218, 0.020796800613403322, 0.020834943771362305, 0.020602880477905275, 0.020587776184082033, 0.02049420738220215, 0.020490816116333008, 0.020631296157836914, 0.02058425521850586, 0.02077568054199219, 0.02062131118774414, 0.020572160720825194, 0.020752384185791017, 0.020981760025024415, 0.02136412811279297, 0.021004095077514648, 0.020795200347900392, 0.020568735122680665, 0.020582304000854493, 0.02071139144897461, 0.02068115234375, 0.020676383972167967, 0.020512256622314453, 0.0205599365234375, 0.020578975677490233, 0.02048204803466797, 0.020570112228393556, 0.020520959854125977, 0.02051024055480957, 
0.02057881546020508, 0.020772512435913087, 0.020777280807495118, 0.020813215255737306, 0.020719648361206055, 0.02123404884338379, 0.020660415649414062, 0.02060492706298828, 0.020510656356811523, 0.020465728759765624, 0.020989952087402345, 0.02068604850769043, 0.020591392517089843, 0.020531200408935548, 0.020551136016845703, 0.020476415634155275, 0.020449312210083007, 0.020600223541259767, 0.020673023223876954]",tokens/s,47.37980254479786,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1001, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 571, in __init__ self.mlp = 
GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 167, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 442736 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, 
*model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1001, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 571, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 166, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 166.12 MiB is free. Process 441987 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,941.522944,12526.157824,0.0,12123.635712,12121.851904,s,1,7.30981591796875,7.30981591796875,0.0,7.30981591796875,7.30981591796875,7.30981591796875,7.30981591796875,[7.30981591796875],,kWh,6.108519608339217e-06,6.664611696264716e-07,2.17500173999019e-06,8.949982517955878e-06,,MB,1283.489792,12551.323648,0.0,12138.315776,10311.21664,s,10,3.4640921936035154,0.3464092193603515,0.007146287203217333,0.3475733642578125,0.3519137512207031,0.3525267608642578,0.35301716857910154,"[0.3266773376464844, 0.34695498657226564, 0.34592825317382814, 0.34338629150390626, 0.34711322021484375, 0.3501602783203125, 0.3480335083007812, 0.35177752685546876, 0.3531397705078125, 
0.3509210205078125]",tokens/s,739.0103544954919,kWh,9.670148122042168e-06,1.066127845198761e-06,6.430704069291142e-06,1.716698003653207e-05,tokens/kWh,14912349.14092176,MB,1312.251904,12557.615104,0.0,12144.607232,10311.2192,s,10,27.008072265625003,2.7008072265625,0.007129148502947273,2.699569091796875,2.705652221679687,2.712232482910156,2.717496691894531,"[2.697590087890625, 2.693035888671875, 2.691891845703125, 2.697677490234375, 2.718812744140625, 2.70418994140625, 2.699623291015625, 2.699514892578125, 2.70215478515625, 2.703581298828125]",tokens/s,23.326359386332197,kWh,7.922722524462192e-05,8.739111493525589e-06,5.260048204450651e-05,0.00014056681878265405,tokens/kWh,448185.42914748105,,s,630,27.00531581497194,0.04286558065868559,0.00030271322441724897,0.04281118392944336,0.04315574722290039,0.04324519557952881,0.04452955753326416,"[0.044399040222167965, 0.043340286254882815, 0.04281494522094727, 0.042553886413574216, 0.04254646301269531, 0.042864639282226565, 0.042840801239013675, 0.04300185775756836, 0.0428851203918457, 0.0425799674987793, 0.04272742462158203, 0.042618881225585936, 0.04274585723876953, 0.04317184066772461, 0.043046817779541016, 0.04272956848144531, 0.04248521423339844, 0.04262255859375, 0.04271577453613281, 0.042719551086425785, 0.04268592071533203, 0.04304508972167969, 0.042971263885498046, 0.04305635070800781, 0.04274595260620117, 0.04310451126098633, 0.04332812881469727, 0.042729408264160156, 0.04266604614257812, 0.04264755249023437, 0.04264511871337891, 0.04265203094482422, 0.04256563186645508, 0.042487808227539066, 0.04256358337402344, 0.042530784606933596, 0.04256774520874024, 0.04255740737915039, 0.0425814094543457, 0.04284889602661133, 0.04270800018310547, 0.04262368011474609, 0.0425816650390625, 0.04280518341064453, 0.04275904083251953, 0.04280012893676758, 0.04274665451049805, 0.04270489501953125, 0.04318627166748047, 0.04328799819946289, 0.04307980728149414, 0.04276464080810547, 0.042651424407958986, 0.042772289276123046, 0.04304268646240234, 0.04264400100708008, 0.042616832733154295, 0.042995712280273435, 0.04292752075195312, 0.04284870529174805, 0.04283737564086914, 0.04278265762329102, 0.04277443313598633, 0.0444169921875, 0.04327974319458008, 0.04298201751708984, 0.04278185653686523, 0.042637950897216795, 0.0429277458190918, 0.04263296127319336, 0.04263817596435547, 0.042624446868896486, 0.042578495025634766, 0.042768352508544924, 0.042861663818359375, 0.04263212966918945, 0.04259603118896484, 0.042635520935058596, 0.042934337615966794, 0.042595455169677734, 0.042621822357177735, 0.04347615814208984, 0.04252278518676758, 0.04242214584350586, 0.04257167816162109, 0.042522945404052735, 0.042678848266601566, 0.042622177124023435, 0.04260124969482422, 0.0426879997253418, 0.042764415740966795, 0.042598304748535154, 0.04270051193237305, 0.04274655914306641, 0.0425753288269043, 0.04257222366333008, 0.0424920654296875, 0.04248598480224609, 0.04270038223266601, 0.04267571258544922, 0.042611392974853515, 0.04263935852050781, 0.04249977493286133, 0.042555713653564455, 0.04256563186645508, 0.04254719924926758, 0.042527904510498045, 0.04270985412597656, 0.04304076766967774, 0.042893310546875, 0.042784160614013675, 0.04273417663574219, 0.042858497619628906, 0.04273356628417969, 0.0428851203918457, 0.04274339294433594, 0.04269302368164062, 0.04303462219238281, 0.042794689178466794, 0.04300831985473633, 0.04274585723876953, 0.04300185775756836, 0.04263958358764648, 0.042598175048828124, 0.042630142211914065, 0.04273459243774414, 0.04454601669311523, 
0.043112449645996094, 0.04274383926391601, 0.042575870513916016, 0.042699806213378905, 0.04248630523681641, 0.04261318588256836, 0.04274995040893555, 0.04285440063476562, 0.04295884704589844, 0.04269206237792969, 0.04273721694946289, 0.04259920120239258, 0.04278905487060547, 0.0425164794921875, 0.042571102142333984, 0.04261545562744141, 0.04267750549316406, 0.042597118377685546, 0.04255539321899414, 0.042544254302978514, 0.042557857513427735, 0.04283843231201172, 0.04272320175170898, 0.04275987243652344, 0.04287263870239258, 0.042789505004882815, 0.042647136688232425, 0.04261088180541992, 0.04264511871337891, 0.04257449722290039, 0.042624191284179686, 0.042512351989746094, 0.04252758407592774, 0.04271923065185547, 0.042625022888183595, 0.04256358337402344, 0.04258816146850586, 0.04257753753662109, 0.04254947280883789, 0.042614368438720705, 0.04251091384887695, 0.0425533447265625, 0.04265369415283203, 0.04272332763671875, 0.04285177612304687, 0.04285702514648437, 0.04266368103027344, 0.042766559600830076, 0.042942142486572264, 0.04272777557373047, 0.04292784118652344, 0.04281372833251953, 0.04281753540039063, 0.04275199890136719, 0.042606433868408206, 0.04256502532958984, 0.04267216110229492, 0.042838657379150394, 0.04270499038696289, 0.04302995300292969, 0.0426819839477539, 0.0428205451965332, 0.044520286560058596, 0.04310015869140625, 0.04268032073974609, 0.042540992736816406, 0.042506305694580075, 0.04262819290161133, 0.04261775970458984, 0.04255654525756836, 0.0425841293334961, 0.042593086242675784, 0.042527809143066406, 0.042508544921875, 0.04253152084350586, 0.04251219177246094, 0.04259244918823242, 0.04261273574829102, 0.04256121444702148, 0.042578239440917966, 0.04259132766723633, 0.04252150344848633, 0.04263494491577149, 0.04271068954467774, 0.042799774169921874, 0.04307308959960938, 0.043131328582763674, 0.04286873626708984, 0.042684417724609375, 0.0426956787109375, 0.04266227340698242, 0.04258675384521484, 0.04262092971801758, 0.042659137725830076, 0.04266393661499023, 0.04266393661499023, 0.04281401443481445, 0.04256985473632813, 0.04259612655639648, 0.04251465606689453, 0.04252262496948242, 0.04253417587280273, 0.042606399536132815, 0.04309699249267578, 0.04282572937011719, 0.042958240509033206, 0.043037281036376954, 0.04311040115356445, 0.04286806488037109, 0.04297385787963867, 0.04301824188232422, 0.04326588821411133, 0.04321500778198242, 0.04300799942016602, 0.04308515167236328, 0.04308841705322266, 0.0430695686340332, 0.0429567985534668, 0.04306739044189453, 0.04304076766967774, 0.043087646484375, 0.04306470489501953, 0.04308598327636719, 0.04302691268920898, 0.04293040084838867, 0.044590625762939456, 0.043590110778808595, 0.04322304153442383, 0.04297679901123047, 0.04315555191040039, 0.0430145263671875, 0.043138561248779295, 0.04308553695678711, 0.04308803176879883, 0.04317865753173828, 0.04303606414794922, 0.04320483016967774, 0.043012447357177734, 0.0429150390625, 0.043087871551513675, 0.043084606170654294, 0.04300387191772461, 0.043063297271728515, 0.04298060989379883, 0.04300278472900391, 0.04287062454223633, 0.0429117431640625, 0.043399166107177735, 0.04330464172363281, 0.04326841735839844, 0.04310774230957031, 0.04315331268310547, 0.04308652877807617, 0.043063297271728515, 0.043169792175292966, 0.04308582305908203, 0.04309417724609375, 0.0430918083190918, 0.0430489616394043, 0.043068958282470704, 0.043039169311523434, 0.042993694305419924, 0.04315356826782227, 0.04301356887817383, 0.04369465637207031, 0.04312400054931641, 0.04305977630615234, 0.043089534759521486, 
0.043036319732666015, 0.043254016876220706, 0.04326601409912109, 0.04334233474731446, 0.043259902954101564, 0.04319609451293945, 0.04314553451538086, 0.04304457473754883, 0.04312851333618164, 0.04327280044555664, 0.043163616180419924, 0.04303647994995117, 0.04323535919189453, 0.043155166625976564, 0.043176414489746094, 0.04301619338989258, 0.0430489616394043, 0.04317174530029297, 0.04302640151977539, 0.043210880279541015, 0.04461119842529297, 0.04341715240478516, 0.043158271789550784, 0.04297017669677734, 0.043109375, 0.043093921661376954, 0.04293804931640625, 0.043073951721191404, 0.04295270538330078, 0.043071487426757815, 0.04288716888427734, 0.04281139373779297, 0.04305100631713867, 0.04294451141357422, 0.04284832000732422, 0.04285555267333984, 0.04293510437011719, 0.043169792175292966, 0.04277775955200196, 0.042879840850830075, 0.04292310333251953, 0.04300022506713867, 0.04306585693359375, 0.04315750503540039, 0.04319023895263672, 0.04304233551025391, 0.04298499298095703, 0.04284515380859375, 0.04314726257324219, 0.042958206176757815, 0.04291648101806641, 0.04277043151855469, 0.042726398468017575, 0.042810367584228515, 0.04262041473388672, 0.042533374786376955, 0.04281315231323242, 0.04265350341796875, 0.04280368041992187, 0.04264467239379883, 0.042688926696777346, 0.04266960144042969, 0.04261526489257812, 0.0426473274230957, 0.042735233306884765, 0.04309075164794922, 0.043036865234375, 0.0430489616394043, 0.043044769287109375, 0.04287823867797851, 0.0428180160522461, 0.042668384552001955, 0.04278681564331055, 0.042663520812988284, 0.042662303924560545, 0.04268851089477539, 0.042676223754882815, 0.04271708679199219, 0.04311049652099609, 0.0428092155456543, 0.04300198364257812, 0.042962944030761716, 0.04277043151855469, 0.04462623977661133, 0.04305145645141602, 0.043106273651123045, 0.04298339080810547, 0.042686176300048825, 0.042670654296875, 0.042698047637939454, 0.04258457565307617, 0.042719295501708984, 0.0428087043762207, 0.042786880493164064, 0.042858367919921876, 0.04283065414428711, 0.0426473274230957, 0.04259657669067383, 0.042788864135742184, 0.042837566375732425, 0.04277088165283203, 0.04273516845703125, 0.042721214294433596, 0.042523136138916014, 0.04289718246459961, 0.042829601287841794, 0.042880767822265624, 0.042805599212646483, 0.04284656143188476, 0.042805248260498044, 0.042656864166259766, 0.04267715072631836, 0.042562976837158206, 0.04258038330078125, 0.04252896118164062, 0.04252435302734375, 0.042538944244384765, 0.04268889617919922, 0.04270064163208008, 0.04265382385253906, 0.04285356903076172, 0.042632030487060546, 0.04292607879638672, 0.04268159866333008, 0.042844928741455075, 0.04265532684326172, 0.04290297698974609, 0.042904544830322265, 0.043014144897460936, 0.04316364669799805, 0.043374591827392575, 0.04303462219238281, 0.043012096405029294, 0.042976608276367186, 0.04296480178833008, 0.04316428756713867, 0.04324313735961914, 0.04278716659545898, 0.04284543991088867, 0.04299059295654297, 0.042992992401123045, 0.04289148712158203, 0.04299398422241211, 0.04284787368774414, 0.04277081680297851, 0.04270259094238281, 0.044636478424072264, 0.04334182357788086, 0.04290505599975586, 0.04280579376220703, 0.04271488189697266, 0.04275571060180664, 0.04262464141845703, 0.04284723281860352, 0.04295212936401367, 0.042744033813476565, 0.04266633605957031, 0.04273971176147461, 0.04271014404296875, 0.04261999893188476, 0.0427026252746582, 0.04267212677001953, 0.042641407012939454, 0.042599872589111326, 0.042629791259765626, 0.04254300689697266, 0.04257558441162109, 
0.04257574462890625, 0.042829345703125, 0.042852897644042966, 0.042875232696533205, 0.04284384155273437, 0.042802688598632815, 0.04275616073608399, 0.042711135864257815, 0.04265619277954102, 0.04266976165771484, 0.04269644927978516, 0.042576671600341794, 0.04275609588623047, 0.04282720184326172, 0.04268064117431641, 0.04272483062744141, 0.0430450553894043, 0.04271523284912109, 0.042756607055664066, 0.042889217376708984, 0.04274995040893555, 0.042927936553955076, 0.042928321838378906, 0.04320627212524414, 0.043037151336669924, 0.042939327239990235, 0.0431638069152832, 0.04308870315551758, 0.04291584014892578, 0.04290764617919922, 0.04286873626708984, 0.042891265869140625, 0.042995712280273435, 0.043065441131591796, 0.04277443313598633, 0.04259635162353516, 0.04268236923217773, 0.04282681655883789, 0.043204704284667966, 0.043020767211914064, 0.04275238418579102, 0.04300595092773438, 0.044533344268798826, 0.04332992172241211, 0.04297884750366211, 0.0429859504699707, 0.04284415817260742, 0.04269875335693359, 0.042898494720458986, 0.04276319885253906, 0.04309097671508789, 0.042939231872558596, 0.042790431976318356, 0.04280790328979492, 0.04267788696289063, 0.04268899154663086, 0.04271299362182617, 0.042692222595214845, 0.042961280822753904, 0.04267007827758789, 0.042603679656982425, 0.04284297561645508, 0.04282479858398437, 0.04283254241943359, 0.04300006484985352, 0.04316678237915039, 0.04294547271728515, 0.0428092155456543, 0.04277260971069336, 0.04273766326904297, 0.042777694702148435, 0.04266019058227539, 0.042663486480712894, 0.04265267181396484, 0.04324687957763672, 0.04288934326171875, 0.04262473678588867, 0.04272403335571289, 0.04259571075439453, 0.04263209533691406, 0.0425588493347168, 0.0425552978515625, 0.042641792297363285, 0.042724895477294925, 0.042738399505615234, 0.04262662506103516, 0.043024833679199216, 0.04294246292114258, 0.04322099304199219, 0.04321811294555664, 0.04320134353637695, 0.04307068634033203, 0.04316022491455078, 0.043061374664306644, 0.04284415817260742, 0.04271120071411133, 0.04270678329467773, 0.04289664077758789, 0.042679039001464844, 0.043063297271728515, 0.04306739044189453, 0.04293395233154297, 0.04306288146972656, 0.04302713775634766, 0.04306844711303711, 0.04454751968383789, 0.04325839996337891, 0.04312451171875, 0.04308377456665039, 0.0427031021118164, 0.04282777786254883, 0.04285747146606445, 0.042728446960449216, 0.0425984001159668, 0.04257382583618164, 0.04268137741088867, 0.04279571151733398, 0.042600608825683596, 0.04290777587890625, 0.042705982208251954, 0.04268054580688477, 0.04268719863891601, 0.04261273574829102, 0.04251638412475586, 0.04281097412109375, 0.042850303649902347, 0.04275046539306641, 0.04297283172607422, 0.042956321716308594, 0.0431767692565918, 0.04285235214233398, 0.04279241561889648, 0.04290323257446289, 0.042747936248779296, 0.04309270477294922, 0.042855934143066404, 0.0427639045715332, 0.042738655090332034, 0.04271104049682617, 0.04277414321899414, 0.04300147247314453, 0.042763008117675784, 0.04269987106323242, 0.04290652847290039, 0.04283766555786133, 0.04287932968139648, 0.04275305557250977, 0.04305142211914063, 0.0430843505859375, 0.043014110565185545, 0.04318956756591797, 0.042945247650146484, 0.042992767333984376, 0.04315020751953125, 0.043194366455078126, 0.04284415817260742, 0.04300707244873047, 0.04307766342163086, 0.043081790924072265, 0.04301820755004883, 0.04304572677612305, 0.04300185775756836, 0.04310006332397461, 0.04284425735473633, 0.042686752319335934, 0.04293132781982422, 0.043022945404052736, 
0.04290150451660156]",tokens/s,23.328740323441206,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,939.978752,6540.886016,0.0,6138.363904,6060.931072,s,1,7.1839013671875,7.1839013671875,0.0,7.1839013671875,7.1839013671875,7.1839013671875,7.1839013671875,[7.1839013671875],,kWh,5.199559137524072e-06,5.660340290339267e-07,2.4686130860318567e-06,8.234206252589856e-06,,MB,1277.280256,6563.954688,0.0,6150.946816,5419.87328,s,10,4.629353240966798,0.4629353240966797,0.0051292289145212664,0.4608850250244141,0.46960369873046875,0.47012527465820314,0.47054253540039065,"[0.457906005859375, 0.45776809692382814, 0.4660736083984375, 0.4706468505859375, 0.45955029296875, 0.45679397583007814, 0.46100726318359375, 0.4693565673828125, 0.46076278686523436, 0.46948779296875]",tokens/s,552.9930136559136,kWh,1.3557313913448718e-05,1.4951269590251687e-06,8.970259701455606e-06,2.4022700573929494e-05,tokens/kWh,10656587.05657026,MB,1325.899776,6563.954688,0.0,6150.946816,5419.87584,s,10,16.29367370605469,1.6293673706054688,0.0062193872434990775,1.6291291503906251,1.6387874267578124,1.6390975708007813,1.6393456860351563,"[1.62502392578125, 1.63940771484375, 1.62734619140625, 1.6225037841796874, 1.62470947265625, 1.638718505859375, 1.633542724609375, 1.620237548828125, 1.630912109375, 1.631271728515625]",tokens/s,38.665313382696105,kWh,4.7256918971551196e-05,5.212403879866261e-06,3.126093915014515e-05,8.37302620015626e-05,tokens/kWh,752416.1335936615,,s,630,16.29156422042846,0.02585962574671185,0.0004566980213121472,0.025774607658386232,0.0261959716796875,0.026659265995025633,0.02752619281768799,"[0.026744224548339843, 0.02596873664855957, 0.025726848602294922, 0.026009952545166016, 0.02574569511413574, 0.025534624099731444, 0.02560598373413086, 0.025757120132446288, 0.025510175704956055, 0.025463071823120118, 0.02564672088623047, 0.025788288116455078, 0.025522560119628907, 0.025608320236206055, 0.025994720458984374, 0.026477088928222658, 0.02574131202697754, 0.025632768630981444, 0.025585664749145507, 0.025585664749145507, 0.02551398468017578, 0.025517568588256836, 0.02556979179382324, 0.025710079193115236, 0.025632415771484375, 0.02564796829223633, 0.02557142448425293, 0.025542560577392577, 0.025600000381469725, 0.0256463680267334, 0.02559868812561035, 0.025544704437255858, 0.025571584701538086, 0.02607030487060547, 0.02573311996459961, 0.02564963150024414, 0.025591808319091795, 0.02765228843688965, 0.026748159408569335, 0.02551251220703125, 0.0255133113861084, 0.025859935760498047, 0.025550752639770507, 0.025960639953613283, 0.025488096237182616, 0.02552627182006836, 0.02548121643066406, 0.025651199340820312, 0.025786367416381836, 0.026209760665893554, 0.02556572723388672, 0.026042367935180662, 0.025686016082763673, 0.02570649528503418, 0.0269434871673584, 
0.025632768630981444, 0.025665536880493164, 0.025619935989379884, 0.025850400924682618, 0.025909120559692383, 0.02586432075500488, 0.025999359130859375, 0.026007551193237305, 0.02675632095336914, 0.025897760391235352, 0.025966688156127928, 0.025991071701049806, 0.025776128768920898, 0.025931776046752928, 0.025860095977783205, 0.029036640167236328, 0.025991071701049806, 0.02585785675048828, 0.025880767822265626, 0.02576348876953125, 0.02688444709777832, 0.02589619255065918, 0.02586844825744629, 0.025803359985351562, 0.02572902488708496, 0.02586419105529785, 0.025890815734863282, 0.02581849670410156, 0.025748096466064452, 0.02573107147216797, 0.027693056106567384, 0.026058271408081056, 0.02611452865600586, 0.02587612724304199, 0.02602572822570801, 0.025786975860595703, 0.025938976287841798, 0.0257873592376709, 0.02634060859680176, 0.02574617576599121, 0.026621952056884765, 0.026941343307495116, 0.026806207656860353, 0.025698463439941407, 0.025945600509643556, 0.027535871505737306, 0.025845535278320314, 0.0255817928314209, 0.025636863708496094, 0.025636863708496094, 0.025591808319091795, 0.02560972785949707, 0.025877248764038085, 0.026833824157714844, 0.02576265525817871, 0.025804800033569338, 0.025759807586669924, 0.025744800567626954, 0.026007871627807617, 0.02638627243041992, 0.025799039840698243, 0.025796607971191408, 0.025548799514770508, 0.02549964714050293, 0.025587167739868164, 0.025659936904907227, 0.0256180477142334, 0.025607648849487304, 0.025468032836914064, 0.025974559783935546, 0.02571059226989746, 0.026675296783447267, 0.025617759704589845, 0.02555766487121582, 0.026050495147705077, 0.025468576431274415, 0.025475584030151367, 0.025497503280639648, 0.025827327728271485, 0.02586787223815918, 0.025745536804199217, 0.025542943954467774, 0.02549964714050293, 0.02549283218383789, 0.02553104019165039, 0.025624576568603515, 0.025827327728271485, 0.025976831436157227, 0.02574131202697754, 0.025636863708496094, 0.02556723213195801, 0.02575564765930176, 0.025789920806884765, 0.025757568359375, 0.025746015548706053, 0.025831071853637696, 0.02577974319458008, 0.02582134437561035, 0.026183679580688478, 0.02593152046203613, 0.02584060859680176, 0.02595840072631836, 0.025831424713134765, 0.026583040237426758, 0.026216447830200194, 0.026195968627929687, 0.025874431610107423, 0.025894752502441408, 0.025952415466308595, 0.025804607391357422, 0.025782463073730468, 0.025771711349487306, 0.025802047729492187, 0.02572313690185547, 0.025764608383178712, 0.025819135665893556, 0.025763103485107422, 0.025827392578125, 0.027091552734375, 0.025913408279418945, 0.02596793556213379, 0.025948863983154297, 0.026011520385742188, 0.025831424713134765, 0.02586595153808594, 0.025825216293334962, 0.025768415451049805, 0.02569215965270996, 0.025768159866333008, 0.025650976181030273, 0.025647104263305662, 0.02550275230407715, 0.02560099220275879, 0.025803007125854493, 0.026675424575805663, 0.02570230484008789, 0.02555084800720215, 0.025820703506469728, 0.02619599914550781, 0.02627014350891113, 0.025677824020385744, 0.02547711944580078, 0.0254749755859375, 0.025591808319091795, 0.0255263671875, 0.025510976791381836, 0.02574006462097168, 0.02581315231323242, 0.02572287940979004, 0.025665536880493164, 0.025643007278442383, 0.029306880950927733, 0.025525663375854494, 0.025510496139526367, 0.025554943084716796, 0.025790464401245116, 0.025585664749145507, 0.025517248153686525, 0.0260467529296875, 0.02560185623168945, 0.02565603256225586, 0.025571327209472656, 0.02557139205932617, 0.025604032516479493, 0.025823200225830078, 
0.025704000473022463, 0.02562505531311035, 0.025665536880493164, 0.025625823974609375, 0.025603103637695312, 0.025499391555786132, 0.02555904006958008, 0.025607776641845704, 0.025482784271240233, 0.025482112884521485, 0.025432064056396485, 0.025647104263305662, 0.02548940849304199, 0.026109407424926758, 0.025445152282714843, 0.025519071578979494, 0.025438880920410155, 0.025622655868530273, 0.025785472869873045, 0.025774911880493166, 0.025941856384277345, 0.025927743911743163, 0.025770143508911134, 0.02558140754699707, 0.025710752487182617, 0.026447296142578125, 0.02569468879699707, 0.025699840545654298, 0.025538816452026367, 0.02578816032409668, 0.025622400283813476, 0.025713375091552734, 0.02698588752746582, 0.026167903900146484, 0.026636287689208983, 0.025868288040161135, 0.02591119956970215, 0.025987167358398438, 0.026117151260375976, 0.026022815704345705, 0.026046527862548827, 0.0261214714050293, 0.025932544708251952, 0.025740928649902343, 0.025729248046875, 0.025682079315185548, 0.025650432586669922, 0.025843456268310548, 0.025764671325683594, 0.025786752700805663, 0.025853759765625, 0.02574950408935547, 0.025627904891967774, 0.02554547119140625, 0.025995264053344725, 0.02571059226989746, 0.025675775527954102, 0.025649152755737304, 0.02608332824707031, 0.025763776779174803, 0.026216032028198243, 0.025670047760009765, 0.025710079193115236, 0.02579104042053223, 0.025862144470214843, 0.025767648696899414, 0.025917280197143556, 0.025742816925048827, 0.02573388862609863, 0.025700576782226564, 0.02550169563293457, 0.02556662368774414, 0.025692768096923828, 0.0262674560546875, 0.02555523109436035, 0.025542560577392577, 0.0255468807220459, 0.02549337577819824, 0.025406623840332033, 0.025485536575317384, 0.025394847869873047, 0.0254169921875, 0.025452096939086913, 0.02548080062866211, 0.025890560150146485, 0.02551683235168457, 0.0257126407623291, 0.025622112274169922, 0.025547168731689454, 0.025938016891479492, 0.025513887405395508, 0.025605567932128905, 0.0258374080657959, 0.026000223159790038, 0.02575961685180664, 0.02687443161010742, 0.025900127410888672, 0.025870399475097658, 0.025690975189208983, 0.026023935317993165, 0.025891839981079103, 0.026070016860961914, 0.02581907272338867, 0.025625791549682617, 0.025560127258300782, 0.025720703125, 0.025751583099365233, 0.025818336486816407, 0.025851743698120117, 0.025760000228881835, 0.02608367919921875, 0.025685312271118164, 0.0255927677154541, 0.02584752082824707, 0.025958080291748047, 0.025930335998535156, 0.031635040283203124, 0.028150144577026366, 0.02625334358215332, 0.025937471389770508, 0.025698816299438477, 0.025900672912597657, 0.025801023483276366, 0.025960447311401368, 0.025960447311401368, 0.026154144287109375, 0.026515775680541993, 0.025969184875488282, 0.026003551483154298, 0.025849632263183594, 0.0258602237701416, 0.025833471298217774, 0.026484735488891603, 0.025689952850341796, 0.025806880950927733, 0.025847936630249025, 0.02591744041442871, 0.025720640182495116, 0.02584105682373047, 0.025647903442382814, 0.025628671646118165, 0.02573311996459961, 0.025786367416381836, 0.025774080276489256, 0.02560588836669922, 0.025604352951049805, 0.02556048011779785, 0.02551055908203125, 0.02581088066101074, 0.025638912200927736, 0.025527807235717775, 0.025477312088012696, 0.026387903213500978, 0.02674163246154785, 0.025788415908813478, 0.026730495452880858, 0.02573516845703125, 0.02569011116027832, 0.026670368194580078, 0.025787168502807618, 0.02664569664001465, 0.02750249671936035, 0.025998239517211915, 0.025949792861938478, 
0.025923776626586913, 0.025614559173583986, 0.02573516845703125, 0.025667200088500975, 0.025636768341064452, 0.025971168518066405, 0.025562528610229493, 0.025606143951416017, 0.025598560333251953, 0.02707449531555176, 0.027121055603027345, 0.025823904037475587, 0.025738271713256836, 0.02622729682922363, 0.02566739273071289, 0.026284799575805665, 0.025745216369628905, 0.025544416427612304, 0.02558185577392578, 0.025524192810058594, 0.025606176376342774, 0.026071039199829102, 0.025610240936279297, 0.025597471237182617, 0.025492992401123047, 0.025504735946655272, 0.025470912933349608, 0.025793695449829103, 0.026426271438598634, 0.026057727813720705, 0.0259102725982666, 0.026052608489990234, 0.026000415802001953, 0.025848896026611327, 0.025768896102905274, 0.02574198341369629, 0.025816640853881835, 0.026141183853149414, 0.026093727111816407, 0.026036319732666017, 0.025830400466918944, 0.025710912704467775, 0.02583126449584961, 0.02586662483215332, 0.02609987258911133, 0.025834815979003906, 0.025931999206542967, 0.025901792526245117, 0.02601785659790039, 0.026093568801879883, 0.025991167068481445, 0.02615609550476074, 0.025844671249389647, 0.025812416076660155, 0.025596351623535157, 0.02562409591674805, 0.025950815200805662, 0.02672640037536621, 0.025964256286621093, 0.025835647583007812, 0.0257959041595459, 0.025561567306518554, 0.025516416549682615, 0.025785919189453124, 0.026054176330566406, 0.02590812873840332, 0.025806848526000976, 0.025657344818115234, 0.02569830322265625, 0.025550495147705077, 0.02589936065673828, 0.026537919998168947, 0.026320383071899413, 0.02595692825317383, 0.02573833656311035, 0.0258035831451416, 0.026002719879150392, 0.025835744857788084, 0.025725120544433593, 0.026, 0.025759519577026366, 0.025829376220703124, 0.025804800033569338, 0.02576166343688965, 0.025991296768188475, 0.02595430374145508, 0.02565849685668945, 0.026399072647094728, 0.02571232032775879, 0.02563580894470215, 0.025541696548461914, 0.02556755256652832, 0.025450271606445314, 0.025428287506103514, 0.025481088638305664, 0.025497503280639648, 0.02579315185546875, 0.025606143951416017, 0.025579519271850586, 0.02547439956665039, 0.02551055908203125, 0.02553241539001465, 0.02553856086730957, 0.025620479583740235, 0.025503744125366212, 0.025443328857421874, 0.02551091194152832, 0.025503744125366212, 0.02548940849304199, 0.025529727935791016, 0.025561151504516603, 0.025579647064208986, 0.02553286361694336, 0.02553241539001465, 0.02582476806640625, 0.02548921585083008, 0.0254901123046875, 0.025470943450927735, 0.02538857650756836, 0.025381439208984374, 0.02674355125427246, 0.025636735916137694, 0.025575551986694336, 0.025593856811523437, 0.025575328826904296, 0.02555708885192871, 0.025491455078125, 0.025810943603515626, 0.02586198425292969, 0.02592076873779297, 0.026114816665649413, 0.02595414352416992, 0.026308927536010742, 0.026171072006225586, 0.0259172477722168, 0.02576639938354492, 0.02572492790222168, 0.025650880813598634, 0.0255614070892334, 0.025624576568603515, 0.02586419105529785, 0.025630720138549806, 0.025792512893676758, 0.02588870429992676, 0.025808639526367187, 0.025707967758178712, 0.026434431076049803, 0.02662326431274414, 0.02583830451965332, 0.0257392635345459, 0.02593382453918457, 0.02602569580078125, 0.026104352951049806, 0.026041343688964845, 0.025914112091064454, 0.02585100746154785, 0.025807008743286133, 0.02591315269470215, 0.02601215934753418, 0.025801120758056642, 0.025851072311401366, 0.025850624084472657, 0.02582124710083008, 0.02593769645690918, 0.025774303436279296, 
0.02591279983520508, 0.025717279434204102, 0.02584137535095215, 0.025913631439208985, 0.025814464569091797, 0.026038848876953125, 0.026109952926635743, 0.026030080795288086, 0.02614080047607422, 0.026019712448120118, 0.025961599349975585, 0.025887584686279295, 0.025894432067871093, 0.02583302307128906, 0.02590188789367676, 0.025704479217529295, 0.025702495574951172, 0.02575276756286621, 0.026739967346191405, 0.025748224258422853, 0.025755199432373047, 0.025555391311645508, 0.02574336051940918, 0.025757696151733397, 0.025608192443847655, 0.025671775817871095, 0.02593577575683594, 0.026128095626831056, 0.025714080810546876, 0.025553792953491212, 0.025844863891601563, 0.025727872848510743, 0.02567353630065918, 0.025495744705200194, 0.026204160690307617, 0.025525344848632812, 0.025529247283935547, 0.025486783981323244, 0.02558527946472168, 0.02571718406677246, 0.02566371154785156, 0.025949951171875, 0.026730335235595704, 0.026085920333862304, 0.02585206413269043, 0.025833471298217774, 0.026068992614746093, 0.02592563247680664, 0.025859167098999023, 0.025750431060791015, 0.025777376174926758, 0.025770784378051758, 0.02574336051940918, 0.025593856811523437, 0.025814847946166994, 0.025876672744750976, 0.02571673583984375, 0.025677824020385744, 0.02566147232055664, 0.02578838348388672, 0.02566067123413086, 0.02564556884765625, 0.025568639755249024, 0.025575807571411133, 0.02577987289428711, 0.026214271545410155, 0.026106176376342775, 0.026067615509033203, 0.025942111968994142, 0.027023263931274414, 0.02721129608154297, 0.025917919158935546, 0.02597430419921875, 0.026014175415039063, 0.026218496322631835, 0.025804319381713868, 0.026190303802490236, 0.025964000701904296, 0.026019807815551757, 0.02570707130432129, 0.026619455337524415]",tokens/s,38.67031989537412,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpfmu1xx38/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 790, in __init__ self.model = RecurrentGemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 660, in __init__ [RecurrentGemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 660, in [RecurrentGemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 490, in __init__ self.mlp_block = RecurrentGemmaMlp(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 472, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 453072 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1001, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 571, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 167, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 443619 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation 
cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpxgpp2dp1/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1005.346816,7163.74016,0.0,6761.218048,6730.975744,s,1,7.86497509765625,7.86497509765625,0.0,7.86497509765625,7.86497509765625,7.86497509765625,7.86497509765625,[7.86497509765625],,kWh,7.710496879190032e-06,8.386465843687782e-07,4.289170097981909e-06,1.283831356154072e-05,,MB,1519.947776,7203.586048,0.0,6784.28672,5879.090688,s,10,3.876493774414062,0.38764937744140626,0.0025442042488361214,0.3879522705078125,0.39042902526855466,0.3912836654663086,0.3919673776245117,"[0.38702328491210936, 0.38394540405273436, 0.3890393371582031, 0.3921383056640625, 0.38722207641601564, 0.38483621215820313, 0.38868246459960937, 0.39023910522460936, 0.3890076599121094, 0.38435992431640625]",tokens/s,660.3905872096873,kWh,1.1233471729490074e-05,1.2382290887882134e-06,5.224811872154161e-06,1.7696512690432448e-05,tokens/kWh,14466126.99791442,MB,1564.83584,7205.6832,0.0,6786.383872,5879.093248,s,10,25.2996044921875,2.52996044921875,0.01227767926885505,2.5250467529296876,2.539435205078125,2.5509923828125,2.5602381249999997,"[2.52127392578125, 2.51908837890625, 2.53352978515625, 2.520111328125, 2.5236806640625, 2.525563720703125, 2.562549560546875, 2.536866943359375, 2.52452978515625, 2.532410400390625]",tokens/s,24.901575050097858,kWh,7.254057630008996e-05,8.001958523627824e-06,4.8103869679646654e-05,0.00012864640450336443,tokens/kWh,489714.4249247354,,s,630,25.2978847808838,0.04015537266806951,0.0005011412613278848,0.04008342552185058,0.040663134384155274,0.040982735824584955,0.04181701541900635,"[0.04016892623901367, 0.039747486114501955, 0.039844478607177734, 0.040140033721923825, 0.04031907272338867, 0.040229534149169924, 0.040185855865478515, 0.040060352325439454, 0.04006256103515625, 0.04005756759643555, 0.03979289627075195, 0.039806976318359374, 0.0399441909790039, 0.03989913558959961, 0.03991312026977539, 0.040104286193847656, 0.03992166519165039, 0.03988592147827148, 0.04008639907836914, 0.04011420822143555, 0.040126399993896486, 0.040422752380371095, 0.04039344024658203, 0.03984998321533203, 0.039798782348632815, 0.03978035354614258, 0.03980492782592773, 0.03986022567749024, 0.04012236785888672, 0.04018780899047852, 0.039966560363769534, 0.03987212753295898, 0.03977401733398438, 0.039544704437255856, 0.03958780670166016, 
0.040198944091796876, 0.040476863861083984, 0.040339488983154294, 0.039974143981933594, 0.03977081680297852, 0.03974671936035156, 0.03959078216552735, 0.04063641738891602, 0.040697376251220704, 0.04005526351928711, 0.04029353713989258, 0.039836158752441404, 0.039900638580322265, 0.03979257583618164, 0.04036870574951172, 0.040226558685302734, 0.04006361770629883, 0.04000979232788086, 0.040230846405029295, 0.03966086578369141, 0.03969523239135742, 0.039761726379394534, 0.040420513153076175, 0.04032803344726563, 0.039895103454589846, 0.03984489440917969, 0.03997174453735351, 0.03993132781982422, 0.04023196792602539, 0.03970966339111328, 0.039575584411621095, 0.039718879699707034, 0.040531967163085936, 0.04046950531005859, 0.04006604766845703, 0.03997488021850586, 0.040249374389648436, 0.04017951965332031, 0.03998291015625, 0.03971315383911133, 0.03987046432495117, 0.040030208587646485, 0.0400445442199707, 0.039833534240722654, 0.03999545669555664, 0.04013260650634766, 0.040032257080078126, 0.03996057510375976, 0.039929279327392576, 0.04021865463256836, 0.039940704345703126, 0.03995577621459961, 0.040086143493652346, 0.04012825775146484, 0.040583358764648435, 0.040296512603759764, 0.04023910522460938, 0.04033536148071289, 0.04030607986450195, 0.039998367309570314, 0.0398988151550293, 0.04040499114990234, 0.040153087615966795, 0.04001136016845703, 0.03993747329711914, 0.04014179229736328, 0.03991142272949219, 0.04005196762084961, 0.04016559982299805, 0.040280609130859374, 0.04045167922973633, 0.04044022369384766, 0.04117241668701172, 0.03987305450439453, 0.03970572662353516, 0.03967398452758789, 0.0395906867980957, 0.03959177780151367, 0.03958595275878906, 0.03958339309692383, 0.03949417495727539, 0.03951504135131836, 0.039488414764404296, 0.03962265777587891, 0.03953078460693359, 0.039452224731445315, 0.039968574523925784, 0.04000579071044922, 0.039753150939941403, 0.03959475326538086, 0.0395489273071289, 0.04022342300415039, 0.039825408935546876, 0.039972862243652346, 0.04068758392333984, 0.04024732971191406, 0.0401324462890625, 0.04000950241088867, 0.04020172882080078, 0.039885345458984374, 0.03954044723510742, 0.03946662521362305, 0.0396627197265625, 0.03950707244873047, 0.039702625274658204, 0.039586177825927736, 0.039951969146728515, 0.04028076934814453, 0.04115014266967774, 0.04041878509521484, 0.0403647346496582, 0.0411874885559082, 0.040632320404052735, 0.040638496398925784, 0.04021593475341797, 0.04011609649658203, 0.040067745208740235, 0.040019615173339844, 0.04011459350585937, 0.04045619201660156, 0.04017372894287109, 0.040083297729492186, 0.03995974349975586, 0.04043449783325195, 0.04061721420288086, 0.040319904327392575, 0.0401192626953125, 0.04051238250732422, 0.039985088348388674, 0.04120991897583008, 0.040159488677978514, 0.03990502548217773, 0.04003782272338867, 0.04020636749267578, 0.0399664306640625, 0.039908161163330076, 0.03995977783203125, 0.042611328125, 0.041500511169433596, 0.04043708801269531, 0.04020323181152344, 0.03999129486083984, 0.03986182403564453, 0.039635391235351564, 0.039501823425292966, 0.040253662109375, 0.04267967987060547, 0.0400654411315918, 0.04004217529296875, 0.03978681564331055, 0.03959603118896484, 0.039880702972412106, 0.03969555282592773, 0.039775039672851564, 0.040280094146728514, 0.040023551940917966, 0.0398191032409668, 0.03966428756713867, 0.04257984161376953, 0.03981119918823242, 0.03955507278442383, 0.03953049468994141, 0.0395335693359375, 0.03946393585205078, 0.039485439300537106, 0.04040499114990234, 0.04031615829467773, 
0.040563457489013674, 0.04054204940795898, 0.04025312042236328, 0.04010646438598633, 0.04005238342285156, 0.03988441467285156, 0.039949024200439456, 0.040081153869628905, 0.040083553314208986, 0.039862430572509766, 0.040753311157226565, 0.04036374282836914, 0.04037545776367187, 0.04033225631713867, 0.04016332626342774, 0.04016128158569336, 0.04044537734985352, 0.04019462585449219, 0.04022428894042969, 0.04013308715820312, 0.039939231872558593, 0.039897953033447266, 0.03985203170776367, 0.039929855346679685, 0.039800830841064457, 0.040591358184814456, 0.03998720169067383, 0.03985612869262695, 0.03960198211669922, 0.03969801712036133, 0.03984239959716797, 0.03965449523925781, 0.040145824432373044, 0.04006707382202149, 0.04032921600341797, 0.04080009460449219, 0.04003036880493164, 0.03977987289428711, 0.039739681243896485, 0.03967136001586914, 0.03962944030761719, 0.03972710418701172, 0.03950102233886719, 0.039527198791503904, 0.03952012634277344, 0.03951814270019531, 0.03962694549560547, 0.03957350540161133, 0.03964899063110352, 0.039473438262939455, 0.040417152404785155, 0.04004207992553711, 0.040476993560791014, 0.04008723068237305, 0.039967262268066406, 0.039832992553710936, 0.039617118835449217, 0.03970579147338867, 0.039453502655029296, 0.039636993408203126, 0.03960422515869141, 0.03993167877197266, 0.04014473724365234, 0.03977603149414063, 0.0396580810546875, 0.040871200561523435, 0.04073289489746094, 0.0402861442565918, 0.04015980911254883, 0.04039884948730469, 0.040648704528808595, 0.040304641723632816, 0.03998515319824219, 0.04011529541015625, 0.04035712051391602, 0.04000937652587891, 0.04020374298095703, 0.03998275375366211, 0.04077801513671875, 0.040185504913330075, 0.04012492752075195, 0.040237503051757814, 0.04024115371704102, 0.040243198394775394, 0.040183712005615234, 0.0403743667602539, 0.040072479248046876, 0.04005551910400391, 0.040081375122070315, 0.03981929779052734, 0.040662017822265625, 0.039943168640136716, 0.03996790313720703, 0.039930721282958985, 0.03983087921142578, 0.03983388900756836, 0.03978483200073242, 0.039634944915771485, 0.04009369659423828, 0.04021996688842774, 0.03990393447875976, 0.039976158142089845, 0.03973564910888672, 0.03998495864868164, 0.04052851104736328, 0.040279422760009766, 0.04019081497192383, 0.03988809585571289, 0.0398485107421875, 0.03963904190063477, 0.03980192184448242, 0.03958265686035156, 0.03945676803588867, 0.0402823371887207, 0.04011788940429688, 0.03967580795288086, 0.0409615364074707, 0.039488544464111326, 0.03947423934936523, 0.039531391143798826, 0.03997500610351563, 0.039894943237304685, 0.04019200134277344, 0.0399027214050293, 0.040083744049072265, 0.039748062133789064, 0.03949951934814453, 0.03984384155273438, 0.03978617477416992, 0.04002028656005859, 0.039814369201660156, 0.040210208892822265, 0.040414207458496096, 0.04029644775390625, 0.04006092834472656, 0.040134654998779294, 0.03991961669921875, 0.03991756820678711, 0.03973734283447266, 0.03980287933349609, 0.0397127685546875, 0.04132454299926758, 0.04004044723510742, 0.04000342559814453, 0.04050960159301758, 0.04072652816772461, 0.04059545516967773, 0.040406272888183596, 0.040067840576171875, 0.04017766571044922, 0.039880352020263674, 0.03972668838500976, 0.03970755386352539, 0.03965507125854492, 0.039663806915283206, 0.03993423843383789, 0.04063817596435547, 0.04001116943359375, 0.03970655822753906, 0.03970934295654297, 0.040050048828125, 0.04110172653198242, 0.040482177734375, 0.040514400482177734, 0.04024099349975586, 0.04035583877563476, 0.04029612731933594, 
0.04029487991333008, 0.04038387298583984, 0.03998976135253906, 0.04016527938842773, 0.04017513656616211, 0.040112831115722655, 0.04022886276245117, 0.0399598388671875, 0.04005551910400391, 0.04056614303588867, 0.039936641693115234, 0.039653377532958986, 0.03991532897949219, 0.039919296264648435, 0.04296489715576172, 0.040294975280761716, 0.0403210563659668, 0.04039884948730469, 0.040153087615966795, 0.04053567886352539, 0.04090099334716797, 0.040581119537353515, 0.040453983306884767, 0.040422943115234374, 0.04017145538330078, 0.040377025604248044, 0.04021452713012695, 0.04031692886352539, 0.04013804626464844, 0.040301055908203126, 0.04022889709472656, 0.04376591873168945, 0.04101516723632812, 0.04151631927490235, 0.04074991989135742, 0.04021379089355469, 0.0404835205078125, 0.040123519897460935, 0.040157505035400394, 0.04059590530395508, 0.040435104370117186, 0.040807167053222654, 0.04058009719848633, 0.040505599975585935, 0.04059212875366211, 0.043653057098388674, 0.04140652847290039, 0.040787967681884765, 0.04048230361938476, 0.040349342346191405, 0.040655902862548825, 0.040742271423339846, 0.04096249771118164, 0.04115456008911133, 0.040836544036865235, 0.04051001739501953, 0.040262687683105466, 0.04029539108276367, 0.04065894317626953, 0.040820735931396485, 0.040770751953125, 0.04128646469116211, 0.040776927947998046, 0.04081948852539063, 0.040385696411132814, 0.0404222412109375, 0.04065894317626953, 0.04110905456542969, 0.04051017761230469, 0.040510814666748045, 0.04053596878051758, 0.04067318344116211, 0.041019264221191405, 0.04110812759399414, 0.04039606475830078, 0.040436161041259765, 0.040266014099121096, 0.04038451385498047, 0.04160665512084961, 0.04063846588134765, 0.0412902717590332, 0.04039436721801758, 0.04039443206787109, 0.04044812774658203, 0.04023555374145508, 0.04085145568847656, 0.04106649780273437, 0.04079596710205078, 0.040415264129638674, 0.04024745559692383, 0.03996377563476562, 0.04017619323730469, 0.04011040115356445, 0.04047043228149414, 0.04039075088500976, 0.040269920349121094, 0.041024478912353515, 0.04038137435913086, 0.04018953704833984, 0.0403779182434082, 0.04005769729614258, 0.04002816009521484, 0.04005174255371094, 0.0400923843383789, 0.03994854354858399, 0.040235008239746094, 0.04085964965820312, 0.04001180648803711, 0.039909343719482425, 0.04016128158569336, 0.040630271911621094, 0.04016742324829101, 0.04003446578979492, 0.04020649719238281, 0.04003398513793945, 0.039892833709716795, 0.03956342315673828, 0.03966566467285156, 0.0399600944519043, 0.04050991821289063, 0.039986942291259764, 0.03986867141723633, 0.040308734893798825, 0.04070809555053711, 0.040310657501220704, 0.04003033447265625, 0.040019519805908205, 0.04002633666992188, 0.040115806579589845, 0.03980252838134766, 0.03958678436279297, 0.039734783172607424, 0.03965292739868164, 0.03961686325073242, 0.039561824798583986, 0.040288223266601565, 0.03995238494873047, 0.039747486114501955, 0.039630912780761716, 0.039559200286865236, 0.040005664825439456, 0.040263294219970706, 0.04016124725341797, 0.04002256011962891, 0.04043254470825195, 0.03998099136352539, 0.03985919952392578, 0.039725055694580076, 0.039731201171875, 0.040499198913574216, 0.04015411376953125, 0.03987968063354492, 0.04101232147216797, 0.040280609130859374, 0.04039923095703125, 0.04059126281738281, 0.03989513778686524, 0.03977830505371094, 0.040422977447509764, 0.040098239898681644, 0.0404213752746582, 0.04005062484741211, 0.040034366607666017, 0.04095795059204101, 0.04034560012817383, 0.040277759552001954, 0.04009353637695313, 
0.03976367950439453, 0.04026028823852539, 0.04010985565185547, 0.03996684646606445, 0.03987260818481445, 0.04040473556518555, 0.04101145553588867, 0.040482017517089845, 0.04057097625732422, 0.04026643371582031, 0.04051763153076172, 0.04014665603637695, 0.04056256103515625, 0.03995820617675781, 0.03995225524902344, 0.03964912033081055, 0.039965694427490234, 0.04003635025024414, 0.03995657730102539, 0.03991305541992188, 0.03977248001098633, 0.039615550994873044, 0.039498401641845704, 0.03946115112304688, 0.03935846328735351, 0.03941712188720703, 0.040884960174560545, 0.03959318542480469, 0.039523105621337894, 0.03964108657836914, 0.0396874885559082, 0.04127129745483398, 0.0399439697265625, 0.039554336547851565, 0.03959904098510742, 0.03965856170654297, 0.040206783294677736, 0.03977676773071289, 0.039718433380126955, 0.03948944091796875, 0.03960684967041016, 0.0399441909790039, 0.039880702972412106, 0.04008515167236328, 0.03982985687255859, 0.03971299362182617, 0.04012419128417969, 0.04022224044799805, 0.03978492736816406, 0.039651329040527344, 0.03957494354248047, 0.039440990447998044, 0.04016128158569336, 0.039923713684082034, 0.039616512298583983, 0.03975167846679688, 0.04024662399291992, 0.03999811172485351, 0.040695968627929686, 0.04179740905761719, 0.04024739074707031, 0.04011008071899414, 0.03983494567871094, 0.03993360137939453, 0.03998003387451172, 0.03997459030151367, 0.040580673217773436, 0.04182502365112305, 0.04064460754394531, 0.04047372817993164, 0.040074367523193356, 0.040107776641845706, 0.040767070770263675, 0.04037673568725586, 0.04045209503173828, 0.04065420913696289, 0.04017561721801758, 0.04013865661621094, 0.04024409484863281, 0.03995820617675781, 0.04011433410644531, 0.04061727905273437, 0.04085420989990234, 0.04091289520263672, 0.04163379287719727, 0.040516929626464845, 0.040999294281005856, 0.04038278579711914, 0.04029030227661133, 0.040136703491210936, 0.03995033645629883, 0.040062976837158204, 0.040002880096435545, 0.03994281768798828]",tokens/s,24.903267820875524,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,939.917312,6540.886016,0.0,6138.363904,6060.931072,s,1,7.30018359375,7.30018359375,0.0,7.30018359375,7.30018359375,7.30018359375,7.30018359375,[7.30018359375],,kWh,5.116110066713494e-06,5.52479767289311e-07,2.2916684999929604e-06,7.960258333995765e-06,,MB,1369.956352,6566.05184,0.0,6150.946816,5419.87328,s,10,0.5800214500427247,0.05800214500427246,0.0015530498194660032,0.05776014518737793,0.05968034591674805,0.06060753364562988,0.061349283828735356,"[0.06153472137451172, 0.0580206413269043, 0.05947430419921875, 0.05669545745849609, 0.05765750503540039, 0.0568917121887207, 0.0564051513671875, 0.05637833786010742, 0.059100833892822266, 
0.05786278533935547]",tokens/s,4413.629874914849,kWh,1.8691675327764137e-06,2.0613592537806654e-07,1.2378619244712912e-06,3.3131653826257717e-06,tokens/kWh,77267498.12806301,MB,1413.292032,6566.05184,0.0,6150.946816,5419.87584,s,10,15.551220092773438,1.5551220092773437,0.0037902216950123726,1.5546094970703126,1.5595710571289063,1.5611954406738282,1.5624949475097658,"[1.5521331787109376, 1.5505362548828125, 1.5542613525390625, 1.5508994140625, 1.5592100830078126, 1.56281982421875, 1.558548095703125, 1.5524212646484374, 1.5554329833984375, 1.5549576416015625]",tokens/s,40.51129083387852,kWh,4.5470545085558386e-05,5.0150591624497046e-06,3.0226496580330043e-05,8.071210082833815e-05,tokens/kWh,780552.1025154706,,s,630,15.54906138801576,0.024681049822247222,0.0005081224453642924,0.02454849624633789,0.024837552070617677,0.025444534969329836,0.027326687488555913,"[0.025975103378295897, 0.02532748794555664, 0.02494476890563965, 0.024754112243652343, 0.024557632446289064, 0.02444697570800781, 0.024401920318603516, 0.024429759979248046, 0.024431264877319336, 0.024403776168823242, 0.024394079208374022, 0.024582143783569335, 0.02450227165222168, 0.024528255462646486, 0.025459327697753907, 0.024389631271362306, 0.024414207458496092, 0.024409088134765625, 0.02441046333312988, 0.02435958480834961, 0.024546655654907226, 0.024496383666992187, 0.024445215225219728, 0.02456105613708496, 0.02447590446472168, 0.02469900894165039, 0.024534719467163086, 0.024782976150512694, 0.024570207595825195, 0.02453113555908203, 0.02485180854797363, 0.02449065589904785, 0.024721408843994142, 0.024451135635375976, 0.024686527252197266, 0.02457980728149414, 0.02463158416748047, 0.024662015914916992, 0.024780799865722656, 0.02482585525512695, 0.024780799865722656, 0.02466815948486328, 0.024666112899780275, 0.024764415740966796, 0.02470230484008789, 0.02467452812194824, 0.024611263275146483, 0.024625152587890626, 0.024637184143066405, 0.02461926460266113, 0.0246060791015625, 0.024659839630126953, 0.024703744888305665, 0.024608768463134766, 0.024795040130615235, 0.02454537582397461, 0.024544672012329103, 0.02461087989807129, 0.024582687377929686, 0.02451852798461914, 0.02447987174987793, 0.024573951721191405, 0.0245166072845459, 0.027373567581176757, 0.026176767349243166, 0.02529497528076172, 0.024896127700805664, 0.024656160354614258, 0.024479616165161134, 0.02443782424926758, 0.02447439956665039, 0.02444697570800781, 0.024465408325195313, 0.02452003288269043, 0.024470176696777344, 0.02452070426940918, 0.02449203109741211, 0.024479808807373046, 0.024482784271240236, 0.024631839752197265, 0.02475174331665039, 0.024641792297363282, 0.02449260711669922, 0.024424448013305664, 0.024477632522583007, 0.02453715133666992, 0.02445270347595215, 0.02451888084411621, 0.02472159957885742, 0.024639488220214844, 0.024598112106323244, 0.02448569679260254, 0.0244168643951416, 0.02451593589782715, 0.024461984634399414, 0.024501344680786134, 0.02450320053100586, 0.024438623428344727, 0.024520063400268556, 0.024445728302001955, 0.024436735153198243, 0.024440832138061523, 0.024415456771850585, 0.024543712615966797, 0.02456515121459961, 0.024580032348632812, 0.024478591918945313, 0.02455526351928711, 0.024586591720581055, 0.024563488006591798, 0.02451043128967285, 0.02448204803466797, 0.024557567596435546, 0.024507904052734376, 0.02442860794067383, 0.024457664489746095, 0.02446335983276367, 0.024588287353515623, 0.024612096786499022, 0.024594944000244142, 0.024527103424072265, 0.024556640625, 0.024572032928466797, 0.024547584533691408, 
0.024457759857177734, 0.024460416793823242, 0.027845439910888673, 0.02633888053894043, 0.02544588851928711, 0.024920864105224608, 0.02471897506713867, 0.024512191772460938, 0.02502332878112793, 0.02437868881225586, 0.024541887283325195, 0.02448793601989746, 0.02446950340270996, 0.024590015411376953, 0.024516927719116212, 0.02457187271118164, 0.024522239685058594, 0.024545888900756836, 0.024549312591552734, 0.024668256759643556, 0.024602527618408202, 0.0244715518951416, 0.024413951873779295, 0.024430496215820312, 0.024478048324584962, 0.024436895370483398, 0.024440671920776365, 0.024436735153198243, 0.024556768417358397, 0.024504480361938478, 0.02445756721496582, 0.024457088470458986, 0.024451488494873046, 0.024483135223388672, 0.024449728012084962, 0.024454496383666993, 0.024613536834716798, 0.024537055969238282, 0.02467228889465332, 0.024625152587890626, 0.024758079528808593, 0.02464787292480469, 0.024696735382080077, 0.024883295059204103, 0.024782304763793946, 0.02473423957824707, 0.02466815948486328, 0.02468841552734375, 0.024692960739135742, 0.024622655868530272, 0.024514720916748046, 0.024448320388793944, 0.0245316162109375, 0.024486175537109377, 0.024514591217041016, 0.024489088058471678, 0.024590463638305665, 0.02454105567932129, 0.0245361270904541, 0.024598079681396483, 0.024531200408935548, 0.02461008071899414, 0.024719200134277343, 0.024556640625, 0.024569631576538086, 0.02699251174926758, 0.026167999267578124, 0.025269535064697264, 0.02480518341064453, 0.02455235290527344, 0.024496000289916994, 0.024449151992797853, 0.024444095611572264, 0.02446009635925293, 0.024411968231201172, 0.024502464294433594, 0.02443401527404785, 0.02448246383666992, 0.024473600387573242, 0.0244052791595459, 0.024468191146850587, 0.024460447311401366, 0.024519487380981444, 0.024450752258300783, 0.02466441535949707, 0.024629247665405272, 0.024647680282592774, 0.02454528045654297, 0.02464348793029785, 0.02452284812927246, 0.024527967453002928, 0.024472480773925782, 0.024432096481323242, 0.02453276824951172, 0.02455833625793457, 0.02452479934692383, 0.02444697570800781, 0.024470720291137695, 0.024593215942382812, 0.024591968536376952, 0.02453708839416504, 0.024467615127563475, 0.02442265510559082, 0.024619007110595705, 0.024631296157836914, 0.024653823852539062, 0.024592256546020506, 0.024750207901000975, 0.02464124870300293, 0.024634784698486328, 0.024576480865478516, 0.02451932716369629, 0.024591392517089843, 0.02453340721130371, 0.02444099235534668, 0.024578208923339843, 0.024465408325195313, 0.024555519104003908, 0.024475648880004884, 0.02446950340270996, 0.02451251220703125, 0.02457516860961914, 0.024551616668701173, 0.024547679901123047, 0.024533439636230468, 0.02450806427001953, 0.024627328872680664, 0.024632991790771483, 0.02735795211791992, 0.026992767333984376, 0.025794111251831054, 0.024940319061279297, 0.024742303848266603, 0.024643680572509766, 0.02460809516906738, 0.024519264221191408, 0.024451072692871095, 0.02461497688293457, 0.024473600387573242, 0.02452060890197754, 0.02453513526916504, 0.024565759658813476, 0.024478815078735353, 0.024386463165283204, 0.02445676803588867, 0.025465280532836913, 0.024414207458496092, 0.024385536193847656, 0.02462259292602539, 0.024696447372436522, 0.02467728042602539, 0.024505535125732423, 0.02448259162902832, 0.024539104461669924, 0.024522783279418946, 0.024532991409301756, 0.024406015396118166, 0.024549375534057616, 0.024629247665405272, 0.026074848175048827, 0.02447593688964844, 0.024561216354370117, 0.024768640518188476, 0.02452102470397949, 
0.024506175994873047, 0.024481983184814454, 0.024729600906372072, 0.024791040420532227, 0.024786880493164062, 0.024790815353393555, 0.02474345588684082, 0.02470684814453125, 0.0247490234375, 0.024679744720458984, 0.024615264892578124, 0.02470742416381836, 0.02465564727783203, 0.024530271530151367, 0.024478591918945313, 0.024608768463134766, 0.02461692810058594, 0.024884639739990236, 0.024619007110595705, 0.024590976715087892, 0.02470028877258301, 0.025313919067382812, 0.024654943466186522, 0.02459331130981445, 0.02453708839416504, 0.024475648880004884, 0.024526847839355468, 0.027250143051147462, 0.02612601661682129, 0.025526943206787108, 0.025067136764526366, 0.024764991760253905, 0.024594432830810548, 0.024596288681030275, 0.024516159057617188, 0.02444304084777832, 0.02449660873413086, 0.02444697570800781, 0.02453913688659668, 0.024792160034179687, 0.024455232620239256, 0.02442540740966797, 0.024399776458740235, 0.02447769546508789, 0.02450227165222168, 0.024475648880004884, 0.024481792449951172, 0.024485183715820313, 0.02445996856689453, 0.024452863693237306, 0.024442880630493165, 0.024420608520507814, 0.02751487922668457, 0.024442880630493165, 0.024430688858032228, 0.024647584915161135, 0.024680448532104493, 0.02453913688659668, 0.024537055969238282, 0.0245199031829834, 0.02444108772277832, 0.024527360916137695, 0.02452076721191406, 0.029212671279907225, 0.024418304443359375, 0.024464799880981446, 0.02453555107116699, 0.024615264892578124, 0.02460032081604004, 0.024675424575805665, 0.02465238380432129, 0.02533203125, 0.02452889633178711, 0.024471424102783204, 0.024497888565063478, 0.0244553279876709, 0.027169023513793945, 0.024557472229003906, 0.024552928924560548, 0.024599168777465822, 0.024588287353515623, 0.024584096908569338, 0.024667423248291017, 0.02464143943786621, 0.02456387138366699, 0.0245250244140625, 0.024553375244140627, 0.02454355239868164, 0.024607040405273437, 0.024541183471679686, 0.02704582405090332, 0.025929216384887696, 0.02525254440307617, 0.024835968017578126, 0.02452479934692383, 0.024450368881225586, 0.02448863983154297, 0.024543231964111328, 0.024673280715942384, 0.02451968002319336, 0.024541183471679686, 0.02516377639770508, 0.025112159729003908, 0.024963199615478517, 0.02478927993774414, 0.026411008834838868, 0.024628416061401367, 0.02453536033630371, 0.024432735443115236, 0.024469823837280275, 0.024492128372192383, 0.0245534725189209, 0.02449385643005371, 0.025170272827148437, 0.024563583374023437, 0.02443878364562988, 0.024457216262817383, 0.024481216430664064, 0.024611391067504883, 0.02465996742248535, 0.024706464767456054, 0.02459913635253906, 0.024593984603881836, 0.024641984939575194, 0.024860256195068358, 0.02468191909790039, 0.024677343368530273, 0.02460643196105957, 0.024613151550292967, 0.024557567596435546, 0.02458380889892578, 0.02462495994567871, 0.024719135284423828, 0.024773056030273438, 0.024717344284057616, 0.024598495483398437, 0.024772287368774414, 0.02455740737915039, 0.024506975173950195, 0.02455116844177246, 0.024508895874023436, 0.024700927734375, 0.024676288604736328, 0.024655519485473634, 0.024641952514648437, 0.024670112609863282, 0.024611936569213868, 0.024676767349243164, 0.024613471984863283, 0.024606719970703125, 0.0245863037109375, 0.02462272071838379, 0.024572128295898436, 0.027029760360717775, 0.026236928939819337, 0.025536895751953125, 0.0250533447265625, 0.024750080108642578, 0.02463088035583496, 0.024561216354370117, 0.02452275276184082, 0.024490848541259765, 0.024467456817626954, 0.024464479446411135, 0.024517536163330078, 
0.02448409652709961, 0.024383295059204103, 0.02439366340637207, 0.024424095153808594, 0.024510080337524415, 0.02448249626159668, 0.024440000534057617, 0.02442032051086426, 0.024410655975341797, 0.024422752380371095, 0.024377344131469726, 0.0247010555267334, 0.02446441650390625, 0.024570528030395507, 0.02463148880004883, 0.024543231964111328, 0.024444927215576173, 0.024440832138061523, 0.024552703857421875, 0.024515327453613282, 0.024504159927368162, 0.024498336791992186, 0.024440832138061523, 0.02453424072265625, 0.02447235107421875, 0.024458656311035155, 0.02442678451538086, 0.024615232467651366, 0.024868864059448242, 0.024698879241943358, 0.02466592025756836, 0.024645599365234375, 0.024747615814208986, 0.02460531234741211, 0.024665855407714845, 0.02460492706298828, 0.02456985664367676, 0.024536895751953124, 0.024502464294433594, 0.024483840942382814, 0.02448745536804199, 0.024730079650878905, 0.02479030418395996, 0.02467878341674805, 0.02450876808166504, 0.024497472763061523, 0.024535743713378907, 0.024620864868164064, 0.024578239440917967, 0.024530431747436524, 0.024821599960327147, 0.02810268783569336, 0.026312639236450195, 0.025451967239379883, 0.024934879302978517, 0.02465718460083008, 0.024541183471679686, 0.024621280670166015, 0.024672128677368163, 0.024613632202148437, 0.024473247528076173, 0.02447395133972168, 0.02442438316345215, 0.024600639343261718, 0.024472896575927734, 0.02445996856689453, 0.02446771240234375, 0.02446441650390625, 0.024510719299316405, 0.02448636817932129, 0.024440767288208008, 0.02445859146118164, 0.024584928512573243, 0.02451420783996582, 0.02446553611755371, 0.02449020767211914, 0.024475648880004884, 0.024563711166381837, 0.02461030387878418, 0.024561216354370117, 0.02466022491455078, 0.024602912902832032, 0.024581823348999023, 0.024611551284790038, 0.025421823501586914, 0.024506368637084962, 0.02453708839416504, 0.02451036834716797, 0.024526336669921874, 0.024500320434570313, 0.02489792060852051, 0.02466828727722168, 0.02468659210205078, 0.02470243263244629, 0.024633216857910156, 0.02470262336730957, 0.024668319702148438, 0.0246278076171875, 0.025389312744140625, 0.02453913688659668, 0.02454732894897461, 0.02449203109741211, 0.02445644760131836, 0.024523487091064455, 0.02464361572265625, 0.02462460708618164, 0.024565536499023436, 0.024496768951416014, 0.02450160026550293, 0.024539072036743163, 0.024525663375854493, 0.02443878364562988, 0.02452070426940918, 0.02447974395751953, 0.027940832138061523, 0.02654617691040039, 0.025550271987915037, 0.025442880630493166, 0.02474095916748047, 0.024590688705444334, 0.024476224899291993, 0.02442176055908203, 0.02451315116882324, 0.024612863540649413, 0.02447974395751953, 0.024457216262817383, 0.024426496505737305, 0.024485120773315428, 0.024437088012695313, 0.024402271270751952, 0.024393344879150392, 0.024575519561767577, 0.02451958465576172, 0.02442585563659668, 0.024447231292724608, 0.024428543090820314, 0.02451481628417969, 0.024407455444335938, 0.024431327819824218, 0.02443878364562988, 0.024453216552734375, 0.024524703979492187, 0.024559616088867187, 0.02453116798400879, 0.02448361587524414, 0.02444697570800781, 0.024426496505737305, 0.024426591873168944, 0.024514463424682616, 0.024481088638305663, 0.024599168777465822, 0.02459040069580078, 0.024633024215698244, 0.024570175170898437, 0.02457206344604492, 0.024737632751464844, 0.02469478416442871, 0.024985599517822265, 0.02475212860107422, 0.025134527206420898, 0.02486534309387207, 0.02477199935913086, 0.02467286491394043, 0.02459401512145996, 
0.02455388832092285, 0.024512384414672853, 0.024567935943603517, 0.02496451187133789, 0.02481161689758301, 0.024623615264892578, 0.024490272521972656, 0.02447536087036133, 0.024517759323120118, 0.024531232833862306, 0.024492223739624022, 0.02452521514892578, 0.024521856307983397]",tokens/s,40.516915090808304,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1001, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 571, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 166, in __init__ self.gate_proj 
= nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 166.12 MiB is free. Process 440686 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1074, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 888, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 610, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 447, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,975.327232,13356.630016,0.0,12954.107904,12898.466816,s,1,8.639314453125,8.639314453125,0.0,8.639314453125,8.639314453125,8.639314453125,8.639314453125,[8.639314453125],,kWh,7.028268945830026e-06,7.679634831212898e-07,2.398613030000704e-06,1.019484545895202e-05,,MB,1372.491776,13373.407232,0.0,12960.39936,10667.938816,s,10,3.180529541015625,0.31805295410156253,0.003811042068807525,0.3180428314208984,0.32263394775390625,0.32292058105468746,0.3231498876953125,"[0.3096095886230469, 0.316553955078125, 0.3154758911132812, 0.3184391174316406, 0.3211558837890625, 0.3159206848144531, 0.3199504089355469, 0.31764654541015624, 0.32257025146484375, 0.32320721435546873]",tokens/s,804.8974131466566,kWh,9.055501493181873e-06,9.97904023060283e-07,5.994062034303001e-06,1.6047467550545156e-05,tokens/kWh,15952672.85593005,MB,1373.302784,13375.504384,0.0,12962.496512,10698.013696,s,10,31.900198242187496,3.1900198242187496,0.0035996792050985984,3.189802001953125,3.193630908203125,3.195307641601562,3.1966490283203126,"[3.186168212890625, 3.186814208984375, 3.1862119140625, 3.192526611328125, 3.19275048828125, 3.185880126953125, 3.189783935546875, 3.189820068359375, 3.196984375, 3.19325830078125]",tokens/s,19.74909357042287,kWh,9.351628420140093e-05,1.0315798362664734e-05,6.200268680749706e-05,0.00016583476937156275,tokens/kWh,379896.20776596444,,s,630,31.898487201690692,0.05063251936776298,0.0004173364604794488,0.05055036735534668,0.050834391021728514,0.051070454406738276,0.053394205703735355,"[0.053330974578857424, 0.05112403106689453, 0.05062451171875, 0.050587646484375, 0.050374656677246096, 0.05048115158081055, 0.05043600082397461, 0.05052188873291016, 0.05035023880004883, 0.05050291061401367, 0.050326560974121096, 0.05036627197265625, 0.050288288116455075, 0.050377120971679686, 0.050255870819091795, 0.05051145553588867, 0.05027062225341797, 0.05034156799316406, 0.05048761749267578, 0.050993152618408207, 0.05067366409301758, 0.050810943603515624, 0.05062854385375976, 0.050574432373046874, 0.05050211334228515, 0.05061641693115235, 0.05038524627685547, 0.05048934555053711, 0.05046607971191406, 0.05110038375854492, 0.05034406280517578, 0.05060505676269531, 0.05044828796386719, 0.05038179016113281, 0.05031305694580078, 0.05074550247192383, 0.050388992309570314, 0.050533889770507816, 0.050584064483642575, 0.050724864959716794, 0.05056512069702149, 0.050784255981445314, 0.050619937896728515, 0.050678241729736326, 0.05076172637939453, 0.05058150482177735, 0.05038796615600586, 0.05059686279296875, 0.05039926528930664, 0.05040534210205078, 0.05047500610351562, 0.050485183715820316, 0.050407520294189455, 0.05040534210205078, 0.05040492630004883, 0.05061369705200195, 0.05030780792236328, 0.05053647994995117, 0.05036377716064453, 0.05068479919433594, 0.05062246322631836, 
0.05057689666748047, 0.05046908950805664, 0.053622848510742185, 0.05106224060058594, 0.050468673706054686, 0.05050764846801758, 0.050356224060058595, 0.05042374420166015, 0.050441120147705076, 0.05050777435302734, 0.050358112335205076, 0.050452640533447266, 0.050378433227539064, 0.050669662475585936, 0.05034415817260742, 0.05041705703735352, 0.05026844787597656, 0.05038726425170898, 0.05044019317626953, 0.050446334838867186, 0.05062451171875, 0.050869281768798826, 0.05063779067993164, 0.0506668815612793, 0.05053094482421875, 0.050427902221679685, 0.05044019317626953, 0.05056512069702149, 0.05036796951293945, 0.05042639923095703, 0.05033494567871094, 0.050471710205078124, 0.05039923095703125, 0.050476478576660155, 0.050339393615722657, 0.050348033905029295, 0.05058969497680664, 0.050787326812744144, 0.0505975341796875, 0.05054703903198242, 0.0507740478515625, 0.0506732177734375, 0.05058371353149414, 0.0505360336303711, 0.050622913360595705, 0.05057763290405273, 0.05053414535522461, 0.05047225570678711, 0.05048620986938476, 0.05039513778686523, 0.05055897521972656, 0.05042758560180664, 0.050706462860107424, 0.050621822357177736, 0.050953311920166014, 0.05045747375488281, 0.05048416137695313, 0.05049472045898438, 0.05066592025756836, 0.05057737731933594, 0.050534465789794925, 0.050592033386230466, 0.050685951232910156, 0.05056716918945312, 0.05066463851928711, 0.0534200325012207, 0.05116723251342774, 0.05045212936401367, 0.050290687561035156, 0.05052016067504883, 0.05048329544067383, 0.05043775939941406, 0.050342369079589847, 0.05049161529541016, 0.0505379524230957, 0.05039471817016602, 0.05037750244140625, 0.05046588897705078, 0.0503223991394043, 0.050399166107177734, 0.050363712310791016, 0.050340545654296874, 0.050526016235351565, 0.05070048141479492, 0.05073507308959961, 0.050664894104003905, 0.05071641540527344, 0.05071859359741211, 0.050621120452880856, 0.050476608276367185, 0.050454879760742186, 0.050377086639404295, 0.050494655609130856, 0.05053462219238281, 0.05036707305908203, 0.0504106559753418, 0.05057193756103515, 0.050433246612548825, 0.05060704040527344, 0.050400863647460936, 0.05042630386352539, 0.050331390380859375, 0.05058550262451172, 0.05049126434326172, 0.0505076789855957, 0.05043257522583008, 0.05064934539794922, 0.0504725456237793, 0.05060755157470703, 0.05050163269042969, 0.05051875305175781, 0.05043609619140625, 0.050618366241455076, 0.05043737411499023, 0.050510593414306644, 0.05035212707519531, 0.05059171295166016, 0.05042179107666016, 0.05064908981323242, 0.05051526260375976, 0.050476863861083986, 0.05043020629882813, 0.050813217163085934, 0.05062895965576172, 0.05072076797485352, 0.0505274543762207, 0.05131305694580078, 0.05044876861572266, 0.05359203338623047, 0.05095155334472656, 0.05057398223876953, 0.05054793548583984, 0.0504054069519043, 0.05043033599853516, 0.05052614212036133, 0.05095987319946289, 0.05046316909790039, 0.05074380874633789, 0.05068716812133789, 0.05068268966674805, 0.05195980834960937, 0.05050777435302734, 0.050595840454101565, 0.05048633575439453, 0.0505968017578125, 0.05045043182373047, 0.05081846237182617, 0.05081763076782227, 0.0509194221496582, 0.05070848083496094, 0.05058150482177735, 0.05069168090820313, 0.050810752868652345, 0.05053081512451172, 0.05056060791015625, 0.05046112060546875, 0.05054873657226563, 0.050493343353271485, 0.050753631591796876, 0.05055049514770508, 0.050487071990966796, 0.0504317741394043, 0.05057199859619141, 0.050530303955078126, 0.050511520385742185, 0.050676063537597654, 0.05109964752197266, 
0.05075763320922851, 0.050742977142333986, 0.05076614379882813, 0.050597599029541016, 0.050581790924072265, 0.050522113800048826, 0.050517311096191404, 0.050547393798828125, 0.05045423889160156, 0.050498977661132816, 0.05043075180053711, 0.05046076965332031, 0.050564830780029296, 0.0505588493347168, 0.050459041595458984, 0.050484352111816407, 0.05045695877075195, 0.05076428985595703, 0.050568897247314455, 0.050630977630615234, 0.050553920745849606, 0.050672897338867186, 0.05052793502807617, 0.050530303955078126, 0.053680095672607425, 0.05111808013916016, 0.050536449432373044, 0.05047296142578125, 0.05038694381713867, 0.05046492767333984, 0.05075711822509766, 0.050514270782470706, 0.05051315307617187, 0.05050649642944336, 0.050357982635498046, 0.050638431549072264, 0.05049638366699219, 0.05042902374267578, 0.051089534759521486, 0.050496097564697265, 0.05049958419799805, 0.050519935607910155, 0.050568321228027346, 0.05116844940185547, 0.05088355255126953, 0.05074415969848633, 0.050493438720703124, 0.05055897521972656, 0.050651264190673825, 0.050566017150878904, 0.050746368408203124, 0.050806686401367186, 0.05065513610839844, 0.05050572967529297, 0.05083155059814453, 0.050541793823242184, 0.05064374542236328, 0.05053440093994141, 0.05061004638671875, 0.050548225402832034, 0.050583934783935545, 0.050519935607910155, 0.05071452713012695, 0.05060451126098633, 0.05083737564086914, 0.05111616134643555, 0.05067571258544922, 0.050561023712158204, 0.05078182220458984, 0.05083788681030273, 0.05047609710693359, 0.05047964859008789, 0.05052406311035156, 0.050499168395996094, 0.0506071662902832, 0.050421024322509764, 0.05052275085449219, 0.05055481719970703, 0.05076921463012695, 0.050575904846191407, 0.050581344604492186, 0.050490974426269535, 0.05066416168212891, 0.050671104431152345, 0.050698974609375, 0.05063459014892578, 0.05065465545654297, 0.0538377914428711, 0.051146495819091795, 0.05048963165283203, 0.05041561508178711, 0.050677761077880856, 0.05039923095703125, 0.05042150497436523, 0.05043225479125977, 0.05045452880859375, 0.050433151245117186, 0.05031372833251953, 0.050340225219726566, 0.05040332794189453, 0.050466049194335935, 0.05051193618774414, 0.05034668731689453, 0.05055487823486328, 0.05056208038330078, 0.05059251022338867, 0.050778270721435544, 0.050810943603515624, 0.05065318298339844, 0.05057535934448242, 0.050491584777832034, 0.05039244842529297, 0.05044889450073242, 0.05047091293334961, 0.05048108673095703, 0.05036236953735351, 0.05049331283569336, 0.05044009780883789, 0.05035440063476562, 0.05054281616210937, 0.050517791748046874, 0.050388992309570314, 0.05053235244750977, 0.05042940902709961, 0.05052880096435547, 0.05058697509765625, 0.05060236740112305, 0.050655166625976564, 0.05066582489013672, 0.05040326309204102, 0.05048121643066406, 0.050466209411621096, 0.05073494338989258, 0.05043686294555664, 0.050522113800048826, 0.050487297058105465, 0.05044838333129883, 0.05036236953735351, 0.05043199920654297, 0.05038678359985352, 0.05055023956298828, 0.05052489471435547, 0.05052617645263672, 0.050439937591552735, 0.050635009765625, 0.050603073120117185, 0.05063980865478516, 0.05055692672729492, 0.05062227249145508, 0.050460865020751956, 0.05331507110595703, 0.05093584060668945, 0.050563518524169924, 0.05038079833984375, 0.050437183380126954, 0.050364513397216794, 0.050444766998291014, 0.05056345748901367, 0.05040438461303711, 0.050361312866210935, 0.05051596832275391, 0.05042326354980469, 0.05040147018432617, 0.05040163040161133, 0.05044745635986328, 0.05052899169921875, 
0.05044582366943359, 0.0504672966003418, 0.05068412780761719, 0.05077008056640625, 0.05097676849365235, 0.05066691207885742, 0.050710975646972654, 0.0506506233215332, 0.050579105377197266, 0.050417823791503905, 0.05055331039428711, 0.050477054595947264, 0.05057257461547852, 0.050512832641601564, 0.05073539352416992, 0.0506110725402832, 0.05056313705444336, 0.05039507293701172, 0.05073596954345703, 0.050479103088378906, 0.05054012680053711, 0.05051564788818359, 0.051234622955322266, 0.050729888916015625, 0.05082646560668945, 0.050727710723876954, 0.050661376953125, 0.050530303955078126, 0.050705760955810544, 0.050420448303222655, 0.050646976470947264, 0.05047926330566406, 0.05053833770751953, 0.05044790267944336, 0.05052054214477539, 0.050405376434326174, 0.0508737907409668, 0.05054329681396484, 0.05059881591796875, 0.050544864654541014, 0.05065395355224609, 0.050536449432373044, 0.05074943923950195, 0.050586654663085935, 0.0507562255859375, 0.050678112030029296, 0.05065523147583008, 0.05326457595825195, 0.051107585906982424, 0.05044249725341797, 0.05045462417602539, 0.050675617218017575, 0.05059174346923828, 0.050493438720703124, 0.05055680084228516, 0.050444416046142575, 0.05041692733764649, 0.050340286254882814, 0.05041571044921875, 0.050388862609863284, 0.05030329513549805, 0.05079644775390625, 0.05041571044921875, 0.05030815887451172, 0.050468929290771486, 0.050592639923095706, 0.050832481384277345, 0.05076841735839844, 0.05078044891357422, 0.050581600189208986, 0.0506695671081543, 0.050573280334472656, 0.050663486480712894, 0.050609249114990235, 0.05046755218505859, 0.05038915252685547, 0.05054399871826172, 0.050442142486572264, 0.05057404708862305, 0.050427902221679685, 0.050589408874511715, 0.05042156982421875, 0.05128348922729492, 0.05054521560668945, 0.05095872116088867, 0.05063177490234375, 0.05077046585083008, 0.05054064178466797, 0.050859424591064455, 0.050665630340576175, 0.050872478485107425, 0.05053907012939453, 0.05055414581298828, 0.05046259307861328, 0.05074620819091797, 0.05046681594848633, 0.05069823837280273, 0.050478401184082033, 0.05075833511352539, 0.05041305541992187, 0.050561374664306644, 0.05046492767333984, 0.050538368225097656, 0.0503583984375, 0.05060969543457031, 0.050446815490722656, 0.05068185424804687, 0.05058902359008789, 0.05078006362915039, 0.05057167816162109, 0.053620384216308596, 0.05101017761230469, 0.050520416259765624, 0.0504277458190918, 0.050534431457519534, 0.050520065307617185, 0.050511871337890625, 0.05067724609375, 0.05046054458618164, 0.050393726348876955, 0.050472000122070315, 0.05067254257202149, 0.050439231872558596, 0.05043228912353516, 0.050559616088867186, 0.050605567932128906, 0.05056284713745117, 0.05098543930053711, 0.05070675277709961, 0.05084774398803711, 0.05081452941894531, 0.050659454345703125, 0.05069855880737305, 0.050544448852539066, 0.05042195129394531, 0.05059385681152344, 0.050549793243408206, 0.050538753509521486, 0.05108137512207031, 0.05098137664794922, 0.050561023712158204, 0.05072259140014648, 0.05052812957763672, 0.05059590530395508, 0.05076201629638672, 0.050785438537597656, 0.05073561477661133, 0.05124313735961914, 0.05130876922607422, 0.0510579833984375, 0.05105324935913086, 0.05092147064208984, 0.05059379196166992, 0.050634750366210936, 0.05052108764648437, 0.05058457565307617, 0.050525184631347655, 0.05073942565917969, 0.05074550247192383, 0.050617183685302734, 0.05066320037841797, 0.05057494354248047, 0.05047951889038086, 0.050612224578857425, 0.05047404861450195, 0.05075040054321289, 0.0506060791015625, 
0.05109292984008789, 0.051154624938964846, 0.05083430480957031, 0.05106230545043945, 0.050810943603515624, 0.05059420776367188, 0.053943294525146485, 0.05109881591796875, 0.05054572677612305, 0.05053814315795899, 0.050845504760742184, 0.050587390899658205, 0.05043798446655273, 0.05055763244628906, 0.05046681594848633, 0.05056512069702149, 0.05048320007324219, 0.0506695671081543, 0.050618366241455076, 0.050677761077880856, 0.050781246185302734, 0.05062137603759766, 0.050466175079345706, 0.05068454360961914, 0.0505241584777832, 0.0510832633972168, 0.050882816314697266, 0.050865119934082034, 0.050563873291015624, 0.05069209671020508, 0.05045043182373047, 0.050648544311523436, 0.05091299057006836, 0.05066937637329102, 0.050506752014160154, 0.05059174346923828, 0.05055487823486328, 0.05055897521972656, 0.05039718246459961, 0.05054032135009766, 0.05043017578125, 0.050569343566894534, 0.050482593536376956, 0.05066105651855469, 0.050573631286621096, 0.05083798217773437, 0.050658401489257814, 0.05067049789428711, 0.050536449432373044, 0.05070441436767578, 0.05044630432128906, 0.05077811050415039, 0.05053440093994141, 0.05047267150878906, 0.05043024063110352, 0.05057766342163086, 0.05042966461181641, 0.05058768081665039, 0.050458526611328124, 0.0505832633972168, 0.05051753616333008, 0.05066774368286133, 0.05056166458129883, 0.050783294677734375, 0.05054969787597656, 0.05107712173461914, 0.050835166931152344, 0.05085007858276367, 0.05076768112182617]",tokens/s,19.750152915295896,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,972.836864,7019.036672,0.0,6616.51456,6410.443264,s,1,9.0922021484375,9.0922021484375,0.0,9.0922021484375,9.0922021484375,9.0922021484375,9.0922021484375,[9.0922021484375],,kWh,6.090488104174104e-06,6.643721618110109e-07,2.7838911159996926e-06,9.538751381984807e-06,,MB,1372.721152,7071.465472,0.0,6658.4576,5286.424064,s,10,3.878668273925782,0.38786682739257816,0.0011453448304691157,0.3873702392578125,0.38900812988281247,0.3895884002685547,0.39005261657714846,"[0.3869072570800781, 0.38614337158203127, 0.3873790283203125, 0.3887994384765625, 0.38728048706054685, 0.3871011657714844, 0.3888791809082031, 0.3901686706542969, 0.38864822387695314, 0.3873614501953125]",tokens/s,660.0203521423873,kWh,1.1638483993589595e-05,1.2835122239754735e-06,7.688542475615396e-06,2.0610538693180464e-05,tokens/kWh,12420830.130204422,MB,1385.242624,7100.8256,0.0,6687.817728,5299.722752,s,10,29.7651083984375,2.9765108398437503,0.015904320633270984,2.98038916015625,2.991825244140625,2.993771142578125,2.995327861328125,"[2.97268505859375, 2.990581787109375, 2.94875537109375, 2.9645478515625, 2.991392822265625, 2.951751953125, 2.977518310546875, 2.988898193359375, 2.995717041015625, 
2.983260009765625]",tokens/s,21.16572167541884,kWh,8.642502311849441e-05,9.532901697661124e-06,5.360150655638454e-05,0.00014955943137254007,tokens/kWh,421237.22604341985,,s,630,29.76307044982913,0.0472429689679827,0.0008758255176683365,0.047061344146728516,0.04806909370422364,0.04864614486694336,0.05050145687103272,"[0.04809619140625, 0.047949825286865234, 0.047521568298339846, 0.047202529907226565, 0.047503360748291014, 0.046904960632324216, 0.04820345687866211, 0.04678521728515625, 0.046472415924072266, 0.04654134368896484, 0.04691584014892578, 0.04702163314819336, 0.04713107299804688, 0.046878623962402347, 0.046886432647705076, 0.048088993072509766, 0.04744051361083984, 0.046858272552490234, 0.046639102935791016, 0.04667391967773438, 0.04654671859741211, 0.04685641479492188, 0.04793241500854492, 0.047123550415039066, 0.0476712646484375, 0.04730428695678711, 0.04679510498046875, 0.046824703216552736, 0.04748758316040039, 0.046561439514160155, 0.046628288269042965, 0.046577919006347654, 0.046319713592529295, 0.04679087829589844, 0.04697087860107422, 0.04699545669555664, 0.046378753662109376, 0.04675609588623047, 0.04720844650268555, 0.0466187515258789, 0.04658095932006836, 0.046643745422363284, 0.04662489700317383, 0.046859615325927736, 0.04721219253540039, 0.04900147247314453, 0.04723507308959961, 0.046618431091308594, 0.047357345581054686, 0.047457054138183595, 0.04759347152709961, 0.04745391845703125, 0.04789894485473633, 0.04769174575805664, 0.047761409759521485, 0.04772774505615234, 0.047711040496826174, 0.04726995086669922, 0.04738252639770508, 0.049094303131103516, 0.04719219207763672, 0.04698953628540039, 0.04706099319458008, 0.04803228759765625, 0.04737433624267578, 0.047443038940429685, 0.04699619293212891, 0.0474318733215332, 0.05015961456298828, 0.04756623840332031, 0.046672481536865235, 0.04710124969482422, 0.046946495056152344, 0.04699504089355469, 0.046825855255126954, 0.04696937561035156, 0.04729219055175781, 0.046991615295410155, 0.047013439178466794, 0.046635425567626954, 0.048473217010498046, 0.047618175506591795, 0.04692044830322266, 0.04675788879394531, 0.04693398284912109, 0.04655036926269531, 0.046757568359375, 0.04713369750976563, 0.04774889755249023, 0.048611518859863284, 0.04841884613037109, 0.04786995315551758, 0.04749235153198242, 0.0480366096496582, 0.047230976104736325, 0.04860313415527344, 0.04782694244384766, 0.04699321746826172, 0.04710188674926758, 0.04873382568359375, 0.048431297302246094, 0.04814275360107422, 0.047515617370605466, 0.04732089614868164, 0.04727612686157227, 0.04716969680786133, 0.04682726287841797, 0.04679500961303711, 0.047685184478759766, 0.04737836837768555, 0.048185855865478515, 0.047316127777099606, 0.04718435287475586, 0.04767987060546875, 0.04789593505859375, 0.048755329132080076, 0.048266719818115235, 0.047229473114013674, 0.04703641510009766, 0.046944255828857424, 0.04749427032470703, 0.04704726409912109, 0.04710211181640625, 0.04704473495483399, 0.047680641174316404, 0.04671577453613281, 0.04816796875, 0.047348670959472657, 0.04704035186767578, 0.04638252639770508, 0.04736278533935547, 0.04671452713012696, 0.046225791931152345, 0.04664275360107422, 0.04695251083374023, 0.04640393447875977, 0.04679257583618164, 0.04678668975830078, 0.04687606430053711, 0.04665222549438477, 0.04718159866333008, 0.04641996765136719, 0.047341567993164066, 0.0467720947265625, 0.046706817626953126, 0.046720321655273435, 0.046516929626464844, 0.04628684616088867, 0.046652511596679686, 0.046697185516357424, 0.04684771347045898, 0.046778751373291017, 
0.046188640594482425, 0.04658108901977539, 0.04848624038696289, 0.04791379165649414, 0.04681059265136719, 0.04688729476928711, 0.046771839141845704, 0.04663468933105469, 0.04629385757446289, 0.046640159606933594, 0.046521312713623045, 0.04683494567871094, 0.04716825485229492, 0.046808799743652346, 0.04679913711547851, 0.046721023559570314, 0.04625568008422851, 0.04676448059082031, 0.04634828948974609, 0.04647052764892578, 0.04632025527954101, 0.04637081527709961, 0.04663296127319336, 0.046959617614746096, 0.047115264892578126, 0.04619436645507812, 0.04670291137695313, 0.04675715255737305, 0.04638803100585937, 0.046546848297119144, 0.046624160766601565, 0.04667782211303711, 0.04716624069213867, 0.047986049652099606, 0.04713558578491211, 0.04701721572875977, 0.04676457595825195, 0.04762195205688476, 0.04653526306152344, 0.047161407470703125, 0.04673331069946289, 0.04664879989624023, 0.04676182556152344, 0.046406238555908204, 0.04638524627685547, 0.047169151306152346, 0.04649612808227539, 0.04676995086669922, 0.046665950775146486, 0.04638105773925781, 0.04633804702758789, 0.046417312622070314, 0.046864990234375, 0.04632985687255859, 0.04612860870361328, 0.046130817413330076, 0.04671529769897461, 0.04640787124633789, 0.04642444610595703, 0.04623062515258789, 0.0461300163269043, 0.046182369232177736, 0.04620899200439453, 0.04638899230957031, 0.046582206726074216, 0.04658777618408203, 0.047239166259765625, 0.04686643218994141, 0.046706687927246096, 0.04676367950439453, 0.046797153472900394, 0.056564990997314456, 0.04691759872436523, 0.046647201538085936, 0.046263168334960934, 0.04679884719848633, 0.047430816650390624, 0.047145503997802735, 0.04706335830688477, 0.04728380966186523, 0.047693790435791014, 0.048850528717041014, 0.04780841445922852, 0.04768044662475586, 0.04805779266357422, 0.047408992767333985, 0.04812995147705078, 0.04731321716308594, 0.04814694213867188, 0.047259040832519535, 0.04720230484008789, 0.04689775848388672, 0.0470906867980957, 0.04696950531005859, 0.04863382339477539, 0.046583614349365234, 0.046649921417236326, 0.04628275299072265, 0.046820545196533205, 0.046561473846435546, 0.04751968002319336, 0.04733724975585937, 0.04693689727783203, 0.04712393569946289, 0.048810657501220704, 0.04682566452026367, 0.04655199813842773, 0.04680489730834961, 0.046905406951904295, 0.047152030944824216, 0.0480436782836914, 0.04985440063476562, 0.047061695098876956, 0.04710985565185547, 0.04703184127807617, 0.046808673858642576, 0.04722528076171875, 0.04729212951660156, 0.047542079925537106, 0.048065025329589846, 0.0480808334350586, 0.04831075286865234, 0.047167137145996095, 0.04757948684692383, 0.0474152946472168, 0.04806860733032227, 0.04756675338745117, 0.047731807708740234, 0.04871401596069336, 0.04801590347290039, 0.04768505477905274, 0.048337024688720705, 0.04752256011962891, 0.0474205436706543, 0.04769196701049805, 0.05046537780761719, 0.047382495880126954, 0.047527935028076174, 0.046707710266113284, 0.04761804962158203, 0.04766979217529297, 0.047394752502441406, 0.04677891159057617, 0.04737814331054688, 0.04771254348754883, 0.04708147048950195, 0.04778803253173828, 0.046671169281005856, 0.04694905471801758, 0.04695609664916992, 0.04643475341796875, 0.04679884719848633, 0.04716521453857422, 0.04720048141479492, 0.04711423873901367, 0.047503360748291014, 0.04759347152709961, 0.04754544067382813, 0.04783359909057617, 0.04692828750610351, 0.04676403045654297, 0.047587070465087894, 0.047298622131347653, 0.0488487663269043, 0.047531742095947266, 0.04719007873535156, 0.047638496398925784, 
0.04738905715942383, 0.046876670837402344, 0.04695654296875, 0.0469667854309082, 0.04727215957641601, 0.04679644775390625, 0.04684601593017578, 0.04655519866943359, 0.046399486541748046, 0.04641996765136719, 0.04656268692016602, 0.04810611343383789, 0.047548385620117185, 0.04921267318725586, 0.04668204879760742, 0.04705775833129883, 0.04655104064941406, 0.04638515090942383, 0.04617216110229492, 0.046088191986083986, 0.04652646255493164, 0.04622502517700195, 0.04627084732055664, 0.04674675369262695, 0.04654169464111328, 0.04649369430541992, 0.046271488189697264, 0.04640153503417969, 0.046330753326416015, 0.046002304077148434, 0.046386302947998045, 0.04681119918823242, 0.047315776824951174, 0.04654025650024414, 0.04661507034301758, 0.04642406463623047, 0.046295040130615236, 0.04775721740722656, 0.04747622299194336, 0.046838367462158206, 0.04632495880126953, 0.04647196960449219, 0.04659939193725586, 0.04662259292602539, 0.046685089111328126, 0.04666777420043945, 0.046526081085205076, 0.04686409759521484, 0.046586528778076175, 0.047476417541503904, 0.046855552673339844, 0.04648028945922852, 0.04662623977661133, 0.046938720703125, 0.046779552459716794, 0.0471921272277832, 0.048255775451660154, 0.046729217529296874, 0.04653814315795898, 0.047503520965576175, 0.0478342399597168, 0.046466815948486326, 0.047117118835449216, 0.04688816070556641, 0.04684470367431641, 0.04652457427978516, 0.04742348861694336, 0.04739052963256836, 0.04725574493408203, 0.04762809753417969, 0.04777798461914062, 0.04729388809204101, 0.04740972900390625, 0.04684163284301758, 0.04687484741210937, 0.04741046524047852, 0.04749715042114258, 0.04816566467285156, 0.04869318389892578, 0.047658782958984375, 0.04729414367675781, 0.04742998504638672, 0.047231231689453125, 0.04741939163208008, 0.04740915298461914, 0.04686620712280273, 0.047429855346679685, 0.04698223876953125, 0.04693084716796875, 0.04651603317260742, 0.047218143463134764, 0.04783929443359375, 0.04715375900268555, 0.046821025848388674, 0.04717510223388672, 0.046758655548095704, 0.046930145263671875, 0.04730441665649414, 0.04789254379272461, 0.046868831634521484, 0.04780166244506836, 0.049130046844482425, 0.046845951080322266, 0.04707328033447265, 0.04674150466918945, 0.04734956741333008, 0.047320510864257814, 0.04708832168579102, 0.047941505432128904, 0.047533279418945314, 0.04739753723144531, 0.046987583160400394, 0.04689920043945312, 0.04657356643676758, 0.04718182373046875, 0.047212512969970706, 0.047263774871826175, 0.046879905700683594, 0.047352672576904294, 0.047120094299316406, 0.04666172790527344, 0.04700384140014648, 0.04744844818115234, 0.046688255310058595, 0.04717363357543945, 0.04963913726806641, 0.047607295989990236, 0.0470219841003418, 0.04696575927734375, 0.04680691146850586, 0.046877983093261716, 0.04727385711669922, 0.046749759674072265, 0.046795326232910155, 0.04699276733398437, 0.047766334533691404, 0.04735315322875976, 0.047411937713623044, 0.0466945915222168, 0.046677345275878905, 0.04677475357055664, 0.0466978874206543, 0.04660233688354492, 0.046652992248535155, 0.04700223922729492, 0.04655868911743164, 0.0468666877746582, 0.04677068710327149, 0.047417503356933594, 0.04684563064575195, 0.04742784118652344, 0.04669974517822266, 0.04682387161254883, 0.04711254501342774, 0.047257598876953126, 0.04845116806030274, 0.04795612716674805, 0.04760137557983399, 0.04879779052734375, 0.0474587516784668, 0.0468106575012207, 0.04695817565917969, 0.04704550552368164, 0.052258815765380856, 0.04734902572631836, 0.04740521621704102, 0.04699107360839844, 
0.04696150588989258, 0.046960639953613284, 0.04730444717407226, 0.04721075057983398, 0.04742758560180664, 0.04925235366821289, 0.04864614486694336, 0.04960665512084961, 0.0477388801574707, 0.04730255889892578, 0.04690134429931641, 0.04705484771728516, 0.04718713760375977, 0.047943649291992185, 0.04791356658935547, 0.048906494140625, 0.04864614486694336, 0.04719968032836914, 0.04823052978515625, 0.047897953033447266, 0.04774576187133789, 0.04719206237792969, 0.047656673431396485, 0.04744630432128906, 0.04753408050537109, 0.047439041137695315, 0.048848991394042966, 0.04792745590209961, 0.04860371017456055, 0.04788348770141602, 0.04724764633178711, 0.050075904846191406, 0.0487691535949707, 0.04749856185913086, 0.046991649627685546, 0.047104705810546876, 0.04727391815185547, 0.04716694259643555, 0.04764307022094726, 0.04737449645996094, 0.047881439208984376, 0.0489315185546875, 0.047734176635742184, 0.04727043151855469, 0.047056671142578124, 0.048404670715332034, 0.04826319885253906, 0.04805580902099609, 0.04702873611450195, 0.04692556762695312, 0.046908702850341794, 0.046588897705078125, 0.04638217544555664, 0.051628704071044924, 0.047281566619873046, 0.04671369552612305, 0.047414783477783204, 0.047567359924316405, 0.04737161636352539, 0.04703039932250976, 0.046819839477539066, 0.04669033432006836, 0.048164031982421876, 0.04691641616821289, 0.047298561096191405, 0.04678451156616211, 0.04714086532592773, 0.04628070449829102, 0.04678627014160156, 0.04635881423950195, 0.04660019302368164, 0.04680704116821289, 0.04767948913574219, 0.046731521606445316, 0.04704844665527344, 0.052084735870361325, 0.047101951599121096, 0.046978912353515624, 0.047508865356445315, 0.04701577758789063, 0.04673632049560547, 0.04760985565185547, 0.0469936637878418, 0.04701875305175781, 0.046769153594970705, 0.046972896575927736, 0.046948287963867186, 0.04652588653564453, 0.04685203170776367, 0.0514087028503418, 0.046620800018310544, 0.04679081726074219, 0.04674560165405273, 0.04651481628417969, 0.04717772674560547, 0.0469153938293457, 0.046948543548583986, 0.04664524841308594, 0.04686438369750977, 0.04665702438354492, 0.04664569473266601, 0.04660025787353515, 0.047239166259765625, 0.0474851188659668, 0.04706224060058594, 0.046820030212402344, 0.04663619232177734, 0.04722355270385742, 0.046927871704101565, 0.04740095901489258, 0.04836127853393555, 0.05262969589233398, 0.04703372955322266, 0.04741526412963867, 0.04682332611083984, 0.04807347106933594, 0.04753203201293945, 0.04765849685668945, 0.047360511779785154, 0.04761737442016602, 0.05051619338989258, 0.047105857849121094, 0.047098495483398437, 0.047204063415527346, 0.046987617492675784, 0.0469826545715332, 0.046876800537109374, 0.04747500610351563, 0.04682928085327148, 0.04667180633544922, 0.04744620895385742, 0.04724342346191406, 0.047126529693603515, 0.04985007858276367, 0.04715491104125977, 0.0466798095703125, 0.04676860809326172, 0.047323486328125, 0.04685968017578125, 0.04753427124023438, 0.0473645133972168, 0.047693790435791014, 0.04737542343139649, 0.04738076782226563]",tokens/s,21.167170942997164,, 
float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpogz2hpp8/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1008.377856,13922.861056,0.0,13520.338944,13508.0832,s,1,8.4436865234375,8.4436865234375,0.0,8.4436865234375,8.4436865234375,8.4436865234375,8.4436865234375,[8.4436865234375],,kWh,8.606215537520256e-06,9.384032057820807e-07,3.50305835800091e-06,1.3047677101303247e-05,,MB,1360.93696,13937.54112,0.0,13524.533248,11787.729408,s,10,3.931678955078125,0.3931678955078125,0.005380609996804708,0.3942332611083984,0.39805140686035156,0.39866272735595704,0.3991517837524414,"[0.37934295654296873, 0.39106527709960937, 0.390093505859375, 0.394238037109375, 0.394266845703125, 0.39422848510742187, 0.3934929809570312, 0.39776126098632814, 0.3992740478515625, 0.39791555786132815]",tokens/s,651.1213222772232,kWh,1.1374485301440147e-05,1.2543893864440122e-06,7.484332910539857e-06,2.0113207598424015e-05,tokens/kWh,12727954.939422943,MB,1378.496512,13939.638272,0.0,13526.6304,11790.353408,s,10,34.58362524414063,3.4583625244140626,0.003414616379578957,3.4583970947265623,3.462887109375,3.46318330078125,3.46342025390625,"[3.45578857421875, 3.4541142578125, 3.454358642578125, 3.4544697265625, 3.457624755859375, 3.45916943359375, 3.461449951171875, 3.4634794921875, 3.4628212890625, 3.46034912109375]",tokens/s,18.216713706343974,kWh,0.0001012383465714714,1.116691202664005e-05,6.712714344525605e-05,0.0001795324020433675,tokens/kWh,350911.5863373891,,s,630,34.58168772125247,0.05489156781151181,0.0005740122471048801,0.054784608840942384,0.055153331756591796,0.05532016086578369,0.05871989833831788,"[0.05847545623779297, 0.05548031997680664, 0.05456076812744141, 0.05475532913208008, 0.05486182403564453, 0.054691841125488284, 0.054615936279296874, 0.05468729782104492, 0.05499062347412109, 0.05469996643066406, 0.05484016036987305, 0.0548939208984375, 0.054548385620117185, 0.05490723037719727, 0.05473452758789062, 0.05467619323730469, 0.0545478401184082, 0.05490300750732422, 0.055508575439453124, 0.05495033645629883, 0.05470655822753906, 0.05502540969848633, 0.05468185424804688, 0.05451507186889649, 0.054884990692138674, 0.05508505630493164, 0.054710208892822264, 0.054601600646972656, 0.054841537475585934, 0.05470003128051758, 0.05454643249511719, 0.05453619384765625, 0.05489049530029297, 0.05467049789428711, 0.05466588973999023, 0.0551262092590332, 0.05488435363769531, 0.0546693115234375, 0.05487411117553711, 0.05495129776000977, 0.054631038665771486, 0.054505054473876956, 0.05502316665649414, 0.05458015823364258, 0.05455043029785156, 0.054636512756347656, 0.05488028717041016, 0.05450956726074219, 
0.05465292739868164, 0.05476147079467773, 0.054740993499755856, 0.05472051239013672, 0.05489868927001953, 0.05497241592407227, 0.05511167907714844, 0.05486796951293945, 0.0552119026184082, 0.05493158340454102, 0.05467340850830078, 0.05467878341674805, 0.05510601425170898, 0.054610206604003904, 0.05446368026733398, 0.059611968994140625, 0.05522227096557617, 0.05466316986083984, 0.05458943939208984, 0.054701343536376956, 0.054782337188720706, 0.05492582321166992, 0.054621150970458984, 0.054670207977294924, 0.05492326354980469, 0.054599105834960936, 0.054494976043701175, 0.0548559684753418, 0.05456300735473633, 0.05450377655029297, 0.05451776123046875, 0.05468560028076172, 0.05475132751464844, 0.054779232025146486, 0.05508726501464844, 0.05465254211425781, 0.05445312118530273, 0.05461193466186524, 0.05492291259765625, 0.05441756820678711, 0.05450364685058594, 0.05491097640991211, 0.054626304626464846, 0.05464377593994141, 0.054631359100341795, 0.05447270584106445, 0.054462337493896486, 0.05519577789306641, 0.054919166564941405, 0.05464432144165039, 0.05465510559082031, 0.05492969512939453, 0.054822433471679685, 0.054608352661132814, 0.054813983917236325, 0.054979297637939455, 0.05487958526611328, 0.054512287139892576, 0.0550847053527832, 0.054665695190429686, 0.05457920074462891, 0.05503558349609375, 0.055296192169189455, 0.05468569564819336, 0.05479219055175781, 0.05483929443359375, 0.05475078582763672, 0.05488479995727539, 0.054773761749267576, 0.05502883148193359, 0.05462860870361328, 0.05471692657470703, 0.054932926177978514, 0.05474991989135742, 0.054573055267333984, 0.05484134292602539, 0.05473878479003906, 0.054504928588867185, 0.058752414703369144, 0.055065185546875, 0.0545299186706543, 0.0544851188659668, 0.05445555114746094, 0.054563934326171876, 0.054717697143554685, 0.05453251266479492, 0.05462015914916992, 0.05465398406982422, 0.05454537582397461, 0.05450342559814453, 0.054496959686279295, 0.05449964904785156, 0.05452185440063476, 0.05456486511230469, 0.05480448150634765, 0.05482073593139648, 0.05486140823364258, 0.05502825546264648, 0.05495916748046875, 0.054526912689208985, 0.054728702545166014, 0.054933086395263675, 0.05508752059936523, 0.05491097640991211, 0.055011329650878904, 0.05468364715576172, 0.0547243537902832, 0.05464019012451172, 0.05508297729492188, 0.05467619323730469, 0.054609535217285156, 0.054927040100097656, 0.054763294219970705, 0.05470505523681641, 0.05527961730957031, 0.05536524963378906, 0.05502579116821289, 0.05467571258544922, 0.05483929443359375, 0.05474211120605469, 0.054575233459472655, 0.05474991989135742, 0.05493094253540039, 0.05470470428466797, 0.05470544052124023, 0.05484374237060547, 0.055013343811035155, 0.05470044708251953, 0.05481676864624024, 0.05493350219726562, 0.05487615966796875, 0.05464883041381836, 0.05499084854125977, 0.05476147079467773, 0.05458857727050781, 0.05481478500366211, 0.054991905212402346, 0.05465225601196289, 0.05444550323486328, 0.05480112075805664, 0.05472687911987305, 0.05857875061035156, 0.055433406829833984, 0.054595073699951174, 0.054609920501708986, 0.054671871185302735, 0.054542335510253906, 0.05450934219360352, 0.05479241561889649, 0.05491417694091797, 0.05475417709350586, 0.054965633392333985, 0.05476416015625, 0.054582977294921876, 0.054669857025146484, 0.05470800018310547, 0.054550529479980465, 0.05467340850830078, 0.05559910583496094, 0.05552873611450195, 0.054911712646484374, 0.054962303161621096, 0.05479827117919922, 0.05479145431518555, 0.05465769577026367, 0.054740993499755856, 0.054750686645507814, 
0.05461590576171875, 0.05451212692260742, 0.05475347137451172, 0.05460483169555664, 0.05454332733154297, 0.05461606216430664, 0.05467750549316406, 0.054433311462402344, 0.05477014541625977, 0.05512192153930664, 0.05493145751953125, 0.05464665603637695, 0.05489059066772461, 0.054824703216552736, 0.0544791374206543, 0.054573055267333984, 0.05516646575927735, 0.054739456176757816, 0.05457049560546875, 0.054597408294677734, 0.054612289428710936, 0.05446902465820312, 0.0546324462890625, 0.0547938232421875, 0.05456118392944336, 0.0544986572265625, 0.05483900833129883, 0.05467436981201172, 0.054682689666748045, 0.054803390502929684, 0.05521203231811524, 0.055027713775634764, 0.054880256652832034, 0.05511782455444336, 0.054701278686523434, 0.05484828948974609, 0.05478361511230469, 0.059379711151123046, 0.05548646545410156, 0.05472438430786133, 0.05453638458251953, 0.05463158416748047, 0.05474803161621094, 0.055139713287353516, 0.054628063201904296, 0.054773761749267576, 0.05463132858276367, 0.0546693115234375, 0.05452185440063476, 0.0546324462890625, 0.05493888092041015, 0.054687519073486325, 0.05466825485229492, 0.054779903411865234, 0.05532262420654297, 0.05511884689331055, 0.05510246276855469, 0.05508095932006836, 0.0547327995300293, 0.05462015914916992, 0.054831104278564455, 0.05459715270996094, 0.05468617630004883, 0.054601856231689457, 0.05476339340209961, 0.05462835311889649, 0.05459081649780274, 0.05474371337890625, 0.054623680114746095, 0.054493759155273436, 0.054790145874023435, 0.05469388961791992, 0.05463654327392578, 0.054937599182128906, 0.05490099334716797, 0.0547547836303711, 0.054761505126953124, 0.05500543975830078, 0.05477686309814453, 0.05454742431640625, 0.054798336029052735, 0.05465491104125977, 0.0546099853515625, 0.05484064102172852, 0.05514924621582031, 0.0545873908996582, 0.05456486511230469, 0.055180992126464844, 0.054626625061035154, 0.05465449523925781, 0.05491318511962891, 0.05545606231689453, 0.05519769668579102, 0.054779903411865234, 0.05495107269287109, 0.05506339263916016, 0.054779903411865234, 0.05499615859985352, 0.05492780685424805, 0.05476160049438476, 0.059252063751220704, 0.055317150115966794, 0.05466470336914062, 0.05465145492553711, 0.054796287536621094, 0.05459734344482422, 0.05451190567016601, 0.05485295867919922, 0.0546732177734375, 0.054481758117675784, 0.0547407341003418, 0.055627105712890625, 0.0544736328125, 0.05454848098754883, 0.054749183654785157, 0.05455177688598633, 0.05465497589111328, 0.0551055679321289, 0.05537254333496094, 0.05485916900634766, 0.054674015045166016, 0.05479328155517578, 0.05471075057983398, 0.05456943893432617, 0.05473459243774414, 0.05502409744262695, 0.05459529495239258, 0.05501715087890625, 0.054968257904052735, 0.05476748657226563, 0.05494147109985351, 0.05483555221557617, 0.05460012817382812, 0.054524993896484374, 0.0547105598449707, 0.055529247283935546, 0.055134334564208985, 0.055057151794433594, 0.05504732894897461, 0.055074752807617186, 0.05463951873779297, 0.054856735229492186, 0.055134368896484376, 0.05472867202758789, 0.05456777572631836, 0.054765567779541016, 0.055367294311523436, 0.05467788696289062, 0.05456281661987305, 0.05489868927001953, 0.0545873908996582, 0.054866016387939455, 0.05495798492431641, 0.05503180694580078, 0.054865921020507816, 0.0547061767578125, 0.055209983825683595, 0.054814720153808595, 0.054879295349121095, 0.054833919525146484, 0.05508726501464844, 0.05469392013549805, 0.054472415924072266, 0.05864028930664063, 0.055295711517333986, 0.054816127777099606, 0.054712959289550785, 
0.05460105514526367, 0.05461708831787109, 0.05500118255615234, 0.05464393615722656, 0.05458099365234375, 0.05475129699707031, 0.054534942626953124, 0.05458931350708008, 0.054825408935546875, 0.05493532943725586, 0.05508879852294922, 0.05484479904174805, 0.055822654724121096, 0.05512006378173828, 0.054913505554199216, 0.055058368682861326, 0.05504211044311524, 0.05478163146972656, 0.05490431976318359, 0.05510022354125976, 0.0553507194519043, 0.05472723388671875, 0.05467087936401367, 0.054986400604248045, 0.05483747100830078, 0.054633056640625, 0.054801536560058595, 0.05498559951782227, 0.05465705490112305, 0.05493756866455078, 0.05506252670288086, 0.05509734344482422, 0.0547512321472168, 0.054879646301269534, 0.05520844650268555, 0.054793697357177734, 0.05467766571044922, 0.054867679595947266, 0.05502137756347656, 0.05465091323852539, 0.0548054084777832, 0.05493145751953125, 0.05476736068725586, 0.05475529479980469, 0.05479452896118164, 0.05511721420288086, 0.05459321594238281, 0.05477264022827148, 0.055035903930664064, 0.054988414764404296, 0.054796096801757815, 0.05509097671508789, 0.05527836990356445, 0.05473689651489258, 0.054654304504394534, 0.054966945648193356, 0.054879390716552734, 0.05461715316772461, 0.05483683013916016, 0.05904528045654297, 0.05563158416748047, 0.05488032150268555, 0.054898880004882813, 0.055144447326660156, 0.05477222442626953, 0.05478166580200195, 0.05462444686889648, 0.054763744354248044, 0.05473411178588867, 0.054854366302490236, 0.054763519287109375, 0.054863872528076174, 0.05497148895263672, 0.054796287536621094, 0.05466204833984375, 0.05478425598144531, 0.05536924743652344, 0.05510899353027344, 0.05494460678100586, 0.054943679809570316, 0.0551525764465332, 0.05462656021118164, 0.05492099380493164, 0.055199520111083984, 0.055046463012695314, 0.05469529724121094, 0.05505295944213867, 0.05491094589233399, 0.05466300964355469, 0.054894752502441406, 0.05485881423950195, 0.055010238647460935, 0.05463859176635742, 0.05489664077758789, 0.0550748176574707, 0.054860065460205075, 0.054877056121826175, 0.05499776077270508, 0.05479945755004883, 0.05474816131591797, 0.05526121520996094, 0.05541811370849609, 0.054682334899902346, 0.054738239288330076, 0.05485228729248047, 0.05464678573608398, 0.05452799987792969, 0.054812671661376954, 0.05518864059448242, 0.05462307357788086, 0.05465894317626953, 0.05554188919067383, 0.05482905578613281, 0.05489254379272461, 0.0550645751953125, 0.055144447326660156, 0.054671169281005856, 0.0547760009765625, 0.055160129547119144, 0.05482566452026367, 0.05468531036376953, 0.05497241592407227, 0.059313793182373044, 0.05537212753295898, 0.05487596893310547, 0.055032001495361325, 0.054746944427490236, 0.054554817199707034, 0.05473894500732422, 0.05482700729370117, 0.05457715225219727, 0.05490595245361328, 0.055309215545654294, 0.054978561401367185, 0.05470614242553711, 0.05477001571655273, 0.054658241271972656, 0.05466892623901367, 0.05473574447631836, 0.05534899139404297, 0.05519747161865234, 0.05495651245117188, 0.05492531204223633, 0.054724128723144534, 0.054581153869628904, 0.054731327056884764, 0.05494524765014648, 0.054596126556396486, 0.05467129516601563, 0.054820255279541014, 0.054615966796875, 0.05463935852050781, 0.05471155166625977, 0.05478271865844726, 0.054745025634765625, 0.05519699096679687, 0.05517401504516602, 0.05501046371459961, 0.054886302947998046, 0.054946624755859375, 0.05517513656616211, 0.05509737777709961, 0.05480038452148438, 0.054963710784912106, 0.055070430755615234, 0.05504431915283203, 0.055145023345947265, 
0.05527142333984375, 0.05484921646118164, 0.054788257598876955, 0.05505580902099609, 0.05472476959228516, 0.054757953643798825, 0.05482291030883789, 0.055357440948486325, 0.05505231857299805, 0.0549150390625, 0.05503513717651367, 0.054975231170654296, 0.05470207977294922, 0.054865921020507816, 0.05501862335205078, 0.05474137496948242, 0.054616321563720704, 0.05481702423095703, 0.059189697265625, 0.05531635284423828, 0.05479212951660156, 0.05469833755493164, 0.05455686569213867, 0.05460310363769531, 0.05474092864990234, 0.054655712127685545, 0.05458454513549805, 0.05471920013427734, 0.05464684677124024, 0.05454361724853515, 0.05502819061279297, 0.05487235260009766, 0.054712318420410154, 0.0548590087890625, 0.054913726806640625, 0.05505654525756836, 0.0550269775390625, 0.0551962890625, 0.05531238555908203, 0.05508915328979492, 0.05494707107543945, 0.05512243270874023, 0.0551622085571289, 0.05488265609741211, 0.05470579147338867, 0.054784961700439457, 0.0546611213684082, 0.0545873908996582, 0.05472256088256836, 0.05529190444946289, 0.05484076690673828, 0.05495865631103516, 0.054986751556396485, 0.05501337432861328, 0.054986751556396485, 0.05522022247314453, 0.05524403381347656, 0.05500390243530273, 0.054773761749267576, 0.05480652618408203, 0.054820289611816404, 0.054559425354003904, 0.05472224044799805, 0.05492720031738281, 0.05475356674194336, 0.05459769439697266, 0.05475689697265625, 0.0547344970703125, 0.054901409149169925, 0.05470361709594727, 0.05503247833251953, 0.05482291030883789, 0.05483520126342773, 0.05492902374267578, 0.05484521484375, 0.05472480010986328, 0.05462672042846679, 0.05500044631958008, 0.05471072006225586, 0.05458963012695312, 0.05475312042236328]",tokens/s,18.217734341890683,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 790, in __init__ self.model = RecurrentGemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 660, in __init__ [RecurrentGemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 660, in [RecurrentGemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 490, in __init__ self.mlp_block = RecurrentGemmaMlp(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 473, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 36.12 MiB is free. Process 452325 has 14.70 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 156.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,938.991616,6540.886016,0.0,6138.363904,6060.931072,s,1,7.37719140625,7.37719140625,0.0,7.37719140625,7.37719140625,7.37719140625,7.37719140625,[7.37719140625],,kWh,5.109241549947304e-06,5.561845555096514e-07,2.928891232018005e-06,8.59431733747496e-06,,MB,1413.062656,6566.05184,0.0,6150.946816,5419.87328,s,10,0.5785139541625978,0.05785139541625976,0.0009518099243710968,0.057694320678710935,0.05940791702270508,0.05949898338317871,0.05957183647155762,"[0.05938768005371094, 0.05638614273071289, 0.05959004974365235, 0.05750223922729492, 0.057707328796386716, 0.05775177764892578, 0.057681312561035154, 0.05748675155639649, 0.058211135864257815, 0.05680953598022461]",tokens/s,4425.130943825919,kWh,1.8903672903764745e-06,2.084740903599266e-07,1.2601963486580022e-06,3.359037729394403e-06,tokens/kWh,76212302.63649166,MB,1468.96896,6568.148992,0.0,6150.946816,5419.87584,s,10,16.952085449218753,1.695208544921875,0.012099712267101815,1.6924345092773438,1.7103209228515626,1.7169610595703124,1.7222731689453126,"[1.7236011962890625, 1.7088453369140626, 1.694952392578125, 1.6966339111328126, 1.6888203125, 1.690185546875, 1.6867459716796875, 1.6774317626953126, 1.6918145751953124, 1.693054443359375]",tokens/s,37.163569160102014,kWh,4.919446395462152e-05,5.425829694169537e-06,3.2515552894142914e-05,8.713584654293396e-05,tokens/kWh,723008.9853887901,,s,630,16.94988215827943,0.026904574854411775,0.00047021143322041137,0.02681273555755615,0.027335270118713378,0.027476702117919923,0.02895300701141358,"[0.027885791778564453, 0.027489952087402344, 0.02755824089050293, 0.02736332893371582, 0.02738380813598633, 0.02732815933227539, 0.027334943771362304, 0.027275039672851564, 0.027434848785400392, 0.02757609558105469, 0.027498815536499025, 0.027233919143676757, 0.027351776123046876, 0.027432960510253908, 0.02748374366760254, 0.027510271072387696, 0.02753219223022461, 0.02745148849487305, 0.02742268753051758, 0.027531200408935547, 0.02730339241027832, 0.02738844871520996, 0.027418624877929686, 0.027439104080200196, 0.02762303924560547, 0.027569536209106446, 0.027444223403930663, 0.02741372871398926, 0.027390527725219726, 0.027277536392211914, 0.027333696365356444, 0.02745030403137207, 0.027420352935791016, 0.027489824295043944, 0.027284255981445314, 0.027256832122802735, 0.027258464813232422, 0.02727120018005371, 0.027332063674926757, 0.027192224502563478, 0.027146015167236328, 0.027217599868774416, 0.02720591926574707, 0.02710259246826172, 0.02714303970336914, 0.027135583877563478, 0.027164928436279295, 0.027074720382690428, 0.02721343994140625, 0.02710361671447754, 0.027172384262084962, 0.027447423934936523, 0.027382112503051757, 0.027346080780029296, 
0.027454303741455077, 0.02738105583190918, 0.027367136001586915, 0.027257823944091798, 0.027385856628417967, 0.027237951278686525, 0.027279808044433595, 0.02720956802368164, 0.02728156852722168, 0.028073312759399414, 0.027308767318725585, 0.027278944015502928, 0.02724224090576172, 0.027103904724121095, 0.027238399505615234, 0.027240447998046875, 0.02721171188354492, 0.027160640716552734, 0.027168575286865233, 0.02719148826599121, 0.02707244873046875, 0.02725279998779297, 0.027265024185180665, 0.027214975357055665, 0.027239103317260743, 0.02724185562133789, 0.02732320022583008, 0.027131904602050783, 0.02713804817199707, 0.02717420768737793, 0.02741878318786621, 0.027267839431762694, 0.027227327346801757, 0.02728988838195801, 0.027468095779418944, 0.027389663696289063, 0.027451679229736327, 0.027295743942260742, 0.02741596794128418, 0.027310688018798827, 0.027187456130981447, 0.027221696853637695, 0.02719340705871582, 0.027146240234375, 0.02713155174255371, 0.027187551498413086, 0.027039615631103516, 0.02717670440673828, 0.027138208389282225, 0.027123935699462892, 0.027305984497070314, 0.026959871292114256, 0.02711903953552246, 0.027033632278442382, 0.02717568016052246, 0.027414239883422852, 0.027129919052124023, 0.026841344833374022, 0.026715423583984373, 0.02688662338256836, 0.026976032257080076, 0.026902751922607424, 0.026660863876342773, 0.026629440307617186, 0.026813119888305665, 0.026746879577636717, 0.026583040237426758, 0.02665679931640625, 0.026649728775024414, 0.026721120834350586, 0.026770912170410156, 0.026599903106689454, 0.02759884834289551, 0.027031551361083983, 0.026748479843139647, 0.026675647735595703, 0.026879999160766603, 0.026845184326171875, 0.026808160781860352, 0.026785951614379883, 0.02676121520996094, 0.026679040908813477, 0.02657449531555176, 0.026606271743774414, 0.02650102424621582, 0.02661555290222168, 0.0267836799621582, 0.026548543930053712, 0.026510719299316407, 0.02664860725402832, 0.026964576721191406, 0.02692095947265625, 0.026712064743041993, 0.026606687545776365, 0.026629024505615235, 0.02652128028869629, 0.02660793685913086, 0.026867103576660157, 0.02905353546142578, 0.028850175857543944, 0.02660492706298828, 0.026590944290161133, 0.026635168075561523, 0.026687488555908204, 0.02717695999145508, 0.026834047317504883, 0.026787839889526367, 0.02682716751098633, 0.027144128799438477, 0.026836832046508788, 0.026981056213378905, 0.026869760513305665, 0.027082752227783204, 0.026808095932006837, 0.026704095840454103, 0.026712064743041993, 0.02713520050048828, 0.026927583694458006, 0.02693766403198242, 0.027115520477294923, 0.02690656089782715, 0.02725187110900879, 0.026956703186035155, 0.027403968811035156, 0.027000831604003905, 0.026900800704956054, 0.026838848114013672, 0.026925247192382814, 0.026855424880981447, 0.026927104949951174, 0.026734207153320314, 0.026790271759033202, 0.026843072891235352, 0.026722368240356446, 0.026935487747192382, 0.03061347198486328, 0.027447296142578126, 0.027148288726806642, 0.027014976501464845, 0.027062463760375976, 0.026808319091796876, 0.02708995246887207, 0.027009599685668944, 0.027152799606323243, 0.02696544075012207, 0.027023008346557617, 0.02690934371948242, 0.026954303741455077, 0.02691449546813965, 0.026916864395141602, 0.027203231811523437, 0.027248992919921874, 0.027112703323364257, 0.02702822494506836, 0.0271517448425293, 0.027029407501220702, 0.026957664489746094, 0.026849376678466798, 0.02828067207336426, 0.02724550437927246, 0.026828800201416016, 0.026746879577636717, 0.026885408401489258, 0.02699958419799805, 
0.02667628860473633, 0.02690483283996582, 0.026855199813842774, 0.026861471176147463, 0.02688310432434082, 0.02709084892272949, 0.026910720825195314, 0.026814464569091798, 0.02675619125366211, 0.026860448837280275, 0.026624000549316407, 0.02731007957458496, 0.026725568771362306, 0.026642528533935547, 0.02654070472717285, 0.026520864486694336, 0.026663488388061523, 0.026628320693969726, 0.026521600723266602, 0.026568256378173827, 0.026407136917114257, 0.02656278419494629, 0.0264616641998291, 0.026434080123901367, 0.026447040557861328, 0.026350400924682618, 0.026513408660888672, 0.026454015731811522, 0.028065792083740236, 0.026475936889648437, 0.02646895980834961, 0.02651651191711426, 0.02674787139892578, 0.026576351165771485, 0.027101823806762695, 0.026621599197387696, 0.026800479888916016, 0.026816511154174806, 0.02659328079223633, 0.026469791412353515, 0.026554975509643555, 0.026454015731811522, 0.026414207458496094, 0.026473344802856444, 0.026463584899902345, 0.026815135955810546, 0.029042688369750977, 0.02667318344116211, 0.02662803268432617, 0.02650115203857422, 0.026589183807373046, 0.02703718376159668, 0.02755353546142578, 0.026641151428222657, 0.026676639556884766, 0.026538400650024413, 0.02662953567504883, 0.026636480331420898, 0.026626367568969727, 0.027018783569335937, 0.026571519851684572, 0.026421503067016603, 0.026552064895629883, 0.026692863464355468, 0.026538496017456056, 0.026794048309326173, 0.02697235107421875, 0.02721552085876465, 0.02677177619934082, 0.02705606460571289, 0.026846944808959963, 0.02684556770324707, 0.026566656112670898, 0.026599424362182617, 0.026680543899536134, 0.02651420783996582, 0.02653183937072754, 0.026689151763916015, 0.02688559913635254, 0.02683977508544922, 0.026667200088500976, 0.026818559646606444, 0.026709728240966797, 0.026635936737060547, 0.026458784103393553, 0.026594335556030274, 0.02647039985656738, 0.026360671997070314, 0.027088991165161135, 0.026953727722167968, 0.02689023971557617, 0.02727120018005371, 0.02899500846862793, 0.02720524787902832, 0.026958879470825196, 0.02674265670776367, 0.026816511154174806, 0.028333663940429688, 0.026982431411743165, 0.026660863876342773, 0.026672927856445313, 0.027017728805541992, 0.02927622413635254, 0.027060224533081056, 0.027006175994873045, 0.02673308753967285, 0.026947839736938477, 0.02672960090637207, 0.02674163246154785, 0.027207679748535156, 0.026736640930175783, 0.026853376388549805, 0.02710086441040039, 0.027285696029663086, 0.026923040390014648, 0.026631967544555664, 0.026660991668701173, 0.0268023681640625, 0.02669059181213379, 0.026690528869628905, 0.026572799682617186, 0.026648576736450196, 0.026684831619262696, 0.02681507110595703, 0.02693529510498047, 0.026687103271484373, 0.02667353630065918, 0.026888191223144533, 0.028639232635498047, 0.026766847610473633, 0.026763776779174804, 0.027495647430419923, 0.02699078369140625, 0.02672831916809082, 0.02661859130859375, 0.026717344284057618, 0.02656892776489258, 0.026542720794677736, 0.02653388786315918, 0.026658239364624022, 0.02650169563293457, 0.02653183937072754, 0.026533184051513673, 0.026616512298583986, 0.026816511154174806, 0.026454015731811522, 0.026542144775390623, 0.02658051109313965, 0.026552736282348634, 0.02655232048034668, 0.02649087905883789, 0.02687548828125, 0.026500736236572266, 0.026577695846557618, 0.0266744327545166, 0.02657766342163086, 0.02646575927734375, 0.026468896865844728, 0.026492448806762697, 0.02648726463317871, 0.02876969528198242, 0.02676576042175293, 0.02670355224609375, 0.026792192459106447, 
0.02654947280883789, 0.02656358337402344, 0.026558464050292968, 0.02651033592224121, 0.026569440841674806, 0.026650720596313477, 0.02658531188964844, 0.02658710479736328, 0.026685440063476562, 0.026498912811279297, 0.0265296630859375, 0.02658505630493164, 0.026554847717285158, 0.026531455993652343, 0.026481983184814453, 0.02674508857727051, 0.026483232498168946, 0.026680864334106446, 0.02670774459838867, 0.026606304168701172, 0.026507360458374023, 0.02654003143310547, 0.026479616165161132, 0.02646668815612793, 0.02647104072570801, 0.026580991744995116, 0.026486719131469726, 0.026503231048583983, 0.026474079132080077, 0.026982816696166992, 0.02704915237426758, 0.02692793655395508, 0.026963327407836916, 0.026651136398315428, 0.02687398338317871, 0.02679529571533203, 0.026804000854492187, 0.02706732749938965, 0.02701260757446289, 0.02681292724609375, 0.026968160629272462, 0.02672012710571289, 0.02689436721801758, 0.02696124839782715, 0.027002975463867186, 0.026962495803833007, 0.0267509765625, 0.027092767715454102, 0.027185312271118166, 0.026819679260253908, 0.02702025604248047, 0.026938623428344726, 0.02715724754333496, 0.027031551361083983, 0.02683193588256836, 0.02671673583984375, 0.02682304000854492, 0.026728191375732423, 0.026773759841918945, 0.02829136085510254, 0.026837312698364257, 0.026775520324707033, 0.026691871643066405, 0.02672768020629883, 0.02643391990661621, 0.027168319702148436, 0.02658336067199707, 0.02665340805053711, 0.026549087524414063, 0.026688127517700194, 0.026677120208740236, 0.026601919174194334, 0.026695680618286134, 0.02675302314758301, 0.026611167907714842, 0.02655081558227539, 0.026597375869750976, 0.02657868766784668, 0.02676153564453125, 0.02706425666809082, 0.027074560165405274, 0.026787071228027343, 0.026630144119262695, 0.02663264083862305, 0.026705888748168944, 0.02653379249572754, 0.026519968032836915, 0.02669366455078125, 0.02653593635559082, 0.026517375946044922, 0.026488639831542968, 0.02642576026916504, 0.02651945686340332, 0.026482080459594725, 0.026536544799804686, 0.02655135917663574, 0.02643040084838867, 0.026486335754394533, 0.026513471603393554, 0.026518016815185546, 0.026516544342041017, 0.0264484806060791, 0.026435808181762697, 0.026556415557861326, 0.026488832473754883, 0.026703712463378906, 0.02654867172241211, 0.026490432739257812, 0.026536096572875978, 0.026408960342407226, 0.02662006378173828, 0.026535776138305663, 0.02634752082824707, 0.026693599700927734, 0.026572832107543944, 0.026423295974731444, 0.026441728591918946, 0.026540063858032228, 0.026429407119750975, 0.026502527236938477, 0.026533504486083985, 0.026586111068725587, 0.027867136001586915, 0.026956991195678712, 0.026589599609375, 0.026546688079833985, 0.02645187187194824, 0.02649087905883789, 0.02708665657043457, 0.026922624588012697, 0.02682304000854492, 0.026667200088500976, 0.02665827178955078, 0.02677609634399414, 0.02680569648742676, 0.02688467216491699, 0.027156415939331054, 0.026876031875610353, 0.0268287353515625, 0.026849279403686522, 0.02684873580932617, 0.027077152252197267, 0.026846303939819335, 0.026783712387084962, 0.026749887466430665, 0.0267509765625, 0.02672230339050293, 0.026927104949951174, 0.026877376556396486, 0.026733119964599608, 0.026580991744995116, 0.026634239196777345, 0.02669152069091797, 0.02667526435852051, 0.027150047302246093, 0.026998559951782228, 0.026993087768554688, 0.026902591705322266, 0.02697216033935547, 0.026888191223144533, 0.027183103561401366, 0.026963584899902342, 0.026900863647460936, 0.02692300796508789, 0.026798336029052735, 
0.026656000137329102, 0.02681907272338867, 0.026930912017822266, 0.02686185646057129, 0.026879999160766603, 0.026636287689208983, 0.026681087493896485, 0.02668729591369629, 0.02666700744628906, 0.02659987258911133, 0.026771135330200195, 0.0269683837890625, 0.027066688537597656, 0.02691654396057129, 0.026959871292114256, 0.02674278450012207, 0.026834943771362304, 0.027188768386840822, 0.02696620750427246, 0.026935583114624025, 0.02784022331237793, 0.026824192047119142, 0.026845375061035157, 0.026636896133422853, 0.026520896911621093, 0.02643833541870117, 0.026703296661376955, 0.026666656494140625, 0.026925472259521483, 0.027006528854370118, 0.027095712661743165, 0.02687171173095703, 0.026727935791015626, 0.026858367919921876, 0.026970111846923828, 0.02675062370300293, 0.02682304000854492, 0.026810335159301757, 0.026779647827148437, 0.02680012893676758, 0.02682255935668945, 0.026703071594238282, 0.026731391906738282, 0.026640031814575197, 0.026495328903198244, 0.02656025505065918, 0.026566911697387695, 0.026607616424560547, 0.02656870460510254, 0.02681145668029785, 0.026985408782958985, 0.026480640411376953, 0.02660918426513672, 0.026547679901123045, 0.026652959823608397, 0.026536384582519532, 0.026564895629882814, 0.02667900848388672, 0.026540159225463867, 0.027338207244873045, 0.027022016525268554, 0.026728191375732423, 0.02649932861328125, 0.026371679306030273, 0.026812543869018556, 0.026743072509765625, 0.026752031326293946, 0.027038816452026368, 0.026680288314819337, 0.02648566436767578, 0.026514879226684572, 0.02640716743469238, 0.026468671798706055, 0.026488832473754883, 0.026505216598510743, 0.027240447998046875, 0.02705120086669922, 0.02680659294128418, 0.026644063949584962, 0.02690732765197754, 0.03175551986694336, 0.02942220878601074, 0.02713225555419922]",tokens/s,37.168399999304256,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,973.18912,13356.630016,0.0,12954.107904,12898.466816,s,1,8.8016494140625,8.8016494140625,0.0,8.8016494140625,8.8016494140625,8.8016494140625,8.8016494140625,[8.8016494140625],,kWh,7.303122370833155e-06,7.981384036733835e-07,4.286947873997221e-06,1.2388208648503759e-05,,MB,1366.99904,13373.407232,0.0,12960.39936,10667.938816,s,10,3.1930947570800785,0.31930947570800783,0.004485436503169583,0.3204183502197266,0.3227578887939453,0.32288155364990234,0.32298048553466796,"[0.3068966064453125, 0.32053115844726565, 0.31932736206054685, 0.3203055419921875, 0.318093994140625, 0.3179678039550781, 0.32273040771484374, 0.32179507446289063, 0.3230052185058594, 0.32244158935546874]",tokens/s,801.7300439718202,kWh,9.12179740664077e-06,1.005958976963565e-06,6.0668190895624445e-06,1.619457547316678e-05,tokens/kWh,15807762.322894672,MB,1367.269376,13375.504384,0.0,12962.496512,10698.013696,s,10,32.38315112304687,3.2383151123046874,0.004668977737298212,3.2394345703125,3.2428379150390625,3.2438355834960935,3.2446337182617184,"[3.229404052734375, 3.232055908203125, 
3.234654541015625, 3.244833251953125, 3.2426162109375, 3.24179833984375, 3.237837158203125, 3.24108251953125, 3.240552001953125, 3.238317138671875]",tokens/s,19.454561342908757,kWh,9.494312268419344e-05,1.047155754380731e-05,6.294498611983748e-05,0.00016835966634783821,tokens/kWh,374198.8884073893,,s,630,32.381324684143074,0.05139892807006836,0.0005364337877569262,0.0512925910949707,0.051665264892578125,0.05191661281585693,0.05493612583160401,"[0.05494374465942383, 0.05165670394897461, 0.051122177124023435, 0.05126470565795899, 0.05104089736938477, 0.050937664031982424, 0.05087680053710938, 0.05254886245727539, 0.0509114875793457, 0.051222496032714844, 0.051925407409667966, 0.051243263244628905, 0.050926849365234374, 0.05107283020019531, 0.05090591812133789, 0.051033374786376956, 0.05098704147338867, 0.05093824005126953, 0.051173694610595705, 0.05146214294433594, 0.05122048187255859, 0.051062782287597655, 0.05106687927246094, 0.05106687927246094, 0.051318782806396485, 0.05114028930664063, 0.051173728942871095, 0.05108473587036133, 0.05126979064941406, 0.051021247863769534, 0.05108745574951172, 0.050909503936767575, 0.05112902450561523, 0.05104009628295898, 0.051043903350830075, 0.05115692901611328, 0.05160831832885742, 0.05115235137939453, 0.05115523147583008, 0.05124300765991211, 0.05111603164672852, 0.05134460830688477, 0.05105059051513672, 0.05140137481689453, 0.051014816284179684, 0.0512125129699707, 0.051203968048095704, 0.051261215209960936, 0.05111705780029297, 0.05111164855957031, 0.05101916885375977, 0.05101654434204102, 0.051120159149169925, 0.05110483169555664, 0.05130451202392578, 0.05111648178100586, 0.05127622222900391, 0.05112531280517578, 0.05149792098999023, 0.05148083114624023, 0.051560192108154296, 0.051332225799560545, 0.05130944061279297, 0.05520339202880859, 0.051968830108642575, 0.051318302154541015, 0.05116294479370117, 0.051017601013183596, 0.050985889434814455, 0.05147430419921875, 0.051113983154296876, 0.051050369262695315, 0.05100479888916016, 0.051333889007568356, 0.051222270965576175, 0.05126579284667969, 0.05099900817871094, 0.05112451171875, 0.050978015899658204, 0.05121513748168945, 0.051079166412353515, 0.05159731292724609, 0.05151670455932617, 0.05152431869506836, 0.05129830551147461, 0.05130649566650391, 0.051014881134033206, 0.05099760055541992, 0.051294654846191404, 0.05111014556884766, 0.05115884780883789, 0.050990848541259765, 0.05114080047607422, 0.05123891067504883, 0.05116723251342774, 0.05103411102294922, 0.05114870452880859, 0.05101987075805664, 0.05107455825805664, 0.051636062622070315, 0.051538719177246096, 0.051375423431396484, 0.0512476806640625, 0.051600990295410154, 0.051288768768310546, 0.051248863220214845, 0.051171329498291014, 0.051318782806396485, 0.05120940780639648, 0.051235328674316405, 0.051042625427246094, 0.05119081497192383, 0.05149900817871094, 0.05148566436767578, 0.05134070587158203, 0.05096713638305664, 0.05138022232055664, 0.051138561248779295, 0.0513875846862793, 0.05115740966796875, 0.05125350570678711, 0.051195518493652344, 0.05132524871826172, 0.05119596862792969, 0.05118582534790039, 0.051111934661865234, 0.054994720458984375, 0.05187398529052734, 0.05127167892456055, 0.05100694274902344, 0.05108544158935547, 0.05178121566772461, 0.05100601577758789, 0.05132886505126953, 0.05117974472045898, 0.05116332626342773, 0.051073024749755856, 0.051212287902832034, 0.05099679946899414, 0.05130080032348633, 0.05101363372802734, 0.05128345489501953, 0.0511341438293457, 0.051139392852783204, 0.05144780731201172, 
0.0516434555053711, 0.05151177597045899, 0.05131721496582031, 0.05184297561645508, 0.051404895782470705, 0.05127926254272461, 0.051105438232421876, 0.051205055236816406, 0.051097599029541016, 0.051197662353515624, 0.051053886413574216, 0.051160030364990235, 0.050956287384033204, 0.05120307159423828, 0.051184608459472654, 0.051141983032226564, 0.05103647994995117, 0.0511328010559082, 0.05139865493774414, 0.051353599548339846, 0.05197619247436523, 0.05138022232055664, 0.05153756713867187, 0.051296607971191406, 0.051343360900878904, 0.05126067352294922, 0.05121523284912109, 0.05114662551879883, 0.05119385528564453, 0.051294208526611325, 0.05128806304931641, 0.05144934463500977, 0.05113907241821289, 0.05139401626586914, 0.051149120330810545, 0.05133871841430664, 0.051283905029296875, 0.051399070739746096, 0.05130057525634766, 0.05136198425292969, 0.05140035247802734, 0.051240673065185545, 0.051200286865234375, 0.05137238311767578, 0.05483980941772461, 0.05174204635620117, 0.0516591682434082, 0.051133697509765624, 0.051208961486816404, 0.051089408874511716, 0.05116038513183594, 0.0510667839050293, 0.05123126220703125, 0.051119808197021485, 0.05159164810180664, 0.051027294158935546, 0.05122943878173828, 0.05109932708740234, 0.05105696105957031, 0.05123641586303711, 0.05104275131225586, 0.051378177642822265, 0.051714046478271485, 0.05184921646118164, 0.05154611206054688, 0.05199257659912109, 0.05120409774780273, 0.051236862182617186, 0.05148262405395508, 0.051111934661865234, 0.05128915023803711, 0.05119388961791992, 0.05142211151123047, 0.05113446426391602, 0.051736030578613285, 0.051128257751464845, 0.051197566986083985, 0.05106991958618164, 0.05127519989013672, 0.05120003128051758, 0.05126825714111328, 0.05130841445922851, 0.05138431930541992, 0.052280670166015626, 0.05148944091796875, 0.05168742370605469, 0.05124025726318359, 0.05147872161865234, 0.05121279907226563, 0.05120614242553711, 0.051326976776123044, 0.05122662353515625, 0.05127718353271484, 0.05129836654663086, 0.05178188705444336, 0.05162140655517578, 0.05183977508544922, 0.051644256591796875, 0.05181455993652344, 0.05381324768066406, 0.05163727951049805, 0.05166524887084961, 0.051554943084716795, 0.05210726547241211, 0.05161577606201172, 0.05184918212890625, 0.051269279479980466, 0.05504940795898437, 0.051921249389648434, 0.051292095184326175, 0.05118182373046875, 0.05134569549560547, 0.05118726348876953, 0.051159454345703126, 0.051334590911865235, 0.051278430938720705, 0.051361793518066405, 0.05138227081298828, 0.05181545639038086, 0.0512132797241211, 0.051435134887695313, 0.05117580795288086, 0.05113446426391602, 0.0513966064453125, 0.05118668746948242, 0.05151359939575195, 0.05149568176269531, 0.053065727233886716, 0.05139836883544922, 0.05152163314819336, 0.051251392364501956, 0.051258846282958986, 0.05122921752929688, 0.051584896087646485, 0.05147843170166016, 0.051496158599853514, 0.05161471939086914, 0.051246177673339846, 0.051499073028564456, 0.05159968185424805, 0.05171254348754883, 0.05118975830078125, 0.05140054321289062, 0.05120425415039063, 0.05136588668823242, 0.05138022232055664, 0.05130022430419922, 0.052281471252441404, 0.051351551055908204, 0.05143142318725586, 0.0511280632019043, 0.05122870254516602, 0.05114287948608399, 0.051589279174804686, 0.05131657409667969, 0.05116249465942383, 0.05116972732543945, 0.051198143005371094, 0.05162179183959961, 0.05135164642333984, 0.051705856323242184, 0.05120355224609375, 0.051321502685546874, 0.05131660842895508, 0.05132467269897461, 0.05134771347045899, 
0.05138140869140625, 0.05143228912353515, 0.051219520568847654, 0.05153273773193359, 0.05532966232299805, 0.051970081329345705, 0.05126755142211914, 0.05139046478271484, 0.051156993865966796, 0.051224414825439456, 0.051154335021972655, 0.05160217666625976, 0.05125107192993164, 0.051148929595947266, 0.05125734329223633, 0.05117536163330078, 0.05130169677734375, 0.05169375991821289, 0.051436000823974606, 0.05124028778076172, 0.05166976165771484, 0.051236862182617186, 0.051629631042480466, 0.05152608108520508, 0.051681278228759765, 0.05135283279418945, 0.05126838302612305, 0.05197427368164063, 0.05121007919311524, 0.05137203216552735, 0.05115084838867188, 0.05149398422241211, 0.05123107147216797, 0.0512743034362793, 0.051164447784423826, 0.05123337554931641, 0.05111171340942383, 0.05113056182861328, 0.0512718391418457, 0.05134646224975586, 0.052179134368896485, 0.05165340805053711, 0.05167513656616211, 0.051418495178222653, 0.05143318557739258, 0.05133356857299805, 0.051302879333496094, 0.05129011154174805, 0.05138431930541992, 0.05149462509155273, 0.0511748161315918, 0.05129305648803711, 0.051391841888427735, 0.05131894302368164, 0.05122057723999023, 0.051359294891357425, 0.05120483016967774, 0.05146841430664063, 0.051394561767578124, 0.05148700714111328, 0.05158992004394531, 0.05149792098999023, 0.05159526443481445, 0.051388416290283206, 0.051369983673095705, 0.05139014434814453, 0.05136620712280274, 0.05488188934326172, 0.05169740676879883, 0.05136041641235352, 0.05155846405029297, 0.051463550567626956, 0.05115347290039062, 0.051423423767089846, 0.051439422607421875, 0.051214336395263675, 0.05107519912719727, 0.051171199798583984, 0.051160511016845704, 0.05112854385375976, 0.051212638854980466, 0.051248958587646484, 0.05135526275634766, 0.05128441619873047, 0.051611358642578126, 0.05160182571411133, 0.05193734359741211, 0.05181024169921875, 0.05143756866455078, 0.051223712921142577, 0.05112710571289063, 0.05124240112304688, 0.051153537750244144, 0.051393985748291016, 0.051182144165039065, 0.05124095916748047, 0.051130367279052735, 0.05127347183227539, 0.05112390518188477, 0.0512927360534668, 0.051171329498291014, 0.051187713623046874, 0.05120937728881836, 0.05128073501586914, 0.051461761474609374, 0.05140028762817383, 0.05169359970092773, 0.05138508987426758, 0.05135760116577148, 0.05134700775146484, 0.051214336395263675, 0.051173919677734374, 0.05144780731201172, 0.051451904296875, 0.0512204475402832, 0.05114064025878906, 0.05114271926879883, 0.05151478576660156, 0.05129244613647461, 0.05132835388183594, 0.051092384338378906, 0.051305503845214845, 0.05155939102172852, 0.051507198333740234, 0.05126553726196289, 0.05127372741699219, 0.051324512481689455, 0.0513458251953125, 0.05163772964477539, 0.05132342529296875, 0.05529977416992188, 0.05210284805297852, 0.05237001419067383, 0.051170974731445315, 0.051077152252197264, 0.05129983901977539, 0.05113651275634765, 0.051300735473632814, 0.05113695907592773, 0.05117849731445313, 0.05107814407348633, 0.05135564804077149, 0.0510750732421875, 0.051501056671142575, 0.051173343658447265, 0.05149494552612305, 0.051144126892089845, 0.05128249740600586, 0.05184102249145508, 0.0520513916015625, 0.0519931526184082, 0.051485759735107425, 0.05141190338134766, 0.051305503845214845, 0.05125321578979492, 0.051637248992919924, 0.05149248123168945, 0.051112319946289064, 0.05107097625732422, 0.05134745788574219, 0.05122252655029297, 0.05145529556274414, 0.0514727668762207, 0.051324993133544924, 0.051129631042480465, 0.05138438415527344, 0.05126646423339844, 
0.05135769653320312, 0.051410945892333984, 0.05147878265380859, 0.0513504638671875, 0.05138310241699219, 0.05152108764648437, 0.05152384185791015, 0.05144412612915039, 0.051416862487792966, 0.05134115219116211, 0.051159198760986326, 0.051535041809082034, 0.05152646255493164, 0.051122177124023435, 0.05112198257446289, 0.05102201461791992, 0.05149423980712891, 0.05137392044067383, 0.051366561889648436, 0.051250526428222656, 0.051665409088134766, 0.051406654357910156, 0.05133488082885742, 0.051346206665039064, 0.051253246307373046, 0.051257217407226566, 0.05491747283935547, 0.0517501106262207, 0.051200191497802736, 0.05116988754272461, 0.05109964752197266, 0.05125356674194336, 0.05131203079223633, 0.05119823837280273, 0.05118492889404297, 0.05110160064697265, 0.05127654266357422, 0.05118899154663086, 0.051311424255371094, 0.05155750274658203, 0.05137510299682617, 0.05118143844604492, 0.0514252815246582, 0.05122662353515625, 0.052543552398681644, 0.05149456024169922, 0.051936542510986325, 0.05143769454956055, 0.05127980804443359, 0.05144057464599609, 0.051294208526611325, 0.05162803268432617, 0.05123072052001953, 0.05130854415893555, 0.05114265441894531, 0.05129011154174805, 0.05124505615234375, 0.051309921264648437, 0.051105758666992185, 0.05124576187133789, 0.05108060836791992, 0.05123952102661133, 0.05160140609741211, 0.05153164672851562, 0.05166204833984375, 0.05142819213867188, 0.05152364730834961, 0.051320831298828126, 0.05138431930541992, 0.05131846237182617, 0.05144521713256836, 0.051251232147216795, 0.051381057739257815, 0.051633697509765625, 0.05125350570678711, 0.05134950256347656, 0.05119772720336914, 0.05115539169311523, 0.05102796936035156, 0.05125734329223633, 0.05164988708496094, 0.051490463256835935, 0.05149798583984375, 0.05125734329223633, 0.051816417694091794, 0.05149311828613281, 0.051699104309082033, 0.05134783935546875, 0.05142118453979492, 0.05520105743408203, 0.05172067260742187, 0.051159423828125, 0.0510807991027832, 0.05134719848632813, 0.05122524642944336, 0.05130188751220703, 0.05112473678588867, 0.051259326934814456, 0.05133679962158203, 0.0512393913269043, 0.05128192138671875, 0.05120614242553711, 0.051138561248779295, 0.05139865493774414, 0.05135078430175781, 0.05119161605834961, 0.05197715377807617, 0.05158457565307617, 0.051910945892333984, 0.05158313751220703, 0.0514334716796875, 0.0515722541809082, 0.051276031494140624, 0.05114492797851562, 0.05114028930664063, 0.05138832092285156, 0.05113471984863281, 0.05154431915283203, 0.05125923156738281, 0.051517440795898435, 0.05116668701171875, 0.05119855880737305, 0.05119180679321289, 0.051340606689453124, 0.051069087982177734, 0.05120214462280273, 0.051165630340576175, 0.05142902374267578, 0.05165296173095703, 0.051525760650634765, 0.05151935958862305, 0.05168537521362305, 0.05184511947631836, 0.05117542266845703, 0.05118566513061523, 0.05125235366821289, 0.051253856658935545, 0.05133747100830078, 0.05110787200927734, 0.051408447265625, 0.05121068954467774, 0.05138022232055664, 0.05117337417602539, 0.05124870300292969, 0.05110393524169922, 0.05133132934570313, 0.05133935928344727, 0.051322017669677734, 0.05137206268310547, 0.05131951904296875, 0.051321887969970705, 0.05127881622314453]",tokens/s,19.455658659588657,, 
float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 790, in __init__ self.model = RecurrentGemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 660, in __init__ [RecurrentGemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 660, in [RecurrentGemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 490, in __init__ self.mlp_block = RecurrentGemmaMlp(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 472, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", 
line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 452707 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1005.273088,7163.74016,0.0,6761.218048,6730.975744,s,1,7.4978857421875,7.4978857421875,0.0,7.4978857421875,7.4978857421875,7.4978857421875,7.4978857421875,[7.4978857421875],,kWh,7.481095095772616e-06,8.033981297668026e-07,4.11250329002133e-06,1.2396996515560748e-05,,MB,1378.177024,7197.294592,0.0,6784.28672,5879.090688,s,10,5.866066101074218,0.5866066101074219,0.0047778593045656416,0.58755078125,0.5920167358398437,0.5930582946777344,0.5938915417480469,"[0.578309326171875, 0.5869566650390625, 0.5798045043945312, 0.5858963012695313, 0.5890794067382813, 0.5917852783203125, 0.5891841430664062, 0.5828057250976563, 0.594099853515625, 0.5881448974609375]",tokens/s,436.4083111049843,kWh,1.7097813718287063e-05,1.8856021961237402e-06,1.1168542885446145e-05,3.015195879985695e-05,tokens/kWh,8490327.334926398,MB,1415.446528,7199.391744,0.0,6784.28672,5879.093248,s,10,23.744656250000006,2.3744656250000005,0.006573245566875472,2.376098388671875,2.38122841796875,2.3812510498046877,2.3812691552734377,"[2.360286376953125, 2.378990966796875, 2.37007666015625, 2.375188232421875, 2.381273681640625, 2.381223388671875, 2.377008544921875, 2.372578369140625, 2.380659912109375, 2.3673701171875]",tokens/s,26.532285553723266,kWh,6.931746024420623e-05,7.645803004321801e-06,4.5940836135351566e-05,0.0001229040993838796,tokens/kWh,512594.78175113845,,s,630,23.74291019439697,0.03768715903872535,0.0004946232225286581,0.0376110725402832,0.038221816253662104,0.0385123176574707,0.03950408603668213,"[0.03860771179199219, 0.037502559661865234, 0.037644126892089846, 0.03783123016357422, 0.03768259048461914, 0.03839430236816406, 0.03766105651855469, 0.037692928314208986, 0.03762435150146484, 0.0375700798034668, 0.038271392822265625, 0.03781612777709961, 0.03806208038330078, 0.03783654403686523, 0.03757078552246094, 0.03760540771484375, 0.03809628677368164, 0.0377083854675293, 0.038043647766113284, 0.03761875152587891, 0.03744812774658203, 0.037499393463134766, 0.03749039840698242, 
0.03750940704345703, 0.03775692749023438, 0.037795841217041014, 0.037935104370117184, 0.03749273681640625, 0.037424224853515625, 0.03726956939697266, 0.037104480743408205, 0.037400577545166014, 0.03792486572265625, 0.037271873474121094, 0.0371135368347168, 0.037384193420410154, 0.037029888153076174, 0.03726515197753906, 0.03739846420288086, 0.03703020858764648, 0.037171199798583986, 0.03706060791015625, 0.03691110229492187, 0.037054145812988284, 0.037089599609375, 0.03720140838623047, 0.037095935821533206, 0.037212158203125, 0.037162879943847656, 0.03705036926269531, 0.03703180694580078, 0.03714278411865234, 0.0374365119934082, 0.037351680755615235, 0.037325439453125, 0.03718262481689453, 0.03736172866821289, 0.037055038452148435, 0.037074302673339846, 0.03732729721069336, 0.03715238571166992, 0.03705120086669922, 0.03724044799804688, 0.03859999847412109, 0.03738489532470703, 0.037169151306152344, 0.037083072662353514, 0.038082942962646485, 0.03841737747192383, 0.03784163284301758, 0.03766873550415039, 0.03754611206054687, 0.03752140808105469, 0.037822463989257815, 0.037310462951660156, 0.03696844863891602, 0.03722409439086914, 0.03751356887817383, 0.0376110725402832, 0.03806588745117188, 0.037714656829833985, 0.03761321640014648, 0.03775727844238281, 0.0380682258605957, 0.03795510482788086, 0.03790860748291015, 0.03793686294555664, 0.03766320037841797, 0.03749504089355469, 0.037408672332763675, 0.03749683380126953, 0.0374703369140625, 0.037937023162841796, 0.03763814544677734, 0.03796688079833985, 0.037501502990722656, 0.037595550537109376, 0.03823769760131836, 0.037660736083984375, 0.038656448364257814, 0.037852256774902344, 0.037388702392578126, 0.037388446807861325, 0.037276222229003904, 0.03767184066772461, 0.03949356842041016, 0.03770028686523438, 0.03745027160644531, 0.0374678726196289, 0.03748236846923828, 0.03874006271362305, 0.03723891067504883, 0.03741068649291992, 0.03750096130371094, 0.0400750732421875, 0.03870671844482422, 0.03759580612182617, 0.037535137176513675, 0.037380992889404295, 0.03712905502319336, 0.03740758514404297, 0.0373265266418457, 0.03738044738769531, 0.03844492721557617, 0.038338687896728514, 0.037855232238769534, 0.03867155075073242, 0.03849440002441406, 0.03776761627197266, 0.038441150665283204, 0.037550079345703126, 0.03757670211791992, 0.039495681762695314, 0.03812704086303711, 0.03769769668579102, 0.03717161560058594, 0.03738201522827148, 0.03797148895263672, 0.03755887985229492, 0.037473857879638674, 0.03785772705078125, 0.03779769515991211, 0.03806636810302735, 0.03758700942993164, 0.03779715347290039, 0.037634719848632814, 0.037425151824951174, 0.03778566360473633, 0.037518592834472654, 0.03780239868164063, 0.0378309440612793, 0.03791619110107422, 0.037323040008544923, 0.03747180938720703, 0.03772902297973633, 0.038217601776123045, 0.0377874870300293, 0.03782672119140625, 0.03824367904663086, 0.037478496551513675, 0.037402591705322265, 0.037335647583007815, 0.037427200317382815, 0.037746654510498044, 0.037679134368896486, 0.03740671920776367, 0.037418304443359376, 0.037449726104736326, 0.03727635192871094, 0.03874364852905274, 0.037043903350830076, 0.03751206588745117, 0.037232318878173826, 0.03765161514282227, 0.03729334259033203, 0.037205726623535156, 0.03699507141113281, 0.03721980667114258, 0.036978591918945314, 0.037053054809570315, 0.03700646209716797, 0.03691609573364258, 0.03695942306518555, 0.037665599822998046, 0.03756012725830078, 0.037789886474609374, 0.037085182189941404, 0.03712992095947266, 0.03724476623535156, 0.03903603363037109, 
0.03796640014648438, 0.03817708969116211, 0.037701438903808594, 0.03746835327148437, 0.03750092697143555, 0.03747427368164063, 0.038066207885742186, 0.037369857788085936, 0.037215232849121094, 0.03753062438964844, 0.037523712158203125, 0.037994400024414066, 0.03811625671386719, 0.03775564956665039, 0.037742176055908204, 0.037802623748779296, 0.03776713562011719, 0.03771539306640625, 0.03771859359741211, 0.03793305587768555, 0.037639392852783206, 0.03781302261352539, 0.0378994255065918, 0.03739100646972656, 0.037429439544677735, 0.03810713577270508, 0.03816233444213867, 0.03786966323852539, 0.03809920120239258, 0.0377341423034668, 0.03781324768066406, 0.03765760040283203, 0.03826393508911133, 0.037327518463134764, 0.03722671890258789, 0.03713008117675781, 0.03713468933105469, 0.03714438247680664, 0.03832831954956055, 0.036982784271240236, 0.03733913421630859, 0.03739971160888672, 0.03790313720703125, 0.037935169219970706, 0.037931198120117186, 0.037588382720947264, 0.037464481353759765, 0.037279361724853514, 0.03744527816772461, 0.03764876937866211, 0.03740707015991211, 0.037781505584716796, 0.037351425170898435, 0.0373551025390625, 0.03739279937744141, 0.037617664337158206, 0.03787980651855469, 0.0378919677734375, 0.037950782775878905, 0.037989185333251956, 0.03771337509155273, 0.038020896911621097, 0.03893657684326172, 0.03790563201904297, 0.037994369506835934, 0.03782675170898438, 0.03786620712280273, 0.03761078262329102, 0.03754035186767578, 0.03791209411621094, 0.03785798263549805, 0.03794124984741211, 0.03902054214477539, 0.03765651321411133, 0.0373351058959961, 0.037369857788085936, 0.03719168090820312, 0.037222400665283206, 0.03759513473510742, 0.037838848114013675, 0.037466110229492186, 0.03944243240356445, 0.03766796875, 0.03786409759521484, 0.03848563385009766, 0.037929569244384766, 0.038086048126220705, 0.037835617065429684, 0.03773209762573242, 0.037793342590332034, 0.03764064025878906, 0.037556224822998044, 0.03748659133911133, 0.03754598236083984, 0.03740262222290039, 0.03752755355834961, 0.037564449310302735, 0.037412830352783207, 0.03807564926147461, 0.03767737579345703, 0.03802767944335937, 0.03828675079345703, 0.038075008392333985, 0.037624992370605466, 0.0374791374206543, 0.037945472717285156, 0.037688385009765624, 0.037669761657714844, 0.03759110260009765, 0.037477760314941405, 0.037464702606201175, 0.0373493766784668, 0.03722012710571289, 0.03755782318115235, 0.037771263122558595, 0.038013118743896485, 0.03838614273071289, 0.0378081283569336, 0.03769753646850586, 0.03862527847290039, 0.0376484489440918, 0.037775295257568356, 0.037926910400390625, 0.03765657424926758, 0.03750297546386719, 0.039507518768310546, 0.03796627044677734, 0.03764355087280274, 0.037821247100830076, 0.03736304092407226, 0.03759097671508789, 0.037593727111816404, 0.037566463470458986, 0.037915870666503905, 0.03779199981689453, 0.03822022247314453, 0.039069793701171876, 0.040210113525390626, 0.038373600006103514, 0.03808879852294922, 0.038311649322509765, 0.0383177604675293, 0.03813846588134766, 0.03748454284667969, 0.03812966537475586, 0.03794147109985352, 0.03778742218017578, 0.038284446716308596, 0.0383612174987793, 0.03790921783447266, 0.03835084915161133, 0.03886284637451172, 0.03782451248168945, 0.03800064086914062, 0.03781391906738281, 0.03767660903930664, 0.037983009338378906, 0.0376258544921875, 0.03745382308959961, 0.03753062438964844, 0.03716716766357422, 0.039865055084228516, 0.03746563339233398, 0.037303009033203126, 0.03732191848754883, 0.037237663269042966, 0.03753340911865234, 
0.03758816146850586, 0.03761222457885742, 0.037151008605957034, 0.03710540771484375, 0.03708870315551758, 0.0371860466003418, 0.03725958251953125, 0.03776921463012695, 0.03751728057861328, 0.03723040008544922, 0.03744585418701172, 0.037187583923339845, 0.03709132766723633, 0.03768112182617187, 0.037705951690673825, 0.03738604736328125, 0.03748374557495117, 0.03727417755126953, 0.03718479919433594, 0.03731081771850586, 0.03738889694213867, 0.038635646820068356, 0.03768979263305664, 0.038107391357421874, 0.037760032653808596, 0.03829948806762695, 0.038288257598876954, 0.03827443313598633, 0.03784540939331055, 0.03761174392700195, 0.037416961669921874, 0.03746815872192383, 0.03758428955078125, 0.0374354248046875, 0.03806470489501953, 0.037548030853271484, 0.037486785888671874, 0.03752444839477539, 0.03780694580078125, 0.03773235321044922, 0.037746688842773435, 0.03823616027832031, 0.0380579833984375, 0.037996543884277346, 0.037891681671142576, 0.03752719879150391, 0.037231201171875, 0.03714384078979492, 0.03742585754394531, 0.037316287994384766, 0.037580318450927734, 0.038009822845458986, 0.03783475112915039, 0.03819033432006836, 0.0376673583984375, 0.03744585418701172, 0.037894142150878905, 0.037466110229492186, 0.03813785552978516, 0.03740262222290039, 0.03726131057739258, 0.03737395095825195, 0.03768060684204102, 0.03769152069091797, 0.037429664611816404, 0.037203231811523435, 0.037163455963134764, 0.03717763137817383, 0.03768320083618164, 0.037658462524414064, 0.03740646362304687, 0.037876129150390625, 0.03743334579467773, 0.03729817581176758, 0.03769548797607422, 0.037564414978027344, 0.041463935852050784, 0.037885822296142575, 0.037482494354248046, 0.03732275390625, 0.037703678131103514, 0.037591041564941405, 0.03755567932128906, 0.03745846557617188, 0.03869651031494141, 0.037904830932617185, 0.03769286346435547, 0.0383125114440918, 0.0385269775390625, 0.03811532974243164, 0.03786880111694336, 0.037666561126708985, 0.037803009033203126, 0.03790848159790039, 0.03799977493286133, 0.03770044708251953, 0.0373125114440918, 0.03732851028442383, 0.03704051208496094, 0.03728806304931641, 0.037865345001220706, 0.03787161636352539, 0.03787180709838867, 0.03803104019165039, 0.03749929428100586, 0.03882771301269531, 0.03781635284423828, 0.038005889892578124, 0.037718910217285156, 0.03750896072387695, 0.03741036987304688, 0.037294689178466796, 0.03730947113037109, 0.03729097747802734, 0.03741900634765625, 0.03749273681640625, 0.037748737335205076, 0.03786137771606445, 0.03773583984375, 0.037666656494140624, 0.038171390533447265, 0.03778271865844727, 0.037665119171142576, 0.03778384017944336, 0.037568702697753906, 0.03736576080322265, 0.03738784027099609, 0.03751913452148437, 0.03745859146118164, 0.03751119995117187, 0.037560287475585936, 0.03724016189575195, 0.038531742095947265, 0.03762790298461914, 0.0376495361328125, 0.037741439819335934, 0.03751721572875977, 0.03714217758178711, 0.03720806503295898, 0.03709996795654297, 0.03717907333374024, 0.037628223419189456, 0.037614879608154295, 0.03743612670898438, 0.03723433685302734, 0.03709942245483398, 0.037280193328857424, 0.03827040100097656, 0.03745849609375, 0.037392383575439454, 0.037262367248535155, 0.03799353790283203, 0.03759856033325195, 0.037752479553222654, 0.03781516647338867, 0.037596511840820315, 0.03741561508178711, 0.037504959106445315, 0.038279232025146485, 0.03764633560180664, 0.03866787338256836, 0.0390412483215332, 0.03758451080322266, 0.03840671920776367, 0.0373125114440918, 0.037384193420410154, 0.037367774963378904, 
0.03713846588134766, 0.03791452789306641, 0.03999123382568359, 0.03776489639282227, 0.03804345703125, 0.03751993560791016, 0.037922080993652345, 0.03807823944091797, 0.037929920196533205, 0.0376110725402832, 0.0375577278137207, 0.03756345748901367, 0.03889289474487305, 0.037510879516601564, 0.03764860916137695, 0.03753433609008789, 0.037400577545166014, 0.03832169723510742, 0.03753609466552734, 0.03733283233642578, 0.03848220825195312, 0.04050739288330078, 0.037692928314208986, 0.037875614166259765, 0.03790703964233398, 0.03792281723022461, 0.037394432067871096, 0.037233856201171874, 0.037754878997802735, 0.03746419143676758, 0.03773846435546875, 0.037595329284667967, 0.03748006439208985, 0.03736054229736328, 0.03756032180786133, 0.03751913452148437, 0.03751692962646484, 0.037442142486572266, 0.03765043258666992, 0.037531646728515625, 0.03724492645263672, 0.03731209564208984, 0.0373474235534668, 0.03918438339233398, 0.037868991851806644, 0.03801475143432617, 0.037991199493408206, 0.037693248748779294, 0.03774041748046875, 0.03788374328613281, 0.037757152557373046, 0.038045631408691404, 0.03769343948364258, 0.03777977752685547, 0.03791436767578125, 0.03799631881713867, 0.03823798370361328, 0.03751187133789063, 0.03758694458007812, 0.03781017684936523, 0.038176769256591796, 0.03783475112915039, 0.037713920593261716, 0.03791606521606445, 0.03780249786376953, 0.03764847946166992, 0.03783270263671875, 0.03743334579467773, 0.037607425689697264, 0.037582847595214845, 0.03764019012451172, 0.03740467071533203, 0.037328254699707034, 0.03744134521484375, 0.03752246475219727, 0.03766886520385742, 0.037465057373046874, 0.03766969680786133, 0.03778966522216797, 0.03747817611694336, 0.03728595352172852, 0.03710585784912109, 0.037807968139648436, 0.037183647155761716, 0.03713350296020508, 0.03724272155761719, 0.03729647827148438, 0.03736159896850586, 0.0372374382019043, 0.03731657409667969, 0.037246463775634765, 0.03711564636230469, 0.03745052719116211, 0.03717529678344727, 0.03688236618041992, 0.03727951812744141, 0.03686019134521484, 0.03701145553588867, 0.03706179046630859, 0.03680307388305664, 0.03719935989379883, 0.03766153717041015, 0.03811695861816406, 0.037504512786865236, 0.038007713317871096, 0.037193984985351564]",tokens/s,26.534236740223704,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = 
worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1001, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 571, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 167, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 441104 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1074, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 888, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 610, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 447, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,939.04896,6540.886016,0.0,6138.363904,6060.931072,s,1,7.31232373046875,7.31232373046875,0.0,7.31232373046875,7.31232373046875,7.31232373046875,7.31232373046875,[7.31232373046875],,kWh,5.5944180250359445e-06,5.954578400483085e-07,2.8150022519934925e-06,9.004878117077745e-06,,MB,1265.47968,6563.954688,0.0,6150.946816,5419.87328,s,10,4.640802795410155,0.46408027954101555,0.004444838681579343,0.4644615936279297,0.46968894958496094,0.470020393371582,0.4702855484008789,"[0.4591753234863281, 0.4612737731933594, 0.4653211669921875, 0.4556710205078125, 0.46961529541015623, 0.4618258972167969, 0.46777105712890626, 0.46619540405273435, 0.4703518371582031, 0.46360202026367187]",tokens/s,551.628697201245,kWh,1.3562558553410258e-05,1.495707749096132e-06,9.004312759001103e-06,2.4062579061507496e-05,tokens/kWh,10638926.082928447,MB,1313.681408,6563.954688,0.0,6150.946816,5419.87584,s,10,15.588853515625,1.5588853515625,0.005305374555463592,1.5577423095703125,1.5665158447265624,1.5689876586914062,1.5709651098632813,"[1.5583944091796875, 1.5581160888671874, 1.565966552734375, 1.55382421875, 1.5574808349609375, 1.5559781494140625, 1.55591796875, 1.57145947265625, 1.5580037841796874, 1.5537120361328125]",tokens/s,40.41349156104002,kWh,4.549760379658823e-05,5.017608216811111e-06,3.0051912930397824e-05,8.056712494379717e-05,tokens/kWh,781956.6609078848,,s,630,15.586733560562154,0.0247408469215272,0.0003756853537589789,0.024651375770568848,0.024912428855895993,0.025181228160858153,0.026311815185546875,"[0.02633763122558594, 0.025167327880859375, 0.024865312576293944, 0.024743423461914063, 0.024678815841674806, 0.025016096115112303, 0.02489187240600586, 0.02495471954345703, 0.02466815948486328, 0.024630495071411133, 0.024559711456298827, 0.024502880096435548, 0.024652095794677736, 0.024659296035766602, 0.024797439575195312, 0.02480352020263672, 0.02485862350463867, 0.02469478416442871, 0.024592607498168946, 0.024578943252563476, 0.024533504486083983, 0.024664575576782227, 0.02482371139526367, 0.0246824951171875, 0.024934463500976563, 0.024711103439331056, 0.02474188804626465, 0.024682111740112304, 0.024717695236206056, 0.024936447143554686, 0.02473526382446289, 0.024664384841918945, 0.024715423583984375, 0.024672256469726563, 0.024680448532104493, 0.024727296829223634, 0.024727807998657227, 0.024628768920898436, 0.02461075210571289, 0.02476291275024414, 0.024762367248535155, 0.024776159286499025, 0.024717472076416017, 0.024744319915771484, 0.024710912704467774, 0.024666368484497072, 0.024644704818725587, 0.024587167739868163, 0.0245534725189209, 0.024577695846557616, 0.024592735290527343, 0.024557567596435546, 0.024627199172973634, 0.02462505531311035, 0.024708255767822266, 
0.02470364761352539, 0.024529184341430664, 0.024772607803344726, 0.024653823852539062, 0.024614912033081054, 0.02459257507324219, 0.024827711105346678, 0.024647680282592774, 0.02627168083190918, 0.025174047470092772, 0.02478489685058594, 0.02472326469421387, 0.02458153533935547, 0.02454159927368164, 0.024516992568969727, 0.024473600387573242, 0.02457804870605469, 0.024838016510009765, 0.024750207901000975, 0.02468659210205078, 0.024647743225097656, 0.024645408630371093, 0.024650976181030272, 0.02454547119140625, 0.02452956771850586, 0.024590272903442383, 0.024654272079467774, 0.02456291198730469, 0.024533504486083983, 0.024641536712646486, 0.024626880645751952, 0.02460089683532715, 0.024543231964111328, 0.024510496139526366, 0.02576918411254883, 0.024605440139770507, 0.024555200576782225, 0.0247524471282959, 0.024657888412475584, 0.02468412780761719, 0.024717920303344725, 0.025587551116943358, 0.024772607803344726, 0.02499344062805176, 0.024573312759399415, 0.024679231643676757, 0.024723615646362305, 0.024796735763549804, 0.024881120681762695, 0.02481551933288574, 0.024764991760253905, 0.024823808670043947, 0.024726816177368164, 0.02464851188659668, 0.02462201690673828, 0.024606943130493164, 0.02461712074279785, 0.024764799118041993, 0.024696800231933595, 0.0246561279296875, 0.024641536712646486, 0.02467430305480957, 0.024642847061157228, 0.024664640426635742, 0.02480143928527832, 0.024634592056274413, 0.024691423416137694, 0.024608543395996094, 0.024690303802490234, 0.024586912155151366, 0.024580095291137697, 0.026320928573608397, 0.025194368362426757, 0.024868255615234376, 0.024689695358276368, 0.024578880310058594, 0.02450934410095215, 0.024473215103149416, 0.024547744750976562, 0.024559072494506836, 0.024656383514404297, 0.02451251220703125, 0.024511680603027344, 0.024556352615356446, 0.024567808151245117, 0.029542207717895508, 0.024774848937988283, 0.024549375534057616, 0.024475648880004884, 0.024468639373779296, 0.024488000869750975, 0.024591136932373046, 0.02464358329772949, 0.024763999938964845, 0.024633823394775392, 0.024653120040893553, 0.024530912399291994, 0.024520832061767577, 0.02556982421875, 0.0247390079498291, 0.02468947219848633, 0.02465987205505371, 0.024692832946777345, 0.02470911979675293, 0.024559167861938475, 0.024588640213012696, 0.024590015411376953, 0.0247238712310791, 0.024612735748291016, 0.024723583221435547, 0.02480086326599121, 0.024699296951293945, 0.024664064407348633, 0.02464358329772949, 0.024823808670043947, 0.024844287872314453, 0.024744096755981444, 0.024678239822387694, 0.024718496322631837, 0.024656736373901367, 0.02468627166748047, 0.024629568099975584, 0.0249487361907959, 0.024866687774658204, 0.02484646415710449, 0.02522480010986328, 0.026617471694946288, 0.025686176300048828, 0.02495756721496582, 0.025029855728149412, 0.02488979148864746, 0.02470742416381836, 0.024661823272705077, 0.024669376373291016, 0.026220640182495116, 0.025118816375732423, 0.024869888305664063, 0.024701887130737305, 0.024630495071411133, 0.024777503967285158, 0.02456707191467285, 0.02453696060180664, 0.024539680480957032, 0.024585920333862303, 0.02447135925292969, 0.024488767623901366, 0.024928255081176756, 0.024910848617553712, 0.02488217544555664, 0.024592384338378907, 0.02462822341918945, 0.02458243179321289, 0.024610559463500978, 0.02456060791015625, 0.024519775390625, 0.024605663299560546, 0.0245732479095459, 0.02462374305725098, 0.0246824951171875, 0.024528160095214843, 0.024559968948364257, 0.02459004783630371, 0.024555391311645507, 0.024511072158813478, 
0.024610464096069335, 0.02457423973083496, 0.02462544059753418, 0.024573471069335936, 0.024533023834228517, 0.02455388832092285, 0.024573951721191405, 0.02464899253845215, 0.024698816299438476, 0.02478499221801758, 0.024828607559204102, 0.024815616607666017, 0.02471116828918457, 0.024637439727783202, 0.024694496154785157, 0.024774560928344725, 0.024639871597290038, 0.024559616088867187, 0.02449612808227539, 0.024528064727783204, 0.024574304580688478, 0.024601055145263673, 0.024585311889648437, 0.024678848266601563, 0.024655712127685546, 0.024596895217895508, 0.024563936233520507, 0.024567808151245117, 0.024604223251342774, 0.024652223587036132, 0.024591903686523437, 0.02457206344604492, 0.024561567306518553, 0.026282047271728514, 0.025217023849487305, 0.024867008209228516, 0.024719167709350585, 0.02456166458129883, 0.02453708839416504, 0.024469215393066405, 0.024591840744018555, 0.024564544677734376, 0.024641536712646486, 0.024600479125976564, 0.024706464767456054, 0.02460851287841797, 0.024912832260131836, 0.02477670478820801, 0.0247459831237793, 0.024723455429077147, 0.024663679122924803, 0.02465420722961426, 0.024678239822387694, 0.024651935577392578, 0.024616159439086915, 0.02456451225280762, 0.02452070426940918, 0.02449171257019043, 0.024645952224731444, 0.024694688796997072, 0.02463961601257324, 0.024632320404052735, 0.02470742416381836, 0.02474611282348633, 0.02459699249267578, 0.024845727920532225, 0.02490015983581543, 0.02469856071472168, 0.024770559310913084, 0.024668512344360353, 0.02467036819458008, 0.024697887420654298, 0.0248656005859375, 0.024801279067993166, 0.024778751373291014, 0.024795135498046874, 0.024696832656860353, 0.02465996742248535, 0.024616512298583984, 0.024637407302856445, 0.024648160934448243, 0.024649152755737303, 0.024640064239501953, 0.02460982322692871, 0.02457699203491211, 0.02474166488647461, 0.024723648071289062, 0.024636831283569336, 0.024619680404663086, 0.02492207908630371, 0.025324832916259764, 0.02462998390197754, 0.0245980167388916, 0.02457382392883301, 0.02470262336730957, 0.02455206489562988, 0.026227807998657225, 0.025069536209106444, 0.024748992919921876, 0.02467635154724121, 0.024583551406860353, 0.02453536033630371, 0.024457536697387695, 0.024525856018066405, 0.024538080215454103, 0.024571104049682616, 0.02451241683959961, 0.02452499198913574, 0.024566463470458984, 0.024616960525512696, 0.024551424026489257, 0.024551424026489257, 0.024559616088867187, 0.024610815048217775, 0.024608768463134766, 0.024576000213623047, 0.024453119277954103, 0.024573856353759766, 0.024630720138549805, 0.024574623107910157, 0.02465177536010742, 0.024555519104003908, 0.024577856063842773, 0.024559808731079102, 0.024558944702148436, 0.024508127212524412, 0.024564672470092773, 0.02454528045654297, 0.02458153533935547, 0.02456563186645508, 0.024480703353881837, 0.024624927520751953, 0.024623104095458984, 0.024647680282592774, 0.024811487197875976, 0.024991775512695314, 0.025620512008666992, 0.024993759155273437, 0.02505523109436035, 0.024854528427124024, 0.024823808670043947, 0.024705024719238283, 0.02470681571960449, 0.024614240646362303, 0.02464352035522461, 0.02464188766479492, 0.024606815338134767, 0.024601119995117188, 0.024611871719360353, 0.02492233657836914, 0.024638208389282226, 0.024631296157836914, 0.024688640594482423, 0.024622495651245118, 0.024736352920532226, 0.024754175186157225, 0.024866912841796877, 0.0248536319732666, 0.024839168548583986, 0.026324960708618166, 0.025124319076538087, 0.02479705619812012, 0.024723552703857423, 0.024672832489013672, 
0.024604671478271483, 0.024678272247314455, 0.02468876838684082, 0.024649728775024415, 0.02467020797729492, 0.024606719970703125, 0.02451046371459961, 0.024639488220214844, 0.024571903228759767, 0.024587455749511718, 0.02450924873352051, 0.024549375534057616, 0.024575872421264647, 0.024522880554199218, 0.024532991409301756, 0.02452275276184082, 0.024832000732421877, 0.024667423248291017, 0.02460540771484375, 0.02472755241394043, 0.024526847839355468, 0.024503551483154296, 0.024535808563232422, 0.024511775970458984, 0.024558271408081055, 0.024653728485107423, 0.024716896057128908, 0.02457040023803711, 0.024558847427368163, 0.02451737594604492, 0.02461612892150879, 0.02465430450439453, 0.024742240905761718, 0.02471436882019043, 0.024932960510253906, 0.024901920318603516, 0.02488832092285156, 0.024847551345825194, 0.024694368362426757, 0.02467417526245117, 0.024682207107543944, 0.02463609504699707, 0.025190336227416992, 0.02513046455383301, 0.024547872543334962, 0.02453875160217285, 0.02463929557800293, 0.024830528259277344, 0.024748031616210937, 0.024623264312744142, 0.024571744918823243, 0.02487055969238281, 0.024688640594482423, 0.024613216400146486, 0.02455673599243164, 0.024566368103027345, 0.02451081657409668, 0.024566783905029296, 0.02628950309753418, 0.025510431289672852, 0.024957056045532226, 0.024780799865722656, 0.024633344650268556, 0.02458624076843262, 0.0246560001373291, 0.024608640670776366, 0.02459427261352539, 0.02599078369140625, 0.025909696578979492, 0.027509952545166017, 0.024887807846069337, 0.025026399612426756, 0.02486534309387207, 0.024860639572143555, 0.024817567825317383, 0.024844415664672853, 0.02471881675720215, 0.02477110481262207, 0.024758144378662108, 0.024618175506591795, 0.027382495880126954, 0.025810592651367186, 0.02472198486328125, 0.02471116828918457, 0.024659584045410157, 0.02462348747253418, 0.02466774368286133, 0.024561376571655275, 0.024596351623535156, 0.0248407039642334, 0.024700960159301757, 0.024654111862182616, 0.024600095748901367, 0.024596960067749023, 0.02467430305480957, 0.025004032135009766, 0.024854528427124024, 0.024868864059448242, 0.024805248260498045, 0.024842336654663087, 0.024885055541992187, 0.02483363151550293, 0.024664703369140624, 0.024797183990478516, 0.024844032287597656, 0.024727136611938476, 0.024662143707275392, 0.024656415939331055, 0.025246976852416992, 0.02563302421569824, 0.024754688262939452, 0.024677919387817382, 0.0248570556640625, 0.024665088653564454, 0.024603647232055666, 0.02460809516906738, 0.024621728897094727, 0.02478220748901367, 0.024707712173461915, 0.024774656295776368, 0.024852479934692383, 0.026258047103881837, 0.025255296707153322, 0.02498828887939453, 0.02474575996398926, 0.02466633605957031, 0.024610815048217775, 0.02456729507446289, 0.024652288436889647, 0.024616960525512696, 0.024619007110595705, 0.024707199096679688, 0.02447551918029785, 0.02452479934692383, 0.024545055389404297, 0.024532543182373048, 0.024596511840820314, 0.024645727157592775, 0.02472400093078613, 0.024577312469482422, 0.024549312591552734, 0.02451036834716797, 0.024613792419433594, 0.02460825538635254, 0.024567903518676756, 0.024516544342041015, 0.024539583206176756, 0.02461849594116211, 0.024723968505859374, 0.024580095291137697, 0.02453708839416504, 0.02466556739807129, 0.02463702392578125, 0.024759231567382814, 0.024798816680908203, 0.02462678337097168, 0.02462188720703125, 0.024649728775024415, 0.025171424865722655, 0.024650272369384767, 0.02500182342529297, 0.025005695343017578, 0.024887840270996095, 0.024879072189331056, 
0.024962591171264648, 0.024912384033203124, 0.02475817680358887, 0.024702816009521483, 0.024619039535522462, 0.024770784378051757, 0.024664064407348633, 0.024893152236938478, 0.0245883846282959, 0.024643775939941406, 0.025058624267578124, 0.02471187210083008, 0.024799232482910157, 0.024716800689697265, 0.024723968505859374, 0.024675615310668947, 0.02463203239440918, 0.024598047256469725, 0.02464406394958496, 0.02467840003967285, 0.026210271835327148, 0.025187103271484376, 0.024860671997070313, 0.024723455429077147, 0.024629247665405272, 0.024598527908325195, 0.024621055603027343, 0.024661216735839844, 0.0246463680267334, 0.024674367904663087, 0.02453708839416504, 0.02460851287841797, 0.024620351791381837, 0.024605503082275392, 0.024575519561767577, 0.02448649597167969, 0.024548416137695313, 0.02456262397766113, 0.024571903228759767, 0.024563488006591798, 0.02452604866027832, 0.024556543350219725, 0.024532991409301756, 0.02449977684020996, 0.024578271865844728, 0.024762592315673827, 0.024612863540649413, 0.024623104095458984, 0.02452479934692383, 0.024477855682373047, 0.024543071746826173, 0.02454694366455078, 0.024555904388427734, 0.024538591384887697, 0.02462774467468262, 0.02464739227294922, 0.02463884735107422, 0.024632223129272462, 0.024663135528564452, 0.02486979293823242, 0.02484822463989258, 0.024848543167114257, 0.02474393653869629, 0.024735231399536133, 0.024688543319702147, 0.02467452812194824, 0.024637216567993163, 0.024584224700927734, 0.024560192108154296, 0.024591903686523437, 0.024580671310424806, 0.02459574317932129, 0.024857215881347657, 0.02469273567199707, 0.024619007110595705, 0.024640832901000977, 0.02461926460266113, 0.024583711624145507, 0.024578975677490233, 0.02458624076843262, 0.024556608200073243, 0.024642175674438476, 0.02459679985046387]",tokens/s,40.41898820892394,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 1047, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 890, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 366, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, 
softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,973.16864,7019.036672,0.0,6616.51456,6410.443264,s,1,8.964912109375,8.964912109375,0.0,8.964912109375,8.964912109375,8.964912109375,8.964912109375,[8.964912109375],,kWh,6.044024379161783e-06,6.594641260942918e-07,2.7163910619985854e-06,9.41987956725466e-06,,MB,1485.000704,7075.659776,0.0,6658.4576,5286.424064,s,10,0.6609455718994142,0.06609455718994142,0.0006165446762350791,0.06620318603515625,0.06668882217407227,0.06694337196350097,0.06714701179504394,"[0.06719792175292968, 0.06630390167236327, 0.06530719757080078, 0.06498576354980469, 0.0656484146118164, 0.06663225555419922, 0.06628518676757812, 0.06597913360595703, 0.06648461151123047, 0.06612118530273438]",tokens/s,3873.2387489080456,kWh,2.121494695199312e-06,2.3387781292975408e-07,1.4039503985217197e-06,3.759322906650786e-06,tokens/kWh,68097369.22228707,MB,1504.538624,7105.019904,0.0,6687.817728,5299.722752,s,10,29.315107666015628,2.931510766601563,0.011919667321502542,2.930851318359375,2.943059326171875,2.94622119140625,2.94875068359375,"[2.949383056640625, 2.916034912109375, 2.932284423828125, 2.908190185546875, 2.9420712890625, 2.942356689453125, 2.927465087890625, 2.929418212890625, 2.9286796875, 2.93922412109375]",tokens/s,21.49062548831589,kWh,8.529734609146781e-05,9.408264537737975e-06,5.3312898930478605e-05,0.0001480185095596844,tokens/kWh,425622.4453780017,,s,630,29.313445816040026,0.046529279073079424,0.0007252774166878843,0.046382926940917966,0.047037092208862305,0.04731312122344971,0.049730614089965826,"[0.04658611297607422, 0.04638652801513672, 0.046459136962890626, 0.0464736328125, 0.04608204650878906, 0.04623097610473633, 0.04616883087158203, 0.04597536087036133, 0.04600831985473633, 0.047017982482910156, 0.04641926574707031, 0.04640857696533203, 0.04662384033203125, 0.0461317138671875, 0.046835968017578125, 0.04650559997558594, 0.047445758819580075, 0.04696441650390625, 0.046528800964355466, 0.04679948806762695, 0.04661862564086914, 0.04692377471923828, 0.04677167892456055, 0.04657340621948242, 0.04710060882568359, 0.0476682243347168, 0.04672371292114258, 0.047239551544189455, 0.046607616424560544, 0.04656380844116211, 0.04667216110229492, 0.04691257476806641, 0.047102142333984375, 0.04806937789916992, 0.04642972946166992, 0.04694268798828125, 0.04661043167114258, 0.046952224731445315, 0.04671635055541992, 0.046690528869628906, 0.046690879821777345, 0.04642630386352539, 0.04652563095092774, 0.0465926399230957, 0.047126529693603515, 0.046518207550048825, 0.04678662490844727, 0.04677036666870117, 0.04620268630981445, 0.04837376022338867, 0.050026496887207034, 0.04713676834106445, 0.0474337272644043, 0.046860286712646484, 0.047107135772705075, 0.0482740478515625, 
0.046321983337402346, 0.04678041458129883, 0.04653670501708984, 0.046796062469482425, 0.04701871871948242, 0.04671897506713867, 0.046241790771484374, 0.04671443176269531, 0.04702457427978515, 0.0469683837890625, 0.04682124710083008, 0.04662739181518555, 0.04657478332519531, 0.04618035125732422, 0.04628486251831055, 0.04637772750854492, 0.047108097076416014, 0.04680908966064453, 0.04638924789428711, 0.04655292892456055, 0.04613369750976563, 0.046106334686279296, 0.046145374298095704, 0.04588969421386719, 0.04624486541748047, 0.04590467071533203, 0.04611094284057617, 0.04617216110229492, 0.04612710571289062, 0.04581891250610352, 0.04596630477905273, 0.04619878387451172, 0.04579849624633789, 0.045842750549316406, 0.04633865737915039, 0.04611072158813476, 0.045854625701904295, 0.04588262557983398, 0.04607580947875976, 0.04608095932006836, 0.04599398422241211, 0.04587507247924805, 0.045887615203857424, 0.04628275299072265, 0.04637664031982422, 0.04651638412475586, 0.04653622436523437, 0.04688755035400391, 0.04648540878295898, 0.04632995223999024, 0.046040096282958985, 0.0462591667175293, 0.046011489868164064, 0.046672798156738284, 0.04683497619628906, 0.046227455139160156, 0.045973281860351566, 0.04587615966796875, 0.0457891845703125, 0.04580044937133789, 0.045927425384521485, 0.046274559020996094, 0.04731903839111328, 0.047009792327880856, 0.046721023559570314, 0.046340255737304686, 0.04613513565063477, 0.04597862243652344, 0.04611174392700195, 0.04616361618041992, 0.04659404754638672, 0.04669619369506836, 0.04717494583129883, 0.04688991928100586, 0.04693139266967773, 0.046185054779052735, 0.04624150466918946, 0.04580313491821289, 0.046396064758300784, 0.04666592025756836, 0.04687424087524414, 0.04700342559814453, 0.046653694152832034, 0.046751903533935546, 0.046630783081054686, 0.046731521606445316, 0.04627648162841797, 0.046757694244384765, 0.04625632095336914, 0.04632371139526367, 0.04679065704345703, 0.04689920043945312, 0.04668150329589844, 0.04658774566650391, 0.04623952102661133, 0.046564319610595706, 0.04646060943603516, 0.046774593353271485, 0.04626396942138672, 0.046096736907958985, 0.04627609634399414, 0.04621721649169922, 0.04648601531982422, 0.04670816040039062, 0.046053951263427734, 0.04612300872802735, 0.04602470397949219, 0.046077953338623044, 0.049126846313476566, 0.046361152648925784, 0.046478561401367184, 0.04619878387451172, 0.04603276824951172, 0.04606249618530273, 0.04596473693847656, 0.04605091094970703, 0.04646915054321289, 0.04627286529541016, 0.04621577453613281, 0.04674671936035156, 0.04629391860961914, 0.04618438339233399, 0.04728409576416016, 0.046636608123779295, 0.046371265411376955, 0.046104671478271485, 0.04642006301879883, 0.04620816040039062, 0.0461300163269043, 0.0462213134765625, 0.04660358428955078, 0.048411102294921876, 0.04807702255249023, 0.046308929443359376, 0.0460118408203125, 0.04614678573608398, 0.046144798278808595, 0.046110271453857425, 0.04593142318725586, 0.04612710571289062, 0.046087841033935546, 0.045999744415283206, 0.0464469108581543, 0.04612956619262695, 0.04628889465332031, 0.04613513565063477, 0.04586905670166016, 0.0461860466003418, 0.045756542205810546, 0.04595894241333008, 0.04607660675048828, 0.04593382263183594, 0.04640464019775391, 0.046259521484375, 0.04611280059814453, 0.0462053108215332, 0.04606316757202149, 0.04602233505249023, 0.046018688201904294, 0.04583273696899414, 0.04583433532714844, 0.04607964706420899, 0.04611017608642578, 0.046104801177978515, 0.04678473663330078, 0.04610707092285156, 0.046036991119384765, 
0.04588272094726562, 0.045803680419921874, 0.045996639251708986, 0.046098270416259766, 0.048001087188720704, 0.04622870254516601, 0.04658870315551758, 0.04677449417114258, 0.046110496520996094, 0.04598931121826172, 0.04602096176147461, 0.04574825668334961, 0.04594233703613281, 0.04602508926391601, 0.045860801696777344, 0.04605964660644531, 0.0459958381652832, 0.04609471893310547, 0.04592988967895508, 0.046111328125, 0.045871105194091794, 0.04658790588378906, 0.045900928497314454, 0.04607065582275391, 0.04667801666259765, 0.046832958221435544, 0.046203102111816406, 0.046569278717041016, 0.046412448883056644, 0.04690892791748047, 0.04668057632446289, 0.046380607604980466, 0.047036865234375, 0.04690643310546875, 0.046442848205566406, 0.046002559661865235, 0.04674531173706055, 0.04806924819946289, 0.046401409149169924, 0.04664524841308594, 0.04635263824462891, 0.047123424530029295, 0.04677507019042969, 0.04739657592773437, 0.046569759368896485, 0.04778598403930664, 0.046633983612060545, 0.04648419189453125, 0.046204193115234375, 0.0472627182006836, 0.04624531173706055, 0.04615225601196289, 0.046429439544677736, 0.04667043304443359, 0.04664131164550781, 0.047074878692626956, 0.047086017608642575, 0.04706304168701172, 0.04665139389038086, 0.04643430328369141, 0.04672512054443359, 0.04675379180908203, 0.0471275520324707, 0.04681964874267578, 0.04680364990234375, 0.04674496078491211, 0.046596736907958985, 0.04629616165161133, 0.046171039581298826, 0.04671078491210937, 0.046669822692871094, 0.046548255920410155, 0.046564064025878905, 0.04657942581176758, 0.04678070449829102, 0.04726134490966797, 0.04703392028808594, 0.046610942840576174, 0.04694454574584961, 0.04659814453125, 0.046080001831054686, 0.04625126266479492, 0.050622303009033205, 0.04628572845458984, 0.046387008666992184, 0.04621535873413086, 0.046139392852783206, 0.0462213134765625, 0.046059425354003904, 0.046053470611572264, 0.0458752326965332, 0.046134944915771484, 0.046246559143066406, 0.04609843063354492, 0.046186496734619144, 0.04635136032104492, 0.04677676773071289, 0.04641356658935547, 0.04690966415405273, 0.04651625442504883, 0.046223072052001955, 0.04622390365600586, 0.04620665740966797, 0.050057857513427735, 0.04610867309570312, 0.04631961441040039, 0.046374782562255856, 0.04640694427490234, 0.04652675247192383, 0.046416351318359375, 0.04638524627685547, 0.046192638397216795, 0.04653657531738281, 0.04682355117797852, 0.04673052978515625, 0.04694908905029297, 0.04629267120361328, 0.046197055816650394, 0.045972896575927735, 0.045967041015625, 0.04603977584838867, 0.046088031768798825, 0.04655548858642578, 0.04665958404541016, 0.04631961441040039, 0.05067747116088867, 0.046346527099609375, 0.046155582427978514, 0.04634182357788086, 0.04708528137207031, 0.04691388702392578, 0.04658572769165039, 0.046747745513916014, 0.04663065719604492, 0.04670947265625, 0.04675577545166015, 0.0465470085144043, 0.04646092987060547, 0.04647731018066406, 0.04615756988525391, 0.04624614334106445, 0.04689920043945312, 0.046233024597167965, 0.04654924774169922, 0.04629945755004883, 0.04977852630615234, 0.04637916946411133, 0.046927871704101565, 0.0461552963256836, 0.04659452819824219, 0.04751551818847656, 0.04684147262573242, 0.049613311767578126, 0.047002784729003905, 0.046489761352539065, 0.04658134460449219, 0.04650249481201172, 0.04665686416625976, 0.04665414428710937, 0.04649964904785156, 0.04636896133422851, 0.04651536178588867, 0.046488414764404296, 0.04636003112792969, 0.04620528030395508, 0.046327999114990234, 0.046088191986083986, 
0.04605132675170898, 0.04646623992919922, 0.04623852920532227, 0.04617216110229492, 0.04590902328491211, 0.046370784759521486, 0.046015487670898435, 0.046235649108886716, 0.04913068771362305, 0.04629177474975586, 0.04625718307495117, 0.0461956787109375, 0.046202880859375, 0.04612300872802735, 0.04605952072143555, 0.04606083297729492, 0.04686511993408203, 0.04637007904052735, 0.04626502227783203, 0.04720230484008789, 0.04647897720336914, 0.04636681747436523, 0.04603299331665039, 0.046225086212158206, 0.04755289459228516, 0.0461764144897461, 0.04604431915283203, 0.04659286499023438, 0.046478721618652345, 0.04625612640380859, 0.046362239837646486, 0.046117889404296876, 0.04612643051147461, 0.04635100936889648, 0.046671871185302735, 0.04678451156616211, 0.047030464172363284, 0.04621049499511719, 0.04613158416748047, 0.045974750518798825, 0.04679663848876953, 0.0468919677734375, 0.046036991119384765, 0.04631347274780274, 0.04596255874633789, 0.04605791854858399, 0.04636620712280273, 0.04710294342041016, 0.04890192031860351, 0.04623276901245117, 0.04695062255859375, 0.047153663635253903, 0.04649574279785156, 0.04639129638671875, 0.04606771087646484, 0.04630527877807617, 0.046200511932373046, 0.04622143936157227, 0.04590201568603516, 0.046036159515380856, 0.04606854248046875, 0.046163585662841795, 0.046663070678710936, 0.04780524826049805, 0.04614937591552734, 0.04601692962646484, 0.04656937789916992, 0.04653680038452149, 0.046342144012451174, 0.04594278335571289, 0.04632950210571289, 0.04621516799926758, 0.0461929931640625, 0.04660224151611328, 0.04629708862304688, 0.0461578254699707, 0.04610047912597656, 0.046976673126220704, 0.046102432250976565, 0.05000646209716797, 0.04780646514892578, 0.0466732177734375, 0.04712108612060547, 0.046862335205078126, 0.04709075164794922, 0.04648828887939453, 0.0463403205871582, 0.04611459350585938, 0.04677791976928711, 0.04670553588867187, 0.04694812774658203, 0.04654694366455078, 0.046196094512939455, 0.04605990219116211, 0.04616422271728516, 0.04637286376953125, 0.04653875350952148, 0.04738364791870117, 0.04689984130859375, 0.04645734405517578, 0.04650371170043945, 0.0460840950012207, 0.04592844772338867, 0.046069217681884767, 0.046118465423583985, 0.046115806579589844, 0.04628006362915039, 0.04605718231201172, 0.045867935180664066, 0.04686438369750977, 0.04686643218994141, 0.0466063346862793, 0.046292991638183595, 0.046048641204833984, 0.04676358413696289, 0.04703913497924805, 0.046876670837402344, 0.046581760406494144, 0.04624319839477539, 0.04646387100219727, 0.04677593612670899, 0.04665151977539062, 0.046483455657958986, 0.04685823822021484, 0.04730588912963867, 0.04701801681518555, 0.04698195266723633, 0.04655104064941406, 0.0462110710144043, 0.04610969543457031, 0.04607798385620117, 0.04620387268066406, 0.04612076950073242, 0.04644268798828125, 0.04637286376953125, 0.04612710571289062, 0.04632921600341797, 0.04616563034057617, 0.046150657653808595, 0.04649574279785156, 0.0468969612121582, 0.04627475357055664, 0.046360576629638675, 0.04600831985473633, 0.04613119888305664, 0.045932544708251956, 0.04591206359863281, 0.045811294555664066, 0.04619305419921875, 0.04636262512207031, 0.04664115142822266, 0.04621311950683594, 0.04604927825927734, 0.04664092636108398, 0.04695881652832031, 0.04655513763427734, 0.04666099166870117, 0.04656972885131836, 0.04636892700195312, 0.0464222412109375, 0.046243934631347655, 0.04685737609863281, 0.04661539077758789, 0.04634566497802734, 0.04633993530273438, 0.046150272369384765, 0.04633795166015625, 0.04686038589477539, 
0.04652646255493164, 0.04673894500732422, 0.046866943359375, 0.04696268844604492, 0.04767295837402344, 0.0463056640625, 0.04642406463623047, 0.04655104064941406, 0.046353790283203126, 0.04629923248291016, 0.04673263931274414, 0.04623222351074219, 0.046816608428955075, 0.046320159912109374, 0.04608643341064453, 0.04647510528564453, 0.04639539337158203, 0.045969406127929685, 0.04596723175048828, 0.045733375549316405, 0.04610313415527344, 0.04615990447998047, 0.04578451156616211, 0.0458430061340332, 0.045849983215332034, 0.045910655975341795, 0.04710604858398437, 0.04728758239746094, 0.04603158569335938, 0.046741344451904296, 0.04611907196044922, 0.047134719848632815, 0.046495681762695314, 0.046792736053466795, 0.04684572982788086, 0.04672127914428711, 0.046410785675048825, 0.04656806564331055, 0.04678086471557617, 0.04710736083984375, 0.04725398254394531, 0.04669436645507812, 0.046378623962402346, 0.04637519836425781, 0.04693337631225586, 0.0467276496887207, 0.04711673736572266, 0.046046367645263674, 0.046312255859375, 0.04632579040527344, 0.045964672088623044, 0.05672819137573242, 0.04655263900756836, 0.048017856597900394, 0.0470156478881836, 0.046528705596923826, 0.046542945861816405, 0.046311038970947266, 0.04719862365722656, 0.04615779113769531, 0.046231201171875, 0.04742559814453125, 0.046749984741210934, 0.04680089569091797, 0.04669865417480469, 0.04604297637939453, 0.04600831985473633, 0.046139392852783206, 0.046118080139160154, 0.04600844955444336, 0.046301887512207034, 0.04651542282104492]",tokens/s,21.491843843730923,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,939.728896,12526.157824,0.0,12123.635712,12121.851904,s,1,7.25050244140625,7.25050244140625,0.0,7.25050244140625,7.25050244140625,7.25050244140625,7.25050244140625,[7.25050244140625],,kWh,6.085127154180251e-06,6.637993715739614e-07,2.6938910439866515e-06,9.442817569740863e-06,,MB,1271.107584,12551.323648,0.0,12138.315776,10311.21664,s,10,3.469616912841797,0.34696169128417964,0.0084303162450332,0.350090087890625,0.3528365264892578,0.35364571075439455,0.3542930581665039,"[0.32356906127929685, 0.35445489501953126, 0.34911050415039063, 0.34595584106445315, 0.34302529907226564, 0.3512448425292969, 0.3472713317871094, 0.3512587585449219, 0.3510696716308594, 0.35265670776367186]",tokens/s,737.8336180357235,kWh,9.616533058737507e-06,1.060508852781179e-06,6.381913707677367e-06,1.7058955619196054e-05,tokens/kWh,15006780.35130879,MB,1299.6608,12557.615104,0.0,12144.607232,10311.2192,s,10,27.8458828125,2.78458828125,0.0017250744209269136,2.7849578857421875,2.785989453125,2.786872534179688,2.7875789990234376,"[2.782995849609375, 2.78280517578125, 2.7851123046875, 2.787755615234375, 2.785793212890625, 2.7815703125, 2.78576416015625, 2.784803466796875, 2.78557470703125, 
2.7837080078125]",tokens/s,22.624529602530444,kWh,8.178240287959168e-05,9.020633178324012e-06,5.429238483172084e-05,0.00014509542088963654,tokens/kWh,434197.0243700488,,s,630,27.84354676818846,0.044196105981251545,0.0003465880741196334,0.04413801574707031,0.04445681457519531,0.04459216079711914,0.04631116275787354,"[0.04620044708251953, 0.04474528121948242, 0.044289440155029294, 0.04406857681274414, 0.044062686920166017, 0.043924224853515624, 0.04399721527099609, 0.04405452728271484, 0.04391731262207031, 0.043919136047363284, 0.04380080032348633, 0.04398694229125977, 0.04401561737060547, 0.04398284912109375, 0.04418156814575196, 0.043913150787353514, 0.04402995300292969, 0.04395315170288086, 0.043915809631347655, 0.04388675308227539, 0.043947521209716796, 0.044167999267578126, 0.04461344146728516, 0.044343486785888675, 0.04427980804443359, 0.04408054351806641, 0.04403209686279297, 0.04393625640869141, 0.04396988677978515, 0.04388905715942383, 0.04387206268310547, 0.04390444946289063, 0.04388351821899414, 0.04404633712768555, 0.04399459075927734, 0.04394009780883789, 0.043858207702636716, 0.04393983840942383, 0.0439741439819336, 0.044103519439697265, 0.044321441650390626, 0.04402175903320313, 0.04396851348876953, 0.044076446533203126, 0.04480265426635742, 0.044453887939453124, 0.04416483306884766, 0.044521728515625, 0.04448662567138672, 0.044525630950927736, 0.04459244918823242, 0.044435169219970705, 0.04431766510009766, 0.044199935913085936, 0.044496192932128906, 0.044189918518066404, 0.044050209045410155, 0.04412639999389648, 0.04425532913208008, 0.044343711853027344, 0.04427487945556641, 0.044399200439453126, 0.044107200622558594, 0.046467422485351566, 0.04463312149047852, 0.04408972930908203, 0.04394659042358398, 0.04392550277709961, 0.04388454437255859, 0.043862014770507815, 0.043911167144775394, 0.043804672241210936, 0.04391139221191406, 0.044014591217041016, 0.043843486785888675, 0.043819614410400394, 0.04386435317993164, 0.043859870910644534, 0.04395836639404297, 0.043961761474609375, 0.043936351776123046, 0.04393164825439453, 0.04392144012451172, 0.043945953369140624, 0.044290046691894534, 0.04427315139770508, 0.04414720153808594, 0.044077056884765625, 0.04402364730834961, 0.04450729751586914, 0.04413993453979492, 0.04399164962768555, 0.043937759399414064, 0.043835262298583984, 0.04385507202148437, 0.0439730224609375, 0.04421481704711914, 0.04396851348876953, 0.044249088287353515, 0.044074337005615236, 0.04425948715209961, 0.04434380722045898, 0.04430972671508789, 0.04426627349853516, 0.04404140853881836, 0.044004161834716796, 0.04428556823730469, 0.04431500625610352, 0.0442589111328125, 0.044347518920898436, 0.04462211227416992, 0.04446355056762695, 0.044843582153320315, 0.04431257629394531, 0.04432281494140625, 0.0441275520324707, 0.04415353775024414, 0.044181503295898435, 0.044244384765625, 0.04400128173828125, 0.04411027145385742, 0.044560672760009766, 0.044305599212646485, 0.04447916793823242, 0.0441610221862793, 0.04418086242675781, 0.046456798553466794, 0.04479590225219727, 0.04411711883544922, 0.044108478546142575, 0.04400761413574219, 0.043902976989746094, 0.043947135925292966, 0.0438711051940918, 0.04382249450683594, 0.04401417541503906, 0.0439576301574707, 0.04386787033081055, 0.04385472106933594, 0.04394601440429687, 0.04405657577514648, 0.044000576019287106, 0.043923423767089846, 0.04408361434936523, 0.04429833602905273, 0.043908672332763674, 0.04419180679321289, 0.04433059310913086, 0.044243968963623044, 0.04419385528564453, 0.044203617095947265, 0.044691169738769534, 
0.04423644638061523, 0.044157920837402345, 0.04441292953491211, 0.044019775390625, 0.04445161437988281, 0.0443551025390625, 0.04402649688720703, 0.04403811264038086, 0.04414467239379883, 0.04414236831665039, 0.04419401550292969, 0.0442531852722168, 0.0442158088684082, 0.043963134765625, 0.04404403305053711, 0.044365825653076174, 0.04451676940917969, 0.04434185409545898, 0.044318782806396485, 0.04440057754516601, 0.04433260726928711, 0.0445546875, 0.04456665420532226, 0.044281726837158204, 0.04429209518432617, 0.04446822357177734, 0.04437606430053711, 0.044192798614501955, 0.043991455078125, 0.04399776077270508, 0.04414652633666992, 0.04407926559448242, 0.0440832633972168, 0.043990463256835935, 0.04396083068847656, 0.044066814422607424, 0.04410572814941406, 0.046325759887695314, 0.044653633117675784, 0.044174049377441404, 0.04406403350830078, 0.04394611358642578, 0.04404051208496094, 0.04386374282836914, 0.0439192008972168, 0.04402070236206055, 0.04397055816650391, 0.04387203216552735, 0.04387452697753906, 0.04394723129272461, 0.04401136016845703, 0.043945953369140624, 0.043938369750976564, 0.04389878463745117, 0.04395004653930664, 0.044120609283447264, 0.04439984130859375, 0.04435433578491211, 0.0442770881652832, 0.044392192840576175, 0.04465887832641602, 0.04430233764648438, 0.044362464904785154, 0.04407295989990234, 0.04397875213623047, 0.04426342391967773, 0.0439799690246582, 0.04439078521728516, 0.044093887329101564, 0.04415078353881836, 0.044365825653076174, 0.04424703979492187, 0.04432486343383789, 0.04407932662963867, 0.04418742370605469, 0.04422643280029297, 0.04426287841796875, 0.044493408203125, 0.04442118453979492, 0.04420153427124023, 0.04426704025268555, 0.04440700912475586, 0.04437267303466797, 0.044485694885253904, 0.04427814483642578, 0.04451200103759766, 0.04436563110351562, 0.04430374526977539, 0.04421696090698242, 0.04420735931396484, 0.044415294647216795, 0.044318336486816406, 0.04418848037719727, 0.04430233764648438, 0.04427891159057617, 0.04430137634277344, 0.044420799255371096, 0.04439871978759766, 0.044314624786376954, 0.0441049919128418, 0.046456256866455076, 0.04459894561767578, 0.044073856353759766, 0.044037185668945315, 0.04406163024902344, 0.04418112182617188, 0.043942272186279295, 0.043964576721191403, 0.04385366439819336, 0.04400873565673828, 0.04405724716186524, 0.044062782287597656, 0.04397055816650391, 0.04399494552612305, 0.04401375961303711, 0.043974655151367184, 0.043999038696289065, 0.043953857421875, 0.04405712127685547, 0.043918495178222654, 0.04423148727416992, 0.044423168182373046, 0.044365215301513675, 0.04425081634521484, 0.044053409576416014, 0.04460086441040039, 0.044119998931884764, 0.044624416351318356, 0.044232704162597655, 0.044199935913085936, 0.04426649475097656, 0.044245887756347656, 0.04424492645263672, 0.04404038238525391, 0.04416921615600586, 0.04438582229614258, 0.04437449645996094, 0.04432262420654297, 0.0441591682434082, 0.044298240661621094, 0.04419971084594727, 0.04430387115478516, 0.04426620864868164, 0.04422655868530274, 0.04450099182128906, 0.04451123046875, 0.04430377578735351, 0.04428976058959961, 0.04423177719116211, 0.04436764907836914, 0.04419379043579102, 0.04420943832397461, 0.04409580612182617, 0.04409363174438476, 0.043966686248779294, 0.044014686584472655, 0.04406524658203125, 0.04412665557861328, 0.04435670471191406, 0.04411075210571289, 0.04405452728271484, 0.04416307067871094, 0.044095489501953126, 0.04633808135986328, 0.044595199584960936, 0.044117183685302735, 0.04394607925415039, 0.04387913513183594, 
0.043850879669189456, 0.043948928833007814, 0.044025856018066405, 0.04398406219482422, 0.04392214584350586, 0.04384745788574219, 0.04387052917480469, 0.04386956787109375, 0.04398128128051758, 0.04397011184692383, 0.04413014221191406, 0.04394790267944336, 0.043867008209228515, 0.044096641540527344, 0.04397062301635742, 0.044045120239257815, 0.04426342391967773, 0.044273792266845705, 0.04416089630126953, 0.04425932693481445, 0.04413235092163086, 0.04436707305908203, 0.043985694885253904, 0.04434124755859375, 0.04409862518310547, 0.04414255905151367, 0.044155872344970704, 0.04435763168334961, 0.04401152038574219, 0.04429414367675781, 0.04436377716064453, 0.0440333137512207, 0.04416790390014649, 0.044216320037841796, 0.044224510192871096, 0.04412124633789063, 0.04405942535400391, 0.04423263931274414, 0.04429632186889648, 0.0441712646484375, 0.04435763168334961, 0.0444268798828125, 0.044265857696533205, 0.04421222305297851, 0.04402995300292969, 0.044160865783691404, 0.0440997428894043, 0.04404399871826172, 0.04398262405395508, 0.04392534255981445, 0.04439072036743164, 0.04413792037963867, 0.044120094299316404, 0.04402675247192383, 0.043976703643798826, 0.04402096176147461, 0.044138336181640626, 0.04412102508544922, 0.046193950653076174, 0.0445918083190918, 0.044095489501953126, 0.043916511535644534, 0.04390172958374024, 0.04383283233642578, 0.04382534408569336, 0.043806430816650394, 0.0440203857421875, 0.043923168182373046, 0.04429436874389649, 0.043960319519042966, 0.04389888000488281, 0.0441077766418457, 0.04401766586303711, 0.04396134567260742, 0.04420915222167969, 0.043993087768554685, 0.04429523086547851, 0.04405337524414062, 0.04412649536132812, 0.044306209564208984, 0.044291488647460936, 0.04450297546386719, 0.04453958511352539, 0.04452995300292969, 0.0444541130065918, 0.04423075103759765, 0.04407033538818359, 0.04430944061279297, 0.04413811111450195, 0.04428752136230469, 0.044141407012939456, 0.04426956939697266, 0.04406198501586914, 0.04446249771118164, 0.04429379272460938, 0.04409171295166016, 0.04412566375732422, 0.04405132675170898, 0.044339199066162106, 0.0441343994140625, 0.0441484489440918, 0.0441357421875, 0.04421334457397461, 0.04443328094482422, 0.0445063362121582, 0.044417438507080076, 0.044081535339355465, 0.04430438232421875, 0.04454787063598633, 0.0441223373413086, 0.044246849060058595, 0.0441776008605957, 0.044058624267578124, 0.044251136779785157, 0.04418672180175781, 0.04421039962768555, 0.044126911163330076, 0.044055713653564456, 0.04406358337402344, 0.04445183944702148, 0.04413811111450195, 0.04627542495727539, 0.044539905548095705, 0.04398694229125977, 0.043880062103271486, 0.04384819030761719, 0.04377363204956055, 0.04410572814941406, 0.04395027160644531, 0.04390892791748047, 0.04401372909545898, 0.04407503890991211, 0.04389683151245117, 0.043969566345214844, 0.04395926284790039, 0.043949886322021486, 0.04393388748168945, 0.043872257232666016, 0.044111137390136716, 0.044092128753662106, 0.04403574371337891, 0.044085601806640624, 0.04419510269165039, 0.04428777694702148, 0.04440095901489258, 0.04429888153076172, 0.04425091171264649, 0.0444090576171875, 0.04431872177124024, 0.044125377655029295, 0.044128097534179685, 0.04417849731445313, 0.04407392120361328, 0.044323646545410156, 0.044111103057861326, 0.04416195297241211, 0.044214176177978515, 0.04422006225585937, 0.04395052719116211, 0.04414668655395508, 0.04447379302978516, 0.044278526306152345, 0.044214080810546875, 0.04410572814941406, 0.04420198440551758, 0.04440399932861328, 0.04435017776489258, 
0.044281856536865234, 0.04461568069458008, 0.044351646423339844, 0.04434928131103515, 0.04429619216918945, 0.04422041702270508, 0.044392288208007814, 0.04440899276733398, 0.044115966796875, 0.04430438232421875, 0.04417459106445312, 0.04397132873535156, 0.04403385543823242, 0.04402195358276367, 0.04450716781616211, 0.04415689468383789, 0.04428550338745117, 0.04663100814819336, 0.044593185424804685, 0.0440893440246582, 0.04378335952758789, 0.043760513305664064, 0.04387209701538086, 0.04390307235717773, 0.04395391845703125, 0.044285663604736326, 0.04386857604980469, 0.04383961486816406, 0.043894142150878904, 0.04389116668701172, 0.04387651062011719, 0.04432281494140625, 0.04400537490844727, 0.04408031845092773, 0.044036510467529294, 0.043999488830566404, 0.04415830230712891, 0.044028736114501955, 0.04422592163085937, 0.04446214294433594, 0.04462035369873047, 0.044510303497314455, 0.04433545684814453, 0.04418617630004883, 0.04419583892822266, 0.04410492706298828, 0.044122753143310545, 0.04407107162475586, 0.04406272125244141, 0.045205726623535156, 0.04408467102050781, 0.044037761688232424, 0.04427142333984375, 0.04396739196777344, 0.04431820678710938, 0.04428156661987305, 0.04407497787475586, 0.04399379348754883, 0.043956382751464844, 0.044062686920166017, 0.044427070617675785, 0.04437596893310547, 0.044773662567138675, 0.04437923049926758, 0.0443524169921875, 0.04460879898071289, 0.044306560516357424, 0.04417801666259766, 0.044039424896240235, 0.04406915283203125, 0.04416140747070312, 0.044214271545410154, 0.044101726531982424, 0.04401049423217773, 0.043985599517822264, 0.04444124984741211, 0.044292064666748045, 0.04413510513305664, 0.04442931365966797, 0.044044288635253906, 0.046548992156982424, 0.04462387084960937, 0.04402166366577148, 0.043974334716796876, 0.04400294494628906, 0.04392819213867188, 0.04390054321289062, 0.043866176605224606, 0.0439136962890625, 0.043972545623779294, 0.043915328979492185, 0.0439068489074707, 0.04389295959472656, 0.04404137420654297, 0.04440150451660156, 0.044017406463623045, 0.04400908660888672, 0.043948352813720705, 0.04391763305664063, 0.04442652893066406, 0.04427027130126953, 0.044332927703857425, 0.044404640197753906, 0.04436383819580078, 0.044285793304443356, 0.04435113525390625, 0.04429663848876953, 0.044239105224609374, 0.044549728393554686, 0.044125728607177735, 0.044233184814453125, 0.04430889511108398, 0.044142593383789064, 0.044232318878173825, 0.04417574310302735, 0.04448019027709961, 0.044326847076416015, 0.044074913024902344, 0.04396620941162109, 0.04408598327636719, 0.0441888313293457, 0.04417622375488281, 0.044060321807861326, 0.04411996841430664, 0.04421865463256836, 0.04436156845092774, 0.04438995361328125, 0.044222942352294924, 0.04445622253417969, 0.0441890869140625, 0.04406937789916992, 0.04425532913208008, 0.04409958267211914, 0.043963871002197265, 0.04402025604248047, 0.04400886535644531, 0.044102241516113284, 0.04410947036743164, 0.04396192169189453, 0.043973281860351564, 0.04393996810913086, 0.0440588493347168, 0.044074047088623045]",tokens/s,22.626427776786727,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 
8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,972.853248,7019.036672,0.0,6616.51456,6410.443264,s,1,8.84169140625,8.84169140625,0.0,8.84169140625,8.84169140625,8.84169140625,8.84169140625,[8.84169140625],,kWh,6.269967345843952e-06,6.720721003028321e-07,2.075557216003193e-06,9.017596662149977e-06,,MB,1371.713536,7071.465472,0.0,6658.4576,5286.424064,s,10,3.8821061401367185,0.3882106140136719,0.0011918428234150092,0.38811412048339844,0.39022032775878907,0.3902323715209961,0.39024200653076174,"[0.38843719482421873, 0.3865806579589844, 0.3865809936523438, 0.3881293640136719, 0.39024441528320314, 0.38801974487304686, 0.3874368896484375, 0.388098876953125, 0.3902176513671875, 0.3883603515625]",tokens/s,659.4358597083187,kWh,1.165535801073725e-05,1.2853867237127433e-06,7.740722004538491e-06,2.0681466738988483e-05,tokens/kWh,12378232.319344716,MB,1383.424,7100.8256,0.0,6687.817728,5299.722752,s,10,29.6685830078125,2.9668583007812503,0.017246655928956505,2.9686268310546877,2.9863890869140626,2.9869007202148437,2.987310026855469,"[2.94070263671875, 2.942963134765625, 2.953101318359375, 2.95559326171875, 2.986275390625, 2.961717041015625, 2.97553662109375, 2.98125927734375, 2.987412353515625, 2.98402197265625]",tokens/s,21.23458339193701,kWh,8.708934804926282e-05,9.606176330484135e-06,5.374118829206145e-05,0.00015043671267180842,tokens/kWh,418780.7542527223,,s,630,29.666879486084028,0.04709028489854601,0.0007428415405452109,0.04701177597045898,0.04774808959960938,0.04812331657409667,0.04961821510314941,"[0.04767119979858398, 0.046819873809814457, 0.04650393676757812, 0.04654953765869141, 0.04633932876586914, 0.04683011245727539, 0.046483455657958986, 0.04749894332885742, 0.04698348617553711, 0.046575103759765625, 0.04668262481689453, 0.04646297454833984, 0.04708332824707031, 0.04606399917602539, 0.046104095458984376, 0.046225696563720706, 0.04609846496582031, 0.04639126586914062, 0.04633804702758789, 0.04611072158813476, 0.04636876678466797, 0.047136993408203126, 0.04622723388671875, 0.046196033477783206, 0.04632767868041992, 0.04670547103881836, 0.046243839263916016, 0.04608937454223633, 0.04620534515380859, 0.046082496643066406, 0.046636127471923826, 0.04626729583740234, 0.04681932830810547, 0.046548095703125, 0.04709056091308594, 0.04632572937011719, 0.046446624755859374, 0.046417278289794923, 0.04639769744873047, 0.0466596794128418, 0.0467558708190918, 0.04674291229248047, 0.04672367858886719, 0.046973217010498045, 0.047103969573974606, 0.04667708969116211, 0.04682643127441406, 0.04693552017211914, 0.047614494323730466, 0.046995681762695314, 0.04712006378173828, 0.04694240188598633, 0.046911392211914066, 0.046903297424316405, 0.047124481201171874, 0.046446239471435544, 0.04644870376586914, 0.046836193084716794, 0.04705465698242187, 0.0483164176940918, 0.04669427108764648, 0.046887039184570316, 0.04651430511474609, 0.04778684616088867, 0.04693183898925781, 0.04648777770996094, 0.046564544677734375, 0.04660732650756836, 0.04665926361083984, 0.04680300903320313, 0.04707328033447265, 0.047355422973632814, 0.04678499221801758, 0.04662483215332031, 0.046833599090576175, 0.047108097076416014, 0.046540672302246094, 0.04662879943847656, 0.046467262268066405, 0.04655923080444336, 0.04672617721557617, 0.04693920135498047, 0.04706646347045899, 0.04674822235107422, 0.04675363159179687, 0.046737022399902343, 0.04694480133056641, 0.046516319274902344, 
0.046626014709472655, 0.0465968017578125, 0.04628591918945312, 0.04619561767578125, 0.04670841598510742, 0.046618465423583985, 0.04684848022460938, 0.04694220733642578, 0.046620670318603515, 0.04654694366455078, 0.04651542282104492, 0.04719491195678711, 0.0461844482421875, 0.04637491226196289, 0.04622335815429687, 0.046182239532470706, 0.04625785446166992, 0.04654537582397461, 0.046311424255371096, 0.047181377410888674, 0.046213569641113283, 0.04628889465332031, 0.04630681610107422, 0.04626073455810547, 0.046185665130615235, 0.046414592742919925, 0.04946335983276367, 0.04718288040161133, 0.047341537475585935, 0.04684492874145508, 0.04671004867553711, 0.04650595092773437, 0.04655750274658203, 0.04665107345581055, 0.046855167388916014, 0.046496734619140626, 0.046693153381347656, 0.04662825775146484, 0.04705923080444336, 0.04653577423095703, 0.04642649459838867, 0.04649833679199219, 0.04653875350952148, 0.04642540740966797, 0.04633452987670898, 0.046163070678710935, 0.046529281616210935, 0.046770431518554687, 0.046818401336669924, 0.046871456146240234, 0.047935489654541016, 0.04772201538085938, 0.047247039794921876, 0.04674028778076172, 0.047075103759765625, 0.046448543548583986, 0.0463260498046875, 0.046292991638183595, 0.047265792846679686, 0.04610844802856445, 0.04624201583862304, 0.04620841598510742, 0.04617891311645508, 0.04646047973632812, 0.046485950469970706, 0.046380958557128905, 0.046214527130126956, 0.04630553436279297, 0.046266014099121094, 0.0462913932800293, 0.04638553619384766, 0.04634624099731445, 0.047547870635986325, 0.04666729736328125, 0.04704972839355469, 0.04705043029785156, 0.04701216125488281, 0.04678041458129883, 0.04678041458129883, 0.04758323287963867, 0.04886937713623047, 0.04735184097290039, 0.047402912139892575, 0.04653593444824219, 0.04721136093139648, 0.046557407379150394, 0.04700748825073242, 0.04706246566772461, 0.047262241363525394, 0.04758940887451172, 0.048858238220214845, 0.04736016082763672, 0.04740784072875977, 0.047576160430908204, 0.047027103424072264, 0.046690174102783207, 0.04721062469482422, 0.04725964736938477, 0.04682745742797852, 0.04654886245727539, 0.04694172668457031, 0.04806361770629883, 0.04675468826293945, 0.04699280166625976, 0.046967391967773435, 0.04679811096191406, 0.04772937774658203, 0.04695142364501953, 0.04662988662719727, 0.04659404754638672, 0.04683468627929688, 0.04675920104980469, 0.04690505599975586, 0.04663296127319336, 0.046716064453125, 0.046594303131103514, 0.046198463439941405, 0.0465926399230957, 0.046502174377441405, 0.04616515350341797, 0.04629919815063477, 0.04639363098144531, 0.04625664138793945, 0.0467426872253418, 0.04620783996582031, 0.04642604827880859, 0.046599456787109375, 0.04648771286010742, 0.0461868782043457, 0.046199039459228514, 0.046198368072509766, 0.04658422470092773, 0.046227584838867186, 0.04637887954711914, 0.04641737747192383, 0.04634064102172852, 0.04612895965576172, 0.04683977508544922, 0.04632086563110351, 0.04642099380493164, 0.046467071533203126, 0.046473217010498044, 0.047061054229736325, 0.046564640045166014, 0.047128894805908206, 0.04703267288208008, 0.04687209701538086, 0.0467993278503418, 0.046597217559814455, 0.04715817642211914, 0.047312480926513675, 0.04723865509033203, 0.04774777603149414, 0.047120800018310545, 0.046954017639160156, 0.05216080093383789, 0.047032318115234374, 0.046921470642089846, 0.0469117431640625, 0.047933441162109375, 0.04908425521850586, 0.0485214729309082, 0.04780022430419922, 0.04748681640625, 0.0484505615234375, 0.04762345504760742, 0.04722163009643555, 
0.04746799850463867, 0.047493408203125, 0.047605857849121094, 0.0471899528503418, 0.04737187194824219, 0.04725539016723633, 0.047036991119384766, 0.047623680114746096, 0.04674387359619141, 0.04730064010620117, 0.04685027313232422, 0.04718182373046875, 0.04719820785522461, 0.04688281631469727, 0.04686140823364258, 0.046756767272949216, 0.047099903106689454, 0.047010879516601566, 0.04696736145019531, 0.04753216171264649, 0.047553985595703126, 0.047588161468505856, 0.046992481231689455, 0.04706806564331055, 0.046927520751953125, 0.04713907241821289, 0.04703350448608398, 0.046926559448242186, 0.04733769607543945, 0.047750911712646484, 0.04750969696044922, 0.04776483154296875, 0.047618785858154294, 0.047105342864990234, 0.048050880432128906, 0.047745025634765625, 0.04761804962158203, 0.04784265518188477, 0.0477537612915039, 0.04769804763793945, 0.04747683334350586, 0.047344734191894534, 0.047340351104736327, 0.04711859130859375, 0.0470013427734375, 0.046893054962158204, 0.04730060958862305, 0.05035171127319336, 0.047368606567382815, 0.04698521423339844, 0.04723292922973633, 0.04673545455932617, 0.047930686950683594, 0.047454910278320314, 0.048444446563720704, 0.0471418571472168, 0.04711337661743164, 0.04695331192016602, 0.04819968032836914, 0.04795724868774414, 0.048699390411376955, 0.04727596664428711, 0.04722079849243164, 0.04736614227294922, 0.04726937484741211, 0.04778649520874023, 0.04715520095825195, 0.047021568298339846, 0.0470494384765625, 0.04722022247314453, 0.046932254791259766, 0.048504894256591796, 0.0544799690246582, 0.04671331024169922, 0.04771059036254883, 0.04710790252685547, 0.04719424057006836, 0.046886974334716794, 0.04733747100830078, 0.04640927886962891, 0.04621152114868164, 0.0464117431640625, 0.04624796676635742, 0.04638684844970703, 0.04610083389282227, 0.04605939102172851, 0.046525566101074216, 0.04690547180175781, 0.04711465454101563, 0.04640528106689453, 0.046492481231689455, 0.04645273590087891, 0.047443199157714847, 0.0472828483581543, 0.04725785446166992, 0.04711407852172852, 0.0466431999206543, 0.04664934539794922, 0.04669222259521484, 0.046723201751708986, 0.047247135162353515, 0.046559585571289065, 0.04635635375976563, 0.04650188827514649, 0.04665753555297852, 0.04656742477416992, 0.047058944702148435, 0.04675788879394531, 0.04779561614990235, 0.04672480010986328, 0.04657244873046875, 0.04658585739135742, 0.04633318328857422, 0.04647574234008789, 0.04690972900390625, 0.04629913711547851, 0.046491649627685545, 0.04720355224609375, 0.04639619064331055, 0.04710400009155274, 0.04650511932373047, 0.047311710357666015, 0.04664934539794922, 0.04828236770629883, 0.04741120147705078, 0.04705484771728516, 0.04671039962768555, 0.046891647338867186, 0.0466511344909668, 0.04652361679077149, 0.04689152145385742, 0.04709609603881836, 0.04720230484008789, 0.047230976104736325, 0.04796441650390625, 0.04740038299560547, 0.046800670623779295, 0.04707587051391601, 0.047239070892333986, 0.04700960159301758, 0.047310783386230466, 0.04735353469848633, 0.04708240127563477, 0.0477468147277832, 0.047338623046875, 0.04721958541870117, 0.04817216110229492, 0.047129024505615236, 0.04699590301513672, 0.04678188705444336, 0.04648812866210938, 0.04735702514648438, 0.046945182800292966, 0.04752588653564453, 0.04738435363769531, 0.04742086410522461, 0.05184592056274414, 0.047568191528320314, 0.04755654525756836, 0.04709388732910156, 0.04687503814697266, 0.04678607940673828, 0.04653254318237305, 0.04645292663574219, 0.04750739288330078, 0.047075969696044925, 0.04659750366210937, 
0.0465516471862793, 0.04644220733642578, 0.046575935363769534, 0.047001598358154296, 0.04660838317871094, 0.0468125114440918, 0.04638175964355469, 0.04682339096069336, 0.0465530891418457, 0.04690678405761719, 0.04769033432006836, 0.04654694366455078, 0.046522529602050784, 0.050409313201904296, 0.047177921295166014, 0.04716505432128906, 0.04721478271484375, 0.04679462432861328, 0.04959660720825195, 0.04848844909667969, 0.04782080078125, 0.04741654586791992, 0.04746867370605469, 0.04716815948486328, 0.04702822494506836, 0.04694630432128906, 0.04695017623901367, 0.047067359924316404, 0.04688281631469727, 0.04711407852172852, 0.047128288269042966, 0.04750553512573242, 0.048446945190429684, 0.047411968231201175, 0.04704060745239258, 0.046895103454589845, 0.04722483062744141, 0.04694956970214844, 0.04706592178344727, 0.047405055999755856, 0.04685315322875976, 0.047723487854003904, 0.04704819107055664, 0.04702668762207031, 0.046954238891601566, 0.047828479766845705, 0.046981758117675784, 0.046884990692138674, 0.046838943481445315, 0.0471003532409668, 0.04683407974243164, 0.04684185409545898, 0.04679679870605469, 0.04670003128051758, 0.046696575164794925, 0.04705523300170898, 0.04735916900634766, 0.04739155197143555, 0.04749484634399414, 0.04752025604248047, 0.04711609649658203, 0.047441856384277344, 0.04702646255493164, 0.04721811294555664, 0.04804643249511719, 0.047222782135009765, 0.04714412689208984, 0.04750214385986328, 0.04903635025024414, 0.04789251327514649, 0.048299041748046875, 0.04774396896362305, 0.0471473274230957, 0.04712918472290039, 0.047367774963378906, 0.04751219177246094, 0.047322399139404295, 0.04745881652832031, 0.04784444808959961, 0.047481758117675785, 0.047726593017578124, 0.047047775268554685, 0.048764926910400394, 0.04792432022094727, 0.04735443115234375, 0.04702640151977539, 0.046835838317871095, 0.04751760101318359, 0.047070430755615233, 0.04712297439575195, 0.04740716934204101, 0.04709814453125, 0.04957913589477539, 0.04750118255615234, 0.04748944091796875, 0.04695510482788086, 0.04705267333984375, 0.04760569763183594, 0.04962704086303711, 0.04709331130981445, 0.047149791717529296, 0.04740915298461914, 0.047469886779785156, 0.04718662261962891, 0.04735795211791992, 0.047527935028076174, 0.04780003356933594, 0.04732547378540039, 0.04771430587768555, 0.04774399948120117, 0.04757196807861328, 0.04777369689941406, 0.04836368179321289, 0.04754211044311524, 0.04737948989868164, 0.04758012771606445, 0.047195487976074216, 0.04720649719238281, 0.04718956756591797, 0.04682854461669922, 0.046779678344726565, 0.04713663864135742, 0.04748988723754883, 0.04777164840698242, 0.047818367004394534, 0.047384159088134765, 0.047319808959960935, 0.047304737091064454, 0.04684799957275391, 0.04698054504394531, 0.046811710357666014, 0.047013248443603516, 0.04734739303588867, 0.04722742462158203, 0.04667001724243164, 0.04681955337524414, 0.0470118408203125, 0.04777983856201172, 0.04681932830810547, 0.046852096557617184, 0.04673500823974609, 0.04759574508666992, 0.04832166290283203, 0.04741603088378906, 0.04764217758178711, 0.048570369720458986, 0.04760985565185547, 0.04670259094238281, 0.04663488006591797, 0.04682144165039062, 0.04734518432617187, 0.047806400299072266, 0.04779199981689453, 0.047677566528320316, 0.04734435272216797, 0.0471223030090332, 0.04724531173706055, 0.047233024597167966, 0.04692172622680664, 0.046814720153808595, 0.04689766311645508, 0.04724326324462891, 0.046863872528076174, 0.047271743774414066, 0.046905887603759765, 0.047535808563232425, 0.04693040084838867, 
0.04681683349609375, 0.04665593719482422, 0.04671408081054688, 0.046641952514648435, 0.04818297576904297, 0.04763616180419922, 0.04721728134155274, 0.04758854293823242, 0.0478680305480957, 0.0473891830444336, 0.047894718170166016, 0.04766064071655273, 0.047233505249023436, 0.047136703491210935, 0.04797849655151367, 0.04723507308959961, 0.04715315246582031, 0.046844993591308594, 0.04755257415771484, 0.04755708694458008, 0.047535552978515624, 0.04744905471801758, 0.047446014404296875, 0.047101951599121096, 0.04741500854492187, 0.04738281631469726, 0.04712857437133789, 0.04712432098388672, 0.04676419067382812, 0.04675920104980469, 0.04668236923217774, 0.04700163269042969, 0.049015232086181644, 0.047527935028076174, 0.04765423965454101, 0.04763260650634766, 0.047575489044189456, 0.047026176452636716, 0.04701171112060547, 0.051308319091796874, 0.04707155227661133]",tokens/s,21.23580271715189,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 1047, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 890, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma2/modeling_gemma2.py"", line 366, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp7gcr9mj7/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1001, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 571, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 167, in 
__init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 442360 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 
1001, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 571, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 167, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 441476 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1001, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 780, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 571, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 167, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 443151 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma2,google/gemma-2-2b,google/gemma-2-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,973.058048,7019.036672,0.0,6616.51456,6410.443264,s,1,8.63152734375,8.63152734375,0.0,8.63152734375,8.63152734375,8.63152734375,8.63152734375,[8.63152734375],,kWh,6.204855066666444e-06,6.571124701120822e-07,2.0411127439994015e-06,8.903080280777927e-06,,MB,1490.26816,7075.659776,0.0,6658.4576,5286.424064,s,10,0.6550041007995605,0.06550041007995606,0.001675851382012331,0.06600307464599608,0.06655716094970703,0.06661680297851563,0.0666645166015625,"[0.06654390716552734, 0.06578166198730469, 0.06530480194091796, 0.06667644500732423, 0.06634095764160156, 0.06546320343017578, 0.06607123565673828, 0.06063180923461914, 0.0659349136352539, 0.06625516510009766]",tokens/s,3908.3724771723096,kWh,2.12374646648551e-06,2.342106478148533e-07,1.4055828474927585e-06,3.7635399617931223e-06,tokens/kWh,68021065.9641913,MB,1510.199296,7105.019904,0.0,6687.817728,5299.722752,s,10,30.890808837890628,3.089080883789063,0.008562685595575388,3.0874703369140626,3.099440283203125,3.1012765380859375,3.1027455419921877,"[3.089718505859375, 3.0790546875, 3.08043896484375, 3.0824677734375, 3.0990322265625, 3.094529296875, 3.08522216796875, 3.09791455078125, 3.07931787109375, 3.10311279296875]",tokens/s,20.394415805235983,kWh,8.952622424184753e-05,9.874676494940653e-06,5.5046017869307225e-05,0.00015444691860609544,tokens/kWh,407907.1344937381,,s,630,30.88918445968628,0.049030451523311556,0.0007795951180883799,0.048876720428466795,0.049619593048095705,0.04992401542663574,0.05123214694976807,"[0.0492239990234375, 0.04866252899169922, 0.04874649429321289, 0.04883865737915039, 0.04914176177978516, 0.04893644714355469, 0.04881235122680664, 0.04864176177978516, 0.0487756462097168, 0.049896961212158204, 0.04872652816772461, 0.049686527252197264, 0.048922622680664066, 0.04870553588867187, 0.04849193572998047, 0.05810953521728516, 0.0486605110168457, 0.048734783172607425, 0.048521537780761716, 0.050866081237792966, 0.0491317138671875, 0.04895068740844727, 0.048734302520751956, 0.04852345657348633, 0.04886671829223633, 0.048577438354492186, 0.048446495056152346, 0.04841366577148438, 0.04860847854614258, 0.04886403274536133, 0.04882592010498047, 0.049377727508544925, 0.04976025772094726, 0.04869734573364258, 0.04858652877807617, 0.04850710296630859, 0.04839955139160156, 0.048417598724365234, 0.048517120361328124, 0.048378944396972656, 0.04895606231689453, 0.04920963287353516, 0.04865433502197265, 0.048518302917480466, 0.04907708740234375, 0.048992286682128905, 0.04871734237670899, 0.04872771072387695, 0.048777984619140624, 0.048869407653808594, 0.04948515319824219, 0.04918339157104492, 0.050107841491699216, 0.049105472564697265, 0.048784511566162106, 0.04857955169677734, 
0.04863580703735351, 0.048707584381103515, 0.048947200775146485, 0.04899020767211914, 0.0492476806640625, 0.04899814224243164, 0.04860195159912109, 0.048680545806884766, 0.049027488708496096, 0.049070079803466796, 0.04903225708007813, 0.049554367065429684, 0.04911536026000977, 0.04901456069946289, 0.048979007720947265, 0.049487903594970704, 0.04952355194091797, 0.048877471923828124, 0.04867808151245117, 0.04848495864868164, 0.049240127563476566, 0.04873980712890625, 0.04929622268676758, 0.050337791442871094, 0.04893904113769531, 0.04837577438354492, 0.04841852951049805, 0.04887171173095703, 0.04880134582519531, 0.048573089599609376, 0.048721694946289064, 0.04909875106811523, 0.04888576126098633, 0.04937318420410156, 0.0489431037902832, 0.04950630569458008, 0.049018878936767575, 0.04994867324829102, 0.04866252899169922, 0.04889395141601562, 0.04885299301147461, 0.04855398559570313, 0.04838604736328125, 0.04948998260498047, 0.04866886520385742, 0.04857011032104492, 0.04855807876586914, 0.04830752182006836, 0.04838243103027344, 0.048371070861816405, 0.04852617645263672, 0.04877097702026367, 0.048545886993408206, 0.04847180938720703, 0.049119487762451175, 0.04973385620117188, 0.050756702423095705, 0.04909449768066406, 0.048690017700195314, 0.04855807876586914, 0.04836556625366211, 0.048302078247070314, 0.048403648376464846, 0.04846851348876953, 0.04856041717529297, 0.04871782302856445, 0.048418815612792966, 0.048396289825439455, 0.048353279113769534, 0.04833420944213867, 0.0486236801147461, 0.04863395309448242, 0.04895734405517578, 0.04945507049560547, 0.04954729461669922, 0.04902262496948242, 0.04943907165527344, 0.0491333122253418, 0.04933657455444336, 0.04855807876586914, 0.04850483322143555, 0.04844748687744141, 0.048360767364501955, 0.04824700927734375, 0.04858108901977539, 0.048435199737548826, 0.048627902984619144, 0.049618881225585935, 0.04875251388549805, 0.04854691314697265, 0.04908329772949219, 0.04850412750244141, 0.04849939346313477, 0.04910291290283203, 0.04853343963623047, 0.04863520050048828, 0.04904540634155274, 0.049840801239013674, 0.05001023864746094, 0.04974796676635742, 0.04916223907470703, 0.04884041595458984, 0.04868329620361328, 0.048540672302246096, 0.04896044921875, 0.04870665740966797, 0.048731105804443356, 0.04856217575073242, 0.04854988861083984, 0.04928623962402344, 0.048812511444091794, 0.04866707229614258, 0.04847411346435547, 0.04845072174072266, 0.048767166137695314, 0.04857289505004883, 0.04871587371826172, 0.0494183349609375, 0.05028790283203125, 0.04967654418945312, 0.049249950408935546, 0.04887347030639649, 0.04869152069091797, 0.04875491333007813, 0.04872192001342773, 0.048879905700683596, 0.048611038208007815, 0.04853583908081055, 0.04842313766479492, 0.04858774566650391, 0.04877366256713867, 0.049363231658935545, 0.049100479125976565, 0.049086368560791016, 0.04869529724121094, 0.048539104461669924, 0.04873292922973633, 0.04908009719848633, 0.04913151931762695, 0.04880384063720703, 0.049018878936767575, 0.04909635162353516, 0.04888137435913086, 0.048516895294189455, 0.048944000244140626, 0.048336158752441405, 0.04830073547363281, 0.04837919998168945, 0.04827001571655273, 0.04853903961181641, 0.04862627029418945, 0.04834463882446289, 0.04823494338989258, 0.04878652954101562, 0.04891891098022461, 0.04958262252807617, 0.04876889419555664, 0.04856230545043945, 0.04909260940551758, 0.048738079071044924, 0.05137430572509766, 0.048852096557617186, 0.04868387222290039, 0.0491376953125, 0.04985027313232422, 0.048939102172851565, 0.04866048049926758, 
0.04871331024169922, 0.0485687370300293, 0.0487599983215332, 0.04851340866088867, 0.0485906867980957, 0.04841680145263672, 0.04865491104125977, 0.04912083053588867, 0.0489865608215332, 0.049203201293945314, 0.04986880111694336, 0.04908233642578125, 0.04894460678100586, 0.048476734161376954, 0.04876435089111328, 0.04866876983642578, 0.04862022399902344, 0.04886735916137695, 0.048801536560058596, 0.04907622528076172, 0.04869529724121094, 0.0487116813659668, 0.048889598846435546, 0.04959052658081055, 0.05057260894775391, 0.0498223991394043, 0.04956774520874024, 0.04948787307739258, 0.0487691535949707, 0.048804832458496095, 0.04858780670166016, 0.049202014923095706, 0.048889984130859376, 0.04904140853881836, 0.048658432006835936, 0.048782752990722655, 0.04872048187255859, 0.049364990234375, 0.048639999389648435, 0.04967628860473633, 0.048885791778564454, 0.04862736129760742, 0.048425281524658206, 0.048962974548339845, 0.04903200149536133, 0.04867459106445313, 0.048710655212402344, 0.04846694564819336, 0.04904483032226563, 0.04920150375366211, 0.048668193817138675, 0.04908524703979492, 0.04881103897094727, 0.04878432083129883, 0.04830822372436523, 0.048363521575927736, 0.05046796798706055, 0.04913593673706055, 0.04896636962890625, 0.04843859100341797, 0.04841904067993164, 0.04847443389892578, 0.04854988861083984, 0.048402080535888674, 0.04852703857421875, 0.048480926513671876, 0.048760543823242186, 0.049518878936767576, 0.04938694381713867, 0.04939219284057617, 0.05031628799438476, 0.05054057693481445, 0.04949091339111328, 0.0489697265625, 0.048570369720458986, 0.04866867065429688, 0.04981760025024414, 0.04900265502929688, 0.049200576782226564, 0.049027488708496096, 0.04881603240966797, 0.04932755279541016, 0.04937152099609375, 0.04929878234863281, 0.04966003036499023, 0.04965868759155274, 0.04919705581665039, 0.0510333137512207, 0.056392478942871097, 0.05044950485229492, 0.049309696197509766, 0.04937820816040039, 0.05123356628417969, 0.049788768768310544, 0.049932224273681644, 0.049428192138671875, 0.04929516983032227, 0.04976460647583008, 0.04870953750610352, 0.04908892822265625, 0.04922995376586914, 0.049108768463134764, 0.049217758178710935, 0.04898611068725586, 0.04873625564575195, 0.04875215911865234, 0.049307167053222654, 0.04988604736328125, 0.04921456146240234, 0.048718719482421874, 0.04880364990234375, 0.048496959686279296, 0.04867891311645508, 0.04892409515380859, 0.04923654556274414, 0.049748031616210935, 0.049481246948242186, 0.04940544128417969, 0.04927580642700195, 0.0494571533203125, 0.04912470245361328, 0.04870825576782226, 0.04852431869506836, 0.04855497741699219, 0.04854131317138672, 0.048392478942871096, 0.048932575225830076, 0.048715423583984375, 0.049441505432128906, 0.05083955383300781, 0.05122867202758789, 0.048881664276123046, 0.04881612777709961, 0.04891571044921875, 0.04904012680053711, 0.04865228652954102, 0.0488197135925293, 0.04851958465576172, 0.04894950485229492, 0.049083839416503905, 0.04876275253295898, 0.04887596893310547, 0.048998497009277345, 0.04877273559570312, 0.04862604904174805, 0.04879942321777344, 0.049123519897460936, 0.04865577697753906, 0.04861206436157227, 0.04878745651245117, 0.04904489517211914, 0.0491278076171875, 0.04908835220336914, 0.04939404678344726, 0.049117088317871094, 0.049166751861572267, 0.04872745513916016, 0.048932830810546876, 0.04894086456298828, 0.049253185272216796, 0.04964736175537109, 0.04876950454711914, 0.048549121856689456, 0.04979561614990234, 0.04861337661743164, 0.04860710525512695, 0.049539199829101564, 
0.04937932968139649, 0.04897568130493164, 0.0490313606262207, 0.048683006286621096, 0.048486400604248046, 0.04878950500488281, 0.04870348739624023, 0.04900649642944336, 0.04898371124267578, 0.048879520416259765, 0.04912908935546875, 0.04868745422363281, 0.04859552001953125, 0.04950835037231445, 0.04963081741333008, 0.04907827377319336, 0.049012126922607424, 0.04856934356689453, 0.04866048049926758, 0.048519168853759766, 0.048533504486083984, 0.04844134521484375, 0.04870668792724609, 0.04853747177124024, 0.048804351806640625, 0.049396289825439456, 0.050914878845214846, 0.04898035049438477, 0.048732158660888675, 0.051608734130859375, 0.04913238525390625, 0.048906238555908206, 0.048828575134277345, 0.048492095947265626, 0.048742687225341794, 0.049186817169189455, 0.0489615364074707, 0.04882636642456055, 0.04874176025390625, 0.04897260665893555, 0.04889740753173828, 0.048762336730957034, 0.048632801055908205, 0.048928768157958984, 0.04891648101806641, 0.0489881591796875, 0.04900044631958008, 0.048810142517089844, 0.04863068771362305, 0.048558815002441406, 0.04864412689208984, 0.04937564849853516, 0.049121471405029295, 0.04891638565063477, 0.04932812881469727, 0.049274879455566405, 0.049844352722167966, 0.04916537475585937, 0.0491847038269043, 0.0490173110961914, 0.0487632942199707, 0.049995712280273434, 0.049393505096435544, 0.04907644653320312, 0.04956774520874024, 0.049272640228271485, 0.049121471405029295, 0.04901593780517578, 0.04863449478149414, 0.04921164703369141, 0.04906598281860351, 0.04884275054931641, 0.04939519882202149, 0.04913817596435547, 0.049121280670166016, 0.04880281448364258, 0.04903219223022461, 0.04930678558349609, 0.04878422546386719, 0.0489431037902832, 0.048825950622558595, 0.04899676895141602, 0.05019023895263672, 0.05002374267578125, 0.04980201721191406, 0.049220703125, 0.04936592102050781, 0.04954316711425781, 0.04997324752807617, 0.05093580627441406, 0.04935475158691406, 0.0489716796875, 0.048643264770507816, 0.0484955825805664, 0.04895436859130859, 0.049243072509765624, 0.049156288146972656, 0.048895679473876956, 0.048691200256347655, 0.04896371078491211, 0.048912384033203124, 0.04876287841796875, 0.049192958831787106, 0.050874366760253906, 0.04914102554321289, 0.04890070343017578, 0.04913983917236328, 0.04880588912963867, 0.04876678466796875, 0.048799934387207033, 0.04865193557739258, 0.04885334396362305, 0.04853343963623047, 0.048465919494628903, 0.04926425552368164, 0.04876752090454101, 0.049065601348876955, 0.04880812835693359, 0.048781791687011716, 0.04956918334960937, 0.048853248596191404, 0.04878147125244141, 0.048353279113769534, 0.048726016998291016, 0.04842905426025391, 0.04865657424926758, 0.04849404907226563, 0.04844169616699219, 0.04845945739746094, 0.048538047790527346, 0.048617343902587894, 0.04864220809936524, 0.04894499206542969, 0.048433151245117184, 0.04948582458496094, 0.04866191864013672, 0.048788063049316405, 0.04888131332397461, 0.048838302612304686, 0.04976844787597656, 0.04887539291381836, 0.04869782257080078, 0.0487955207824707, 0.04854012680053711, 0.04848204803466797, 0.04857670211791992, 0.04882339096069336, 0.04866147232055664, 0.04870547103881836, 0.048572479248046876, 0.048570240020751956, 0.049307777404785154, 0.048537792205810545, 0.04873606491088867, 0.048797248840332035, 0.04847251129150391, 0.04905187225341797, 0.048915264129638675, 0.049218528747558596, 0.04965280151367187, 0.04910995101928711, 0.05007324981689453, 0.049285472869873045, 0.04843724822998047, 0.04910659027099609, 0.048724319458007814, 0.04882255935668945, 
0.048679679870605466, 0.04853449630737305, 0.04850483322143555, 0.048475841522216796, 0.04914947128295898, 0.04964419174194336, 0.04991398239135742, 0.049414142608642575, 0.049198654174804686, 0.0490316162109375, 0.04954281616210938, 0.04971974563598633, 0.04910099029541016, 0.04876217651367187, 0.04879196929931641, 0.04862156677246094, 0.04868505477905274, 0.04870479965209961, 0.04899436950683594, 0.04874102401733398, 0.04842496109008789, 0.048791553497314455, 0.048430816650390625, 0.04861523056030274, 0.04895996856689453, 0.048914432525634766, 0.04931135940551758, 0.049619327545166014, 0.049549312591552735, 0.0490967025756836, 0.049006591796875, 0.0487606086730957, 0.04877888107299805, 0.048736862182617184, 0.048535072326660156, 0.04963375854492187, 0.048691200256347655, 0.048792831420898436, 0.04863071823120117, 0.04907807922363281, 0.04959641647338867, 0.04912947082519531, 0.04868096160888672, 0.049261600494384765, 0.04962198257446289, 0.04919705581665039, 0.049299007415771486, 0.0493900146484375, 0.0490777587890625, 0.049829631805419924, 0.04939238357543945, 0.04915302276611328, 0.049551647186279295, 0.04907452774047852, 0.04899030303955078, 0.048726207733154295, 0.04914745712280273, 0.048968223571777346, 0.04873567962646484, 0.04854617691040039, 0.05776812744140625, 0.04886528015136719, 0.05252451324462891, 0.04945379257202148, 0.04997235107421875, 0.04951724624633789, 0.049364990234375, 0.04895724868774414, 0.04927097702026367, 0.04946457672119141, 0.04904012680053711, 0.048607265472412106, 0.048751743316650394]",tokens/s,20.39548829209842,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.909248,14281.474048,0.0,13878.951936,13865.632768,s,1,7.40221728515625,7.40221728515625,0.0,7.40221728515625,7.40221728515625,7.40221728515625,7.40221728515625,[7.40221728515625],,kWh,9.177912829143981e-06,9.95413461744541e-07,4.876948345999832e-06,1.5050274636888353e-05,,MB,1310.253056,14705.098752,0.0,14289.993728,14237.628416,s,10,1.7139015808105469,0.17139015808105468,0.005320159698691488,0.1723859558105469,0.1769791976928711,0.17698137435913086,0.17698311569213868,"[0.15749183654785157, 0.17054681396484375, 0.1727344970703125, 0.169306396484375, 0.1693844451904297, 0.17438188171386718, 0.17697871398925782, 0.17203741455078125, 0.17698355102539062, 0.1740560302734375]",tokens/s,1493.6680312700992,kWh,5.0241326041670595e-06,5.54059476104806e-07,3.3252427731863206e-06,8.903434853458187e-06,tokens/kWh,28752948.071559925,MB,1330.21696,14872.870912,0.0,14457.765888,14415.235584,s,10,38.14640063476563,3.8146400634765625,0.004063854581580794,3.8156893310546875,3.819505712890625,3.819750048828125,3.819945517578125,"[3.815798828125, 3.809121826171875, 3.8086435546875, 3.809232177734375, 3.81423876953125, 3.815579833984375, 3.81828564453125, 3.819994384765625, 3.81605419921875, 
3.819451416015625]",tokens/s,16.515319650521224,kWh,0.00011144962674999969,1.2293141137682412e-05,7.405518024221468e-05,0.00019779794812989674,tokens/kWh,318506.84294574684,,s,630,38.14320635986324,0.06054477199978298,0.0005330866898963981,0.06044280052185058,0.06090510139465332,0.06105789794921875,0.06389833847045899,"[0.06413423919677734, 0.061717121124267575, 0.06087295913696289, 0.06033366394042969, 0.06004515075683594, 0.0600081901550293, 0.060279617309570314, 0.06009446334838867, 0.06031504058837891, 0.06018905639648438, 0.060303585052490234, 0.06053257751464844, 0.06034038543701172, 0.06022895812988281, 0.060183200836181644, 0.06042380905151367, 0.06112294387817383, 0.060980865478515625, 0.060935615539550785, 0.06071187210083008, 0.06032793426513672, 0.06026374435424805, 0.06011481475830078, 0.06008095932006836, 0.060239841461181644, 0.060324928283691404, 0.060201953887939454, 0.06009996795654297, 0.060170623779296876, 0.06018870544433594, 0.06021551895141602, 0.060450942993164065, 0.0606800651550293, 0.0607325439453125, 0.060639839172363284, 0.06068239974975586, 0.060919361114501955, 0.06070687866210937, 0.06052096176147461, 0.06036479949951172, 0.060336128234863284, 0.060211200714111325, 0.06027872085571289, 0.06030547332763672, 0.06036006546020508, 0.06055916976928711, 0.060528961181640625, 0.060596736907958984, 0.060690433502197265, 0.06066790390014649, 0.06079897689819336, 0.061200511932373046, 0.06119820785522461, 0.06106316757202149, 0.060948062896728515, 0.06075638580322266, 0.0607191047668457, 0.060606464385986325, 0.06045203018188477, 0.0603037109375, 0.06026492691040039, 0.06042819213867188, 0.06054646301269531, 0.06355887985229493, 0.061438720703125, 0.060450817108154295, 0.06033990478515625, 0.060098846435546874, 0.060047393798828126, 0.06016755294799805, 0.059961982727050785, 0.06007583999633789, 0.05996940612792969, 0.060107070922851565, 0.06019260787963867, 0.0603466567993164, 0.060344192504882814, 0.06026758575439453, 0.06045526504516602, 0.060856929779052736, 0.06077417755126953, 0.06056367874145508, 0.060259838104248044, 0.06025676727294922, 0.060243968963623044, 0.06017843246459961, 0.060321247100830075, 0.060299678802490236, 0.06020723342895508, 0.06019027328491211, 0.060122623443603515, 0.060353473663330076, 0.06018265533447266, 0.060237697601318356, 0.06043587112426758, 0.060566207885742185, 0.060670944213867185, 0.06066681671142578, 0.06056496047973633, 0.060516033172607425, 0.060443328857421874, 0.06032550430297851, 0.06028464126586914, 0.060402496337890625, 0.060345600128173825, 0.06033603286743164, 0.060400096893310544, 0.060329662322998044, 0.06040143966674805, 0.06044559860229492, 0.060582942962646484, 0.0605255355834961, 0.060672000885009764, 0.06076185607910156, 0.06065177536010742, 0.06053068923950195, 0.06051548767089844, 0.06059097671508789, 0.06056547164916992, 0.060593536376953125, 0.06055180740356445, 0.06043632125854492, 0.06047321701049805, 0.06041321563720703, 0.06041497421264649, 0.060524543762207034, 0.06381999969482421, 0.06148956680297851, 0.06062457656860352, 0.06033875274658203, 0.060128929138183594, 0.06006195068359375, 0.06008028793334961, 0.059947967529296875, 0.06001286315917969, 0.06005615997314453, 0.060008510589599606, 0.0600780143737793, 0.06012480163574219, 0.06002118301391601, 0.060089984893798826, 0.060346687316894534, 0.06065955352783203, 0.06073929595947265, 0.06060819244384766, 0.06039833450317383, 0.06014572906494141, 0.06011897659301758, 0.06010060882568359, 0.060200958251953124, 0.06016204833984375, 0.06031299209594727, 
0.06014012908935547, 0.06010265731811523, 0.06012313461303711, 0.06036819076538086, 0.06032454299926758, 0.060469249725341796, 0.06074982452392578, 0.06069247817993164, 0.060633087158203126, 0.060655616760253904, 0.060657440185546876, 0.06049008178710937, 0.060419231414794924, 0.06029385757446289, 0.06024528121948242, 0.06021603012084961, 0.06034543991088867, 0.06034320068359375, 0.06032902526855469, 0.0610002555847168, 0.06035014343261719, 0.06042489624023437, 0.06060358428955078, 0.060574432373046876, 0.060725345611572265, 0.06066707229614258, 0.06079983901977539, 0.06075388717651367, 0.06061996841430664, 0.06055715179443359, 0.06043952178955078, 0.06040063858032227, 0.060453502655029294, 0.060391361236572266, 0.06043199920654297, 0.060372894287109374, 0.06050009536743164, 0.0639409294128418, 0.06152399826049805, 0.060604190826416014, 0.06030883026123047, 0.06008432006835938, 0.060074302673339845, 0.060109249114990236, 0.06009196853637695, 0.060090816497802735, 0.060246017456054686, 0.06014156723022461, 0.06028019332885742, 0.060230335235595706, 0.060151744842529296, 0.06011203384399414, 0.060375904083251955, 0.06057900619506836, 0.06071392059326172, 0.060499839782714844, 0.06035059356689453, 0.06028476715087891, 0.06013727951049805, 0.0601192626953125, 0.06024156951904297, 0.06028323364257813, 0.060276897430419925, 0.06021839904785156, 0.06015673446655274, 0.060203006744384766, 0.060251937866210935, 0.060241600036621094, 0.060340545654296876, 0.060691745758056644, 0.06062176132202148, 0.06060246276855469, 0.06059600067138672, 0.06062092971801758, 0.0604461441040039, 0.06031622314453125, 0.06032928085327149, 0.06040851211547851, 0.06036172866821289, 0.060298240661621094, 0.06035200119018555, 0.06040758514404297, 0.06056009674072266, 0.060442623138427735, 0.06058598327636719, 0.06057072067260742, 0.060666782379150394, 0.06057779312133789, 0.060661376953125, 0.0607665901184082, 0.06065971374511719, 0.060509342193603516, 0.0604760627746582, 0.06036703872680664, 0.06036697769165039, 0.06044863891601562, 0.06048956680297852, 0.06045916748046875, 0.060558433532714846, 0.06045993423461914, 0.06407062530517578, 0.06165689468383789, 0.0606720962524414, 0.06031577682495117, 0.06017020797729492, 0.06006748962402344, 0.06009686279296875, 0.060089759826660157, 0.06007049560546875, 0.06002617645263672, 0.06008646392822266, 0.06022380828857422, 0.06017027282714844, 0.06020518493652344, 0.06031363296508789, 0.06052864074707031, 0.06088499069213867, 0.06093958282470703, 0.060756671905517576, 0.06053683090209961, 0.060319744110107425, 0.060278785705566405, 0.06026383972167969, 0.0602138557434082, 0.06025628662109375, 0.060411872863769533, 0.06030745697021484, 0.06029107284545898, 0.060243873596191405, 0.06016214370727539, 0.060440574645996094, 0.06055321502685547, 0.060780414581298826, 0.06077360153198242, 0.06085315322875977, 0.060805118560791016, 0.060862464904785155, 0.06074687957763672, 0.060620830535888674, 0.060416606903076174, 0.06037673568725586, 0.06028144073486328, 0.060270591735839846, 0.06021299362182617, 0.06041420745849609, 0.06043033599853516, 0.06046515274047851, 0.06043648147583008, 0.060556350708007814, 0.060894176483154296, 0.06088195037841797, 0.06075283050537109, 0.060619998931884765, 0.0606154556274414, 0.06054707336425781, 0.06070259094238281, 0.060616798400878906, 0.06060163116455078, 0.060546878814697266, 0.06066067123413086, 0.06049100875854492, 0.060506366729736326, 0.06057011032104492, 0.06459846496582031, 0.06184755325317383, 0.060674144744873044, 0.060440479278564455, 
0.06018560028076172, 0.060181503295898435, 0.06028287887573242, 0.06008224105834961, 0.060098400115966795, 0.060278881072998045, 0.06030131149291992, 0.060232894897460934, 0.060168704986572265, 0.060296703338623046, 0.06021612930297852, 0.06042367935180664, 0.06092416000366211, 0.06107923126220703, 0.060755615234375, 0.06048041534423828, 0.060276737213134764, 0.0602413101196289, 0.06014831924438477, 0.060229633331298826, 0.060262401580810546, 0.06036640167236328, 0.06024959945678711, 0.06028905487060547, 0.06041439819335938, 0.06029935836791992, 0.0603682861328125, 0.060679134368896485, 0.06090451049804688, 0.06088998413085937, 0.06088700866699219, 0.06081340789794922, 0.060649471282958986, 0.060440574645996094, 0.060387168884277344, 0.060418270111083985, 0.06041593551635742, 0.060276737213134764, 0.060275840759277347, 0.06018547058105469, 0.06036070251464844, 0.060389217376708985, 0.060603649139404296, 0.0606923828125, 0.06081433486938476, 0.06093932723999024, 0.060933025360107425, 0.060901313781738284, 0.06092921447753906, 0.06059891128540039, 0.060494110107421874, 0.06043427276611328, 0.06058550262451172, 0.06048534393310547, 0.060418495178222655, 0.06038166427612305, 0.060375038146972655, 0.06040576171875, 0.06061667251586914, 0.06406304168701171, 0.061632926940917966, 0.06066092681884765, 0.06041836929321289, 0.06008883285522461, 0.06011699295043945, 0.06019465637207031, 0.06017859268188477, 0.06022275161743164, 0.060134014129638674, 0.060169921875, 0.06025779342651367, 0.06025923156738281, 0.06026649475097656, 0.06048972702026367, 0.060715007781982425, 0.061132831573486326, 0.06101193618774414, 0.060800384521484375, 0.060437118530273434, 0.06021731185913086, 0.060141407012939456, 0.06016428756713867, 0.060182369232177735, 0.06027484893798828, 0.060268543243408204, 0.060246017456054686, 0.0603045768737793, 0.06039340972900391, 0.06040665435791016, 0.06050787353515625, 0.060760353088378904, 0.06100899124145508, 0.06099772644042969, 0.06091011047363281, 0.06082726287841797, 0.060674560546875, 0.060518016815185545, 0.06058204650878906, 0.060534912109375, 0.06069990539550781, 0.060593185424804685, 0.060620223999023434, 0.060612255096435544, 0.06059507369995117, 0.060534751892089844, 0.0604815673828125, 0.06069247817993164, 0.060921409606933596, 0.06088748931884766, 0.06079487991333008, 0.060782081604003904, 0.06089574432373047, 0.06071088027954102, 0.060475425720214845, 0.06040758514404297, 0.06044899368286133, 0.060392734527587894, 0.06042252731323242, 0.06065155029296875, 0.0606641616821289, 0.060665855407714846, 0.060738590240478514, 0.06429488372802734, 0.061618080139160154, 0.060698368072509765, 0.060402015686035156, 0.060104705810546874, 0.06006988906860351, 0.06016204833984375, 0.060068958282470705, 0.0600544319152832, 0.06032793426513672, 0.06036070251464844, 0.06045286560058594, 0.06048070526123047, 0.06053337478637695, 0.06042841720581055, 0.06064748764038086, 0.061050880432128904, 0.06105497741699219, 0.060715007781982425, 0.06042828750610352, 0.06021292877197266, 0.060150081634521485, 0.06014316940307617, 0.060123233795166015, 0.06026399993896484, 0.06024272155761719, 0.060306560516357424, 0.06022447967529297, 0.06029497528076172, 0.06035670471191406, 0.06042214584350586, 0.06074367904663086, 0.06101606369018555, 0.061005409240722654, 0.061079776763916016, 0.06155487823486328, 0.06103007888793945, 0.060741249084472655, 0.06063132858276367, 0.06050409698486328, 0.06040335845947266, 0.060379104614257814, 0.060357376098632814, 0.060353759765625, 0.06048438262939453, 
0.06040127944946289, 0.060469406127929684, 0.06078252792358398, 0.0608504638671875, 0.0609172477722168, 0.06084783935546875, 0.06093494415283203, 0.06099919891357422, 0.06074816131591797, 0.06074915313720703, 0.06072192001342774, 0.06063622283935547, 0.06076448059082031, 0.06075046539306641, 0.06056345748901367, 0.06051430511474609, 0.060491775512695314, 0.06054912185668945, 0.06376310348510743, 0.06150057601928711, 0.06059299087524414, 0.06035865783691406, 0.06008422470092773, 0.06005350494384765, 0.06015385437011719, 0.060080127716064455, 0.060434432983398435, 0.06030745697021484, 0.060424190521240234, 0.06044467163085938, 0.06042214584350586, 0.060407806396484375, 0.06036012649536133, 0.06053535842895508, 0.06106467056274414, 0.060975135803222655, 0.06071088027954102, 0.06049846267700195, 0.060284927368164064, 0.06015999984741211, 0.06014265441894531, 0.06015891265869141, 0.0602086410522461, 0.06023628616333008, 0.06022256088256836, 0.060335006713867184, 0.06028083038330078, 0.06031155014038086, 0.06041731262207031, 0.06054332733154297, 0.06087839889526367, 0.060881729125976565, 0.06090454483032227, 0.06087772750854492, 0.06082355117797852, 0.060565502166748046, 0.060453887939453124, 0.060494846343994144, 0.060391231536865236, 0.060456993103027344, 0.06039081573486328, 0.06037171173095703, 0.060378974914550784, 0.06034428787231445, 0.06069676971435547, 0.06082559967041016, 0.0608004150390625, 0.060922431945800784, 0.061009952545166016, 0.06106032180786133, 0.06093494415283203, 0.06074476623535156, 0.06054598236083984, 0.060483230590820315, 0.06048803329467774, 0.06045836639404297, 0.060442977905273435, 0.060391712188720706, 0.060388607025146486, 0.060412670135498045, 0.060419326782226564, 0.06393033599853516, 0.06157721710205078, 0.06065151977539063, 0.06036684799194336, 0.06007807922363281, 0.060051006317138673, 0.06029151916503906, 0.060114944458007816, 0.06006771087646484, 0.06006332778930664, 0.06006355285644531, 0.060158241271972654, 0.06026665496826172, 0.06036643218994141, 0.060279487609863285, 0.06068838500976562, 0.061026302337646485, 0.061060287475585937, 0.06088351821899414, 0.060464702606201175, 0.060332286834716795, 0.06022393417358399, 0.060344062805175784, 0.06051353454589844, 0.06045142364501953, 0.06038774490356445, 0.0604334716796875, 0.06046188735961914, 0.06058787155151367, 0.06065795135498047, 0.06065116882324219, 0.06085468673706055, 0.06122265625, 0.06101011276245117, 0.06103039932250977, 0.060829696655273435, 0.060862464904785155, 0.060851390838623044, 0.060715934753417966, 0.06060601425170899, 0.06051161575317383, 0.06036783981323242, 0.06034022521972656, 0.060336128234863284, 0.06040739059448242, 0.06051062393188476, 0.06053398513793945, 0.06067020797729492, 0.06081999969482422, 0.06105497741699219, 0.06081081771850586, 0.06068652725219727, 0.060567520141601563, 0.0607501106262207, 0.06063718414306641, 0.0606739501953125, 0.06060566329956055, 0.06060841751098633, 0.060461505889892575, 0.06062480163574219, 0.060553855895996093, 0.06055715179443359, 0.06057708740234375]",tokens/s,16.516702713878985,, 
float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in 
_call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.73312,14281.474048,0.0,13878.951936,13865.632768,s,1,8.3498759765625,8.3498759765625,0.0,8.3498759765625,8.3498759765625,8.3498759765625,8.3498759765625,[8.3498759765625],,kWh,9.354132491633511e-06,1.024521445709692e-06,6.0661159639874285e-06,1.644476990133063e-05,,MB,1216.393216,14703.0016,0.0,14289.993728,14241.298944,s,10,12.383355957031249,1.2383355957031248,0.004935034035840183,1.2397705078125,1.2427809448242186,1.2432646179199218,1.2436515563964843,"[1.2263873291015626, 1.234051025390625, 1.2372064208984375, 1.240752197265625, 1.2363861083984375, 1.2390606689453125, 1.242610107421875, 1.2404803466796874, 1.2426734619140625, 1.243748291015625]",tokens/s,206.72909741776715,kWh,3.613930880583666e-05,3.985688760625541e-06,2.402524144239915e-05,6.415023900886135e-05,tokens/kWh,3990632.053056538,MB,1234.1248,14870.77376,0.0,14457.765888,14413.156352,s,10,40.622117187499995,4.0622117187499995,0.004656699934348818,4.060311767578125,4.06980341796875,4.070640356445312,4.071309907226563,"[4.06512744140625, 4.071477294921875, 4.069617431640625, 4.05886328125, 4.058444580078125, 4.059125, 4.05728173828125, 4.06093115234375, 4.0596923828125, 
4.061556884765625]",tokens/s,15.508792835491597,kWh,0.00011841903004166244,1.3062563161610457e-05,7.868675739380188e-05,0.00021016835059707473,tokens/kWh,299759.6917947972,,s,630,40.61884122848512,0.06447435115632558,0.00028540208038835377,0.06448127746582032,0.06482949752807618,0.06497841453552246,0.0652202774810791,"[0.06499043273925781, 0.06423123168945312, 0.06395929718017578, 0.06398844909667968, 0.06408601379394531, 0.06407318115234376, 0.06399993515014649, 0.06401084899902344, 0.06411430358886719, 0.06420719909667968, 0.06425603485107421, 0.06417407989501953, 0.06413235473632813, 0.06430745697021484, 0.06438349151611328, 0.06433724975585937, 0.06431603240966798, 0.06428060913085938, 0.06418022155761718, 0.06421814727783202, 0.06425289916992187, 0.06426233673095703, 0.06425580596923829, 0.06437068939208984, 0.06432736206054687, 0.06448159790039062, 0.06440345764160156, 0.06440064239501953, 0.06452239990234375, 0.0646272964477539, 0.06467378997802735, 0.06470041656494141, 0.06466150665283203, 0.06462258911132812, 0.06453247833251953, 0.06449561309814453, 0.06451200103759766, 0.06452223968505859, 0.06463593292236328, 0.06464530944824219, 0.0646396484375, 0.06468211364746093, 0.064753662109375, 0.06463107299804688, 0.06472470092773437, 0.06498464202880859, 0.06476185607910157, 0.06494588470458984, 0.06486249542236328, 0.06473363494873047, 0.06468793487548828, 0.06463302612304687, 0.06466150665283203, 0.0647328643798828, 0.06465977478027343, 0.0648858871459961, 0.06480985260009765, 0.0650035171508789, 0.06489449310302735, 0.06491545867919922, 0.065001953125, 0.06493587493896484, 0.06504825592041015, 0.06556198120117188, 0.06440525054931641, 0.06406380462646484, 0.06412073516845704, 0.06405120086669921, 0.06403526306152343, 0.06410057830810546, 0.06414080047607422, 0.06412134552001954, 0.0642007064819336, 0.06411609649658204, 0.06430579376220703, 0.06429862213134765, 0.06433805084228515, 0.06464041900634766, 0.06468694305419923, 0.064395263671875, 0.06431539154052734, 0.06428057861328125, 0.0643031005859375, 0.06423926544189452, 0.06426249694824218, 0.06432505798339844, 0.06431753540039062, 0.06435273742675782, 0.06446489715576172, 0.06446611022949218, 0.0645864028930664, 0.06458589172363281, 0.06477823638916015, 0.06463452911376953, 0.0648154525756836, 0.06476390075683594, 0.06455500793457031, 0.06457548522949219, 0.06455296325683593, 0.06454425811767578, 0.06455142211914063, 0.06456729888916016, 0.06487245178222656, 0.06458723449707031, 0.06456086730957031, 0.06462547302246094, 0.06472089385986328, 0.06503958129882813, 0.06498957061767578, 0.0651903076171875, 0.06527715301513672, 0.06521241760253907, 0.06491216278076171, 0.0647741470336914, 0.06468134307861328, 0.06474201965332031, 0.06501100921630859, 0.0649076156616211, 0.06511856079101562, 0.06515023803710937, 0.06498172760009766, 0.06505014038085938, 0.06502591705322265, 0.06498944091796875, 0.06514723205566406, 0.06508544158935547, 0.06524140930175781, 0.06443366241455079, 0.06435049438476563, 0.06415817260742188, 0.06424781036376953, 0.06415769958496094, 0.0642191390991211, 0.06446489715576172, 0.06434143829345704, 0.06431391906738282, 0.06426214599609376, 0.0644078369140625, 0.06439459228515625, 0.06444249725341797, 0.06450310516357421, 0.06467212677001953, 0.06444451141357421, 0.06456982421875, 0.06447923278808594, 0.06443417358398437, 0.06429315185546874, 0.0642510757446289, 0.06451663970947266, 0.06442947387695312, 0.0643609619140625, 0.06455101013183594, 0.0644935073852539, 0.06460406494140625, 0.06457564544677734, 
0.06464486694335937, 0.06477030181884766, 0.06467151641845703, 0.06456246185302734, 0.06462560272216797, 0.06479257965087891, 0.06456320190429687, 0.06469427490234375, 0.06478559875488281, 0.06464169311523438, 0.0648265609741211, 0.0646809310913086, 0.064716796875, 0.06464307403564454, 0.06471475219726562, 0.06468608093261718, 0.06473113250732422, 0.06476777648925781, 0.06475743865966797, 0.06472137451171875, 0.06471465301513672, 0.06472067260742187, 0.06464128112792969, 0.06459200286865234, 0.06464921569824218, 0.06467174530029297, 0.06456307220458984, 0.06475993347167969, 0.06492774200439454, 0.06490726470947265, 0.06483968353271484, 0.0648043212890625, 0.06493199920654297, 0.06489884948730469, 0.06517961883544922, 0.06435430145263672, 0.06415555572509765, 0.06421702575683594, 0.06408780670166016, 0.0640403823852539, 0.06413206481933593, 0.06409625244140625, 0.06409420776367188, 0.06399795150756836, 0.06403858947753906, 0.06409452819824218, 0.06426009368896485, 0.06424781036376953, 0.06445260620117188, 0.06446080017089843, 0.06437273406982422, 0.06429901123046874, 0.06423551940917968, 0.06415155029296875, 0.06405427551269531, 0.06405999755859375, 0.0641990737915039, 0.06429491424560548, 0.06412083435058594, 0.06411673736572265, 0.06419168090820312, 0.06417906951904297, 0.06433366394042969, 0.06485206604003907, 0.06461440277099609, 0.06452838134765625, 0.06437273406982422, 0.06431129455566406, 0.065115966796875, 0.06433197021484376, 0.06423046112060547, 0.0644760971069336, 0.06441983795166016, 0.06491136169433594, 0.06447103881835937, 0.06436454772949218, 0.06448127746582032, 0.0645692138671875, 0.0643790054321289, 0.0647557144165039, 0.06494003295898437, 0.06474956512451172, 0.06467788696289062, 0.06452633666992187, 0.0646266860961914, 0.0644557113647461, 0.06460044860839843, 0.06455753326416015, 0.06451213073730469, 0.06454476928710938, 0.06463664245605469, 0.06460444641113282, 0.06460211181640625, 0.06460758209228516, 0.06466831970214844, 0.0647529296875, 0.0648117446899414, 0.0651673583984375, 0.06424313354492188, 0.06401068878173828, 0.06399395370483399, 0.06391177749633789, 0.06410185241699219, 0.0639731216430664, 0.06399414443969727, 0.06399980926513672, 0.06404185485839843, 0.06421708679199219, 0.06430678558349609, 0.06425827026367187, 0.06424339294433594, 0.06424217224121094, 0.06437811279296875, 0.06435008239746094, 0.06418521881103516, 0.06425186920166015, 0.06421302032470703, 0.0640836181640625, 0.06406956481933594, 0.06413148498535157, 0.06414860534667968, 0.06406819152832031, 0.06427446746826172, 0.06439347076416016, 0.06437068939208984, 0.06434153747558594, 0.06462457275390625, 0.06467228698730469, 0.06464921569824218, 0.06457337951660157, 0.06493961334228515, 0.0648457260131836, 0.06460063934326171, 0.06439718627929687, 0.06439027404785157, 0.06429574584960937, 0.0643196792602539, 0.06437068939208984, 0.06460825347900391, 0.06458531188964843, 0.0644653091430664, 0.06469382476806641, 0.06453868865966797, 0.06457587432861328, 0.06461619567871094, 0.06462284851074218, 0.06447241973876953, 0.0644942398071289, 0.06444380950927735, 0.06452899169921875, 0.06449708557128907, 0.06448700714111329, 0.06447731018066406, 0.06459836578369141, 0.06484223937988282, 0.06484786987304687, 0.06467501068115235, 0.06476473236083985, 0.06492320251464843, 0.06474591827392578, 0.06501785278320313, 0.06423343658447266, 0.06386300659179688, 0.06428243255615235, 0.06396928024291992, 0.06392403030395508, 0.06411283111572266, 0.064, 0.06397478485107422, 0.0640248031616211, 0.06411449432373047, 
0.0641336669921875, 0.06406534576416016, 0.06407215881347657, 0.06430697631835937, 0.06438706970214844, 0.06425536346435547, 0.06414787292480469, 0.06422547149658203, 0.06417814636230469, 0.06459133148193359, 0.06429347229003907, 0.06418841552734375, 0.0641269760131836, 0.06406915283203125, 0.06452681732177734, 0.06428864288330079, 0.06432486724853516, 0.06446169281005859, 0.06453453063964844, 0.0645610580444336, 0.06522684478759766, 0.06471244812011719, 0.06455494689941406, 0.06442758178710938, 0.06465936279296874, 0.06459849548339844, 0.0645164794921875, 0.06437999725341798, 0.06460099029541015, 0.06450399780273437, 0.06449337768554687, 0.06440959930419922, 0.0645038070678711, 0.06471270751953125, 0.0646123504638672, 0.06464505767822265, 0.06475523376464844, 0.06451881408691407, 0.0646429443359375, 0.064505859375, 0.06455296325683593, 0.06448729705810546, 0.06449932861328125, 0.06457542419433594, 0.06457170867919922, 0.06450550079345703, 0.06461090850830079, 0.06468812561035156, 0.06467718505859375, 0.06480067443847656, 0.06472783660888672, 0.0648305892944336, 0.06500962829589843, 0.06422281646728516, 0.06404752349853515, 0.06392524719238281, 0.06396211242675781, 0.06396518325805664, 0.06400348663330079, 0.06387363052368164, 0.06392217636108398, 0.06398566436767578, 0.06399590301513672, 0.06406089782714844, 0.06424630737304687, 0.06408748626708985, 0.06438764953613281, 0.06436825561523438, 0.06430553436279297, 0.06440902709960937, 0.06442451477050781, 0.06422937774658204, 0.06421263885498046, 0.06406582641601563, 0.06408582305908203, 0.06411289978027344, 0.0642334747314453, 0.06418227386474609, 0.06421708679199219, 0.06441519927978516, 0.06435075378417969, 0.06440755462646484, 0.06454681396484375, 0.0643768310546875, 0.06437055969238281, 0.06439129638671875, 0.06446080017089843, 0.06428876495361328, 0.06441741180419921, 0.0643731231689453, 0.06438706970214844, 0.06440550231933594, 0.06438706970214844, 0.06454198455810548, 0.06459001922607421, 0.06463894653320312, 0.06458185577392578, 0.06468220520019531, 0.06469849395751953, 0.06457084655761719, 0.06482729339599609, 0.06468262481689453, 0.0645792007446289, 0.06461023712158204, 0.06460253143310547, 0.0644546890258789, 0.06446080017089843, 0.06450790405273438, 0.06460185241699219, 0.06489907073974609, 0.06530073547363281, 0.06463062286376953, 0.0647456283569336, 0.06488473510742188, 0.06478841400146484, 0.06533299255371093, 0.06433203125, 0.06396489715576172, 0.06388969421386718, 0.06396108627319336, 0.0639975357055664, 0.06397788619995118, 0.06411436462402344, 0.06413362884521484, 0.06415340423583984, 0.06406288146972657, 0.06439785766601562, 0.06421881866455079, 0.06417241668701172, 0.06443212890625, 0.06471475219726562, 0.06487619018554687, 0.06440380859375, 0.06432563018798829, 0.06429462432861328, 0.06456873321533203, 0.06419340515136719, 0.06467407989501953, 0.0642733154296875, 0.06412371063232422, 0.0643051528930664, 0.06440550231933594, 0.06440140533447265, 0.06466307067871094, 0.06459184265136719, 0.06446540832519532, 0.06460384368896484, 0.06465773010253906, 0.0645406723022461, 0.06437891387939453, 0.06442390441894531, 0.06447270202636719, 0.06444684600830078, 0.06446284484863281, 0.06434764862060546, 0.06440191650390625, 0.06436863708496093, 0.0643583984375, 0.06455213165283204, 0.064615234375, 0.06466969299316407, 0.06468985748291016, 0.06463651275634766, 0.06482937622070313, 0.06466844940185547, 0.06455705261230468, 0.06466118621826172, 0.0645465316772461, 0.06449334716796876, 0.06447187042236328, 0.06441280364990234, 
0.06453337860107422, 0.06461030578613282, 0.06455296325683593, 0.06489631652832031, 0.06466015625, 0.0648497314453125, 0.06485593414306641, 0.0652234878540039, 0.06436380767822265, 0.06431817626953125, 0.0641269760131836, 0.06397939300537109, 0.0640893783569336, 0.0640884780883789, 0.06404550170898438, 0.06424269104003906, 0.06415052795410156, 0.06424166107177734, 0.06441165161132813, 0.06433948516845703, 0.06439984130859375, 0.06440959930419922, 0.06454681396484375, 0.0644376983642578, 0.06433235168457031, 0.06413721466064454, 0.06410034942626953, 0.06404463958740235, 0.06417040252685546, 0.0640789794921875, 0.06413811492919921, 0.06418131256103515, 0.06425901031494141, 0.06435004425048828, 0.06434534454345703, 0.06436051177978516, 0.06453539276123046, 0.06453836822509766, 0.06457126617431641, 0.06447257232666016, 0.0644361572265625, 0.06428768157958985, 0.06422236633300782, 0.06424457550048829, 0.0643604507446289, 0.06437273406982422, 0.06475981140136719, 0.06447923278808594, 0.06458777618408203, 0.06462258911132812, 0.06448649597167969, 0.06454560089111328, 0.06468412780761719, 0.06481305694580078, 0.06466460418701171, 0.06465827178955078, 0.06449983978271484, 0.06448127746582032, 0.06453862762451172, 0.06452963256835938, 0.06454557037353516, 0.06448332977294922, 0.06455487823486328, 0.06461996459960938, 0.06462739562988282, 0.06476390075683594, 0.06480210876464844, 0.06474518585205079, 0.06499632263183594, 0.06492160034179688, 0.06518915557861328, 0.06457008361816406, 0.06409855651855469, 0.06405094146728516, 0.0639365119934082, 0.06405471801757813, 0.06426886749267578, 0.06414320373535157, 0.06400358581542968, 0.06412675476074219, 0.06412582397460938, 0.06421708679199219, 0.06431436920166016, 0.06475836944580078, 0.0643314208984375, 0.06443084716796875, 0.06433382415771484, 0.064974365234375, 0.06435887908935548, 0.06418611145019532, 0.06414332580566406, 0.06421504211425781, 0.06432729339599609, 0.06433654022216796, 0.06434611511230469, 0.06430924987792969, 0.06448947143554687, 0.06461990356445313, 0.06457769775390625, 0.06454934692382812, 0.0646322250366211, 0.06455971527099609, 0.06441497802734375, 0.06441458892822266, 0.064329345703125, 0.06436608123779297, 0.06446566772460938, 0.06448652648925782, 0.06441404724121094, 0.06451254272460938, 0.06436863708496093, 0.06460537719726563, 0.064652099609375, 0.06460415649414063, 0.06458777618408203, 0.06451551818847656, 0.06482387542724609, 0.06467935943603516, 0.06456966400146484, 0.06449571228027344, 0.06455238342285156, 0.0645926055908203, 0.06460825347900391, 0.06463078308105469, 0.06465740966796875, 0.06456114959716797, 0.06463078308105469, 0.06458163452148437, 0.06459779357910156, 0.06474160003662109, 0.06468812561035156, 0.06476799774169922, 0.06478438568115234]",tokens/s,15.510043638521983,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File 
""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 214332 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.831424,1333.6576,0.0,931.135488,917.648384,s,1,7.2451396484375,7.2451396484375,0.0,7.2451396484375,7.2451396484375,7.2451396484375,7.2451396484375,[7.2451396484375],,kWh,5.5038424875344085e-06,6.000439068693138e-07,1.862501490007551e-06,7.966387884411273e-06,,MB,1310.84288,1465.778176,0.0,1050.673152,1018.330112,s,10,0.1808848648071289,0.01808848648071289,0.0001914899778385992,0.018042304039001464,0.018179532432556152,0.018405813884735108,0.018586839046478273,"[0.018632095336914064, 0.018129247665405274, 0.017969120025634767, 0.018109760284423827, 0.017950111389160157, 0.017996320724487303, 0.01804038429260254, 0.018044223785400392, 0.01793769645690918, 0.018075904846191405]",tokens/s,14152.648994318219,kWh,5.297698674380046e-07,5.84245318568523e-08,3.529131679927446e-07,9.411075672876015e-07,tokens/kWh,272019914.5118197,MB,1343.381504,1482.555392,0.0,1067.450368,1033.282048,s,10,10.911595581054687,1.0911595581054687,0.006251480996008939,1.09009326171875,1.0977323608398437,1.101920623779297,1.1052712341308595,"[1.10610888671875, 1.0968016357421875, 1.0920335693359375, 1.0877236328125, 1.090129150390625, 1.090057373046875, 1.089203125, 1.080731689453125, 1.090380859375, 1.0884256591796875]",tokens/s,57.73674393631676,kWh,3.154041228006302e-05,3.4784255474658957e-06,1.4023153888407244e-05,4.9041991715936145e-05,tokens/kWh,1284613.4056893985,,s,630,10.90666772460937,0.017312170991443453,0.0002794424886831371,0.01724825668334961,0.017517420196533205,0.017828537845611572,0.01842028247833252,"[0.01756819152832031, 0.018159231185913085, 0.01798784065246582, 0.01813907241821289, 0.0180850887298584, 0.017818592071533204, 0.01785158348083496, 0.017855648040771485, 0.018261663436889647, 0.017909759521484374, 0.017761472702026368, 0.017703327178955078, 0.01762345504760742, 0.01834102439880371, 0.017715999603271484, 0.017633472442626953, 0.017760128021240235, 0.01746124839782715, 0.017326047897338867, 0.017428512573242187, 0.017409536361694337, 0.017379680633544923, 0.01752899169921875, 0.01804697608947754, 0.017524511337280273, 0.017500383377075195, 0.017466976165771485, 0.017408416748046874, 0.01721958351135254, 0.017306816101074218, 0.01730233573913574, 0.017382623672485352, 0.017322784423828126, 0.017504224777221678, 0.017215520858764648, 0.017237791061401365, 0.017590496063232423, 0.01723091125488281, 0.017277311325073243, 0.017166847229003905, 0.017298656463623045, 0.018897216796875, 0.018291231155395507, 0.017356000900268554, 0.01738012886047363, 0.017274879455566407, 0.017262592315673828, 0.01732579231262207, 0.01725654411315918, 0.017410144805908204, 0.01747158432006836, 0.017155551910400392, 0.017199552536010743, 
0.017893440246582033, 0.017950016021728514, 0.017269407272338867, 0.017227327346801758, 0.017144319534301757, 0.017267967224121095, 0.01717228889465332, 0.01730179214477539, 0.01718134307861328, 0.01722368049621582, 0.016892864227294923, 0.01828659248352051, 0.017378656387329102, 0.017128095626831055, 0.017133567810058595, 0.01910576057434082, 0.018620447158813478, 0.017257568359375, 0.017180831909179687, 0.01728179168701172, 0.017250015258789064, 0.01717625617980957, 0.017211488723754883, 0.0172938232421875, 0.017169408798217774, 0.01718169593811035, 0.01712886428833008, 0.01717625617980957, 0.017111583709716795, 0.01714419174194336, 0.017152000427246093, 0.017250303268432618, 0.01716223907470703, 0.017121280670166016, 0.017125375747680666, 0.01718272018432617, 0.017190687179565428, 0.017360671997070313, 0.017268991470336913, 0.017368703842163085, 0.017205440521240234, 0.017080703735351564, 0.017229824066162108, 0.01717196846008301, 0.017203712463378908, 0.01711039924621582, 0.017148544311523437, 0.017113088607788086, 0.017064960479736328, 0.01769526481628418, 0.019780063629150392, 0.018091072082519533, 0.017446847915649415, 0.017356992721557617, 0.017503040313720703, 0.017276512145996094, 0.0172589111328125, 0.017134912490844728, 0.0171711368560791, 0.017207359313964842, 0.017377056121826173, 0.01718492889404297, 0.01723391914367676, 0.017321983337402345, 0.017280223846435547, 0.017171232223510743, 0.01722572708129883, 0.01724825668334961, 0.017838239669799805, 0.01974870491027832, 0.017563615798950195, 0.017268768310546877, 0.017303232192993165, 0.01677724838256836, 0.017237951278686523, 0.017331327438354492, 0.017278976440429687, 0.01767078399658203, 0.017248191833496095, 0.01729804801940918, 0.01720639991760254, 0.017277088165283203, 0.01717056083679199, 0.017328479766845702, 0.01720729637145996, 0.01716633605957031, 0.017223615646362305, 0.01710086441040039, 0.017156095504760743, 0.01713968086242676, 0.01737516784667969, 0.017199199676513673, 0.017294815063476562, 0.017295520782470705, 0.017199487686157228, 0.017145856857299805, 0.017383007049560546, 0.017154144287109374, 0.01715203285217285, 0.017183008193969725, 0.01719830322265625, 0.017378080368041993, 0.01729248046875, 0.017234752655029297, 0.01721286392211914, 0.017170400619506837, 0.017293920516967775, 0.01843814468383789, 0.017508352279663086, 0.01818828773498535, 0.01744233512878418, 0.017409984588623046, 0.01787772750854492, 0.01784185600280762, 0.017334175109863282, 0.017314016342163088, 0.01719705581665039, 0.017344512939453126, 0.017166208267211915, 0.01717977523803711, 0.017269760131835937, 0.017383424758911133, 0.017174272537231444, 0.017385631561279296, 0.0171759033203125, 0.018378591537475585, 0.017308576583862305, 0.017184768676757813, 0.017250303268432618, 0.017155231475830077, 0.017208160400390624, 0.017164127349853515, 0.017333568572998045, 0.017224544525146483, 0.017381216049194338, 0.01734364891052246, 0.016867168426513673, 0.017254783630371095, 0.01727337646484375, 0.01729542350769043, 0.017259807586669923, 0.01730633544921875, 0.017323328018188477, 0.017242816925048827, 0.017264192581176757, 0.017258079528808593, 0.017238304138183592, 0.01731350326538086, 0.017246240615844725, 0.017240959167480467, 0.017258432388305663, 0.017260543823242186, 0.01721548843383789, 0.017403871536254882, 0.017258527755737305, 0.01724825668334961, 0.017287168502807617, 0.017257856369018554, 0.01729804801940918, 0.01722572708129883, 0.017276927947998046, 0.017346559524536134, 0.017305631637573243, 0.01724617576599121, 
0.01724415969848633, 0.01719647979736328, 0.01727926445007324, 0.01733660888671875, 0.01734560012817383, 0.01729996871948242, 0.017381664276123046, 0.01732204818725586, 0.017539167404174806, 0.017330272674560547, 0.017247743606567383, 0.01726028823852539, 0.01738409614562988, 0.017184768676757813, 0.017258304595947266, 0.017363136291503906, 0.01726425552368164, 0.01716671943664551, 0.01722572708129883, 0.017176576614379883, 0.01716633605957031, 0.01717452812194824, 0.017154048919677735, 0.017121280670166016, 0.017137664794921875, 0.01720022392272949, 0.017384351730346678, 0.01723391914367676, 0.017372703552246092, 0.017340896606445312, 0.017120832443237304, 0.01715839958190918, 0.017255935668945312, 0.017195711135864256, 0.01712892723083496, 0.01679689598083496, 0.01713030433654785, 0.017102079391479494, 0.01707699203491211, 0.01710054397583008, 0.01718092727661133, 0.017184352874755858, 0.017144224166870118, 0.01719862365722656, 0.017246208190917968, 0.017260671615600586, 0.017465024948120116, 0.017824384689331056, 0.01734182357788086, 0.01727145576477051, 0.017362144470214842, 0.01727568054199219, 0.017446207046508788, 0.01733907127380371, 0.017307647705078123, 0.017336320877075196, 0.017258495330810548, 0.017214752197265624, 0.017207775115966797, 0.01740825653076172, 0.017297407150268555, 0.01738243293762207, 0.01748899269104004, 0.017481599807739258, 0.017367040634155274, 0.017511936187744142, 0.01738582420349121, 0.01772271919250488, 0.017175359725952147, 0.017362943649291994, 0.017983488082885742, 0.017502111434936525, 0.017233184814453125, 0.01726076889038086, 0.01751718330383301, 0.017315071105957033, 0.017271520614624024, 0.01718272018432617, 0.017194911956787108, 0.017180767059326172, 0.017236064910888672, 0.017243200302124024, 0.01718671989440918, 0.017292224884033203, 0.017309696197509765, 0.017301504135131835, 0.017342367172241212, 0.017325567245483398, 0.017199712753295897, 0.01715113639831543, 0.01716876792907715, 0.017224128723144532, 0.017174560546875, 0.01719910430908203, 0.017303552627563477, 0.017230112075805663, 0.017190624237060546, 0.017250303268432618, 0.016856576919555662, 0.01718508720397949, 0.017195104598999023, 0.017162336349487304, 0.017334272384643554, 0.017276063919067382, 0.01721958351135254, 0.017249120712280273, 0.017223552703857423, 0.017311456680297852, 0.0172957763671875, 0.017287168502807617, 0.0172825927734375, 0.01728895950317383, 0.0172838077545166, 0.017346559524536134, 0.017192960739135742, 0.017264575958251954, 0.017260608673095704, 0.017227935791015624, 0.017395040512084962, 0.017211904525756837, 0.017145055770874024, 0.017258304595947266, 0.017306592941284178, 0.017313791275024415, 0.01740185546875, 0.01743427276611328, 0.017379680633544923, 0.017335872650146484, 0.017316032409667968, 0.017274303436279295, 0.017438560485839843, 0.017343456268310548, 0.017298847198486327, 0.01741379165649414, 0.01751955223083496, 0.017245920181274414, 0.01735503959655762, 0.017537023544311522, 0.01743052864074707, 0.01721343994140625, 0.017284576416015624, 0.017317440032958983, 0.017412448883056642, 0.017392255783081054, 0.017290271759033204, 0.017318880081176758, 0.01716633605957031, 0.017178176879882812, 0.017270559310913085, 0.01724015998840332, 0.017272512435913087, 0.01728156852722168, 0.017228288650512694, 0.017504095077514648, 0.01734160041809082, 0.01734127998352051, 0.017252351760864256, 0.01722777557373047, 0.017215520858764648, 0.017198368072509764, 0.01752134323120117, 0.016811391830444337, 0.01714841651916504, 0.01722175979614258, 
0.01745305633544922, 0.017326080322265625, 0.017184288024902343, 0.017223295211791993, 0.01722659111022949, 0.01723391914367676, 0.017210784912109374, 0.017277536392211915, 0.017155263900756838, 0.017322240829467775, 0.01716211128234863, 0.01742304039001465, 0.01795686340332031, 0.01726464080810547, 0.01720921516418457, 0.017218687057495116, 0.017246784210205077, 0.017407615661621093, 0.017081151962280272, 0.017362943649291994, 0.017614271163940428, 0.0174638729095459, 0.01734822463989258, 0.01734489631652832, 0.017636703491210937, 0.01738819122314453, 0.017321983337402345, 0.01782329559326172, 0.017276575088500976, 0.017246496200561522, 0.01718716812133789, 0.01721356773376465, 0.01723744010925293, 0.01714396858215332, 0.017275327682495116, 0.017186111450195312, 0.017281728744506834, 0.017354751586914064, 0.01742416000366211, 0.017119455337524413, 0.01715135955810547, 0.017148544311523437, 0.017816896438598632, 0.01720368003845215, 0.01719660758972168, 0.017093280792236328, 0.017256256103515624, 0.017164352416992188, 0.01714963150024414, 0.01723436737060547, 0.017154048919677735, 0.01715814399719238, 0.017381216049194338, 0.017387008666992186, 0.017111711502075196, 0.01719203186035156, 0.01718681526184082, 0.017238847732543944, 0.01727702331542969, 0.01716633605957031, 0.01676780891418457, 0.017211231231689453, 0.017129472732543945, 0.017059839248657227, 0.0171312313079834, 0.017078559875488283, 0.01717043113708496, 0.017098751068115235, 0.017198944091796876, 0.017195167541503905, 0.0171944637298584, 0.017105695724487304, 0.017190528869628907, 0.017147552490234374, 0.017133663177490235, 0.017174911499023438, 0.017154048919677735, 0.01724415969848633, 0.01720319938659668, 0.017367040634155274, 0.01738956832885742, 0.017152288436889648, 0.017149663925170897, 0.017053216934204102, 0.017102527618408202, 0.01711497688293457, 0.017111040115356444, 0.016999359130859374, 0.017155328750610353, 0.017160959243774414, 0.017038976669311524, 0.01699468803405762, 0.017180255889892578, 0.01721353530883789, 0.017159648895263672, 0.017247135162353516, 0.017164224624633788, 0.017080320358276366, 0.01704140853881836, 0.01700249671936035, 0.017122560501098633, 0.01706038475036621, 0.017094879150390624, 0.01705120086669922, 0.017129919052124024, 0.017171743392944337, 0.017115232467651367, 0.017097024917602538, 0.017069631576538086, 0.017068544387817384, 0.01711948776245117, 0.01742755126953125, 0.017163040161132813, 0.017332351684570313, 0.017213056564331055, 0.01728121566772461, 0.017182912826538086, 0.017145856857299805, 0.0171615047454834, 0.017190847396850586, 0.017183391571044922, 0.0171944637298584, 0.01720364761352539, 0.01697212791442871, 0.01843731117248535, 0.01719327926635742, 0.017567615509033203, 0.01724671936035156, 0.01712303924560547, 0.017144096374511718, 0.017324031829833983, 0.017286815643310548, 0.017252416610717775, 0.017311264038085937, 0.01715279960632324, 0.017223232269287108, 0.01719862365722656, 0.01724300765991211, 0.01775846481323242, 0.017481472015380858, 0.017324031829833983, 0.01721343994140625, 0.01722572708129883, 0.0172359676361084, 0.017257600784301757, 0.017142656326293946, 0.017228927612304688, 0.017156896591186525, 0.01713302421569824, 0.017345312118530274, 0.01729724884033203, 0.01724140739440918, 0.017310400009155274, 0.017156095504760743, 0.017192480087280273, 0.0172457275390625, 0.017217536926269532, 0.017144256591796875, 0.017203712463378908, 0.017283071517944337, 0.017466976165771485, 0.017233631134033204, 0.017304256439208986, 0.017434080123901366, 
0.017698944091796873, 0.0172620792388916, 0.017202079772949217, 0.017352703094482422, 0.017571136474609374, 0.017214143753051758, 0.017202880859375, 0.017291296005249025, 0.017393951416015626, 0.017344512939453126, 0.017266687393188478, 0.017184768676757813, 0.017192800521850585, 0.01721513557434082, 0.017222143173217772, 0.01720467185974121, 0.01726486396789551, 0.01727728080749512, 0.017862655639648437, 0.017311744689941407, 0.01723391914367676, 0.017215103149414063, 0.01680588722229004, 0.01716169548034668, 0.01724060821533203, 0.017119232177734374, 0.01720524787902832, 0.017351999282836914, 0.017233631134033204, 0.017232864379882813, 0.017389535903930664, 0.01717865562438965, 0.017586271286010743, 0.01737071990966797, 0.017434944152832033, 0.01718057632446289, 0.017330272674560547, 0.01718390464782715, 0.017113824844360352, 0.01721766471862793, 0.0171822395324707, 0.017174848556518553, 0.0171561279296875, 0.01735897636413574, 0.017278175354003906, 0.017215871810913087, 0.017212831497192382, 0.017183744430541992, 0.017155839920043946, 0.017408224105834962, 0.017299488067626954, 0.017459199905395507, 0.01783193588256836, 0.017302751541137695, 0.017291263580322267, 0.017339168548583986, 0.017263647079467773, 0.017062112808227538, 0.01727359962463379, 0.017127424240112304, 0.01746668815612793, 0.01715065574645996, 0.01717411231994629, 0.017174047470092775, 0.01758812713623047, 0.017229984283447266, 0.017420448303222657, 0.017179296493530272, 0.017251359939575196, 0.01721353530883789, 0.017136512756347658, 0.0172728328704834, 0.017114431381225585, 0.017172416687011718, 0.01736160087585449, 0.01726857566833496, 0.017119712829589843, 0.017622783660888673, 0.017246208190917968, 0.01724825668334961, 0.017124544143676756, 0.017236576080322266, 0.01722172737121582, 0.01783616065979004, 0.017188863754272463]",tokens/s,57.76283058284547,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.748928,2134.769664,0.0,1732.247552,1728.316416,s,1,7.230126953125,7.230126953125,0.0,7.230126953125,7.230126953125,7.230126953125,7.230126953125,[7.230126953125],,kWh,5.519881120812897e-06,6.017748122032517e-07,1.8125014500103287e-06,7.934157383026478e-06,,MB,1215.848448,2336.096256,0.0,1923.088384,1891.2,s,10,0.47046467590332025,0.04704646759033203,0.0017782368019882352,0.04655601692199707,0.04763491554260253,0.04992179355621337,0.051751295967102054,"[0.05220867156982422, 0.04671209716796875, 0.04594099044799805, 0.04601507186889649, 0.04639993667602539, 0.0470335693359375, 0.046295841217041014, 0.046940704345703126, 0.04579107284545898, 
0.047126720428466794]",tokens/s,5441.428721688078,kWh,1.740567473263847e-06,1.9195255740916213e-07,1.1500025734404128e-06,3.082522604113422e-06,tokens/kWh,83048863.82937954,MB,1227.681792,2336.096256,0.0,1923.088384,1895.80032,s,10,12.851441040039063,1.2851441040039062,0.020116343326244974,1.2927409057617187,1.3052195312500001,1.3069160400390625,1.3082732470703125,"[1.2610181884765626, 1.2676329345703126, 1.26205908203125, 1.30322216796875, 1.303472900390625, 1.294377685546875, 1.304842529296875, 1.308612548828125, 1.2911041259765625, 1.255098876953125]",tokens/s,49.02173989961246,kWh,3.7487385168820045e-05,4.13418786527561e-06,1.7818373712360477e-05,5.943994674645613e-05,tokens/kWh,1059893.2779790238,,s,630,12.848650630950933,0.02039468354119195,0.0004291693427914185,0.020412848472595217,0.020832409858703615,0.020943762874603273,0.02183797616958618,"[0.02000761604309082, 0.01997612762451172, 0.020201696395874023, 0.019951616287231445, 0.019937280654907227, 0.02026870346069336, 0.01996182441711426, 0.019932704925537108, 0.02012041664123535, 0.020063968658447267, 0.019949855804443358, 0.0199202880859375, 0.019919456481933592, 0.019961311340332032, 0.019980703353881836, 0.02002297592163086, 0.01997782325744629, 0.019997184753417968, 0.020117855072021483, 0.020039199829101562, 0.02005449676513672, 0.019985887527465822, 0.019953920364379884, 0.020162527084350585, 0.019964223861694337, 0.020092735290527342, 0.01992095947265625, 0.020101247787475587, 0.019950624465942382, 0.019974208831787108, 0.020173728942871092, 0.019994623184204103, 0.020092927932739257, 0.020008960723876954, 0.020213760375976563, 0.019949024200439452, 0.02005878448486328, 0.020094079971313475, 0.019931583404541015, 0.019953216552734375, 0.019956480026245116, 0.019961280822753905, 0.019974720001220702, 0.019982336044311523, 0.01994246482849121, 0.01995871925354004, 0.019926464080810547, 0.019943328857421876, 0.019958431243896485, 0.020008447647094727, 0.019966880798339845, 0.02007846450805664, 0.020127456665039064, 0.019992256164550783, 0.01991644859313965, 0.019991199493408204, 0.01998028755187988, 0.020051359176635742, 0.020244863510131834, 0.019990751266479492, 0.020010719299316405, 0.01995599937438965, 0.019921951293945313, 0.01991267204284668, 0.01982854461669922, 0.019880128860473634, 0.019954687118530275, 0.020143104553222657, 0.020203744888305664, 0.02002511978149414, 0.02004128074645996, 0.019999008178710937, 0.019974048614501954, 0.01996620750427246, 0.02000284767150879, 0.020215776443481444, 0.019963903427124022, 0.019931135177612306, 0.01997209548950195, 0.019958879470825194, 0.019989408493041993, 0.020176576614379882, 0.020029760360717772, 0.020109312057495117, 0.020183040618896485, 0.019961856842041017, 0.020011007308959963, 0.019945600509643554, 0.019877567291259765, 0.0199354248046875, 0.019998111724853516, 0.019888288497924806, 0.02009542465209961, 0.019910655975341796, 0.019915807723999025, 0.01996678352355957, 0.019895584106445312, 0.01983577537536621, 0.019910655975341796, 0.020246463775634764, 0.020190752029418946, 0.020156415939331054, 0.0201342716217041, 0.020012672424316407, 0.020068000793457032, 0.019979040145874025, 0.020201568603515626, 0.020377248764038087, 0.020328800201416017, 0.02046771240234375, 0.02042470359802246, 0.020346879959106445, 0.020350976943969725, 0.020488191604614257, 0.020522335052490234, 0.020365247726440428, 0.02041110420227051, 0.020361087799072267, 0.020530912399291994, 0.020455839157104493, 0.020405984878540038, 0.020347007751464842, 0.0203654727935791, 0.020195327758789062, 
0.02006220817565918, 0.01999667167663574, 0.01988787269592285, 0.019936128616333006, 0.01982588768005371, 0.020196128845214843, 0.020016223907470702, 0.019920095443725586, 0.01984649658203125, 0.019917152404785157, 0.02004572868347168, 0.019848512649536132, 0.019940128326416017, 0.019843072891235353, 0.019865535736083986, 0.019884096145629884, 0.019998720169067383, 0.020311872482299806, 0.020049184799194337, 0.01987401580810547, 0.01997478485107422, 0.019951679229736327, 0.02034480094909668, 0.02027519989013672, 0.02002332878112793, 0.020289535522460937, 0.02005401611328125, 0.02002035140991211, 0.019979455947875976, 0.020104095458984374, 0.020156959533691406, 0.02077676773071289, 0.020088863372802735, 0.02011177635192871, 0.01998847961425781, 0.020121152877807618, 0.019959680557250975, 0.01996633529663086, 0.01993337631225586, 0.01992844772338867, 0.020046592712402344, 0.019966848373413087, 0.019991071701049804, 0.01997007942199707, 0.020556415557861328, 0.019990335464477538, 0.019914751052856446, 0.020000768661499024, 0.01997132873535156, 0.019921024322509764, 0.019886623382568358, 0.019922143936157228, 0.0198635196685791, 0.019894655227661134, 0.01997494316101074, 0.02010259246826172, 0.02019375991821289, 0.02033443260192871, 0.020031072616577147, 0.019974559783935548, 0.019957759857177734, 0.019931135177612306, 0.020006912231445313, 0.020006912231445313, 0.020152320861816408, 0.02029792022705078, 0.020097312927246095, 0.01997209548950195, 0.020060159683227538, 0.01984444808959961, 0.020286111831665038, 0.019976192474365235, 0.020151647567749023, 0.02067228889465332, 0.02042969512939453, 0.020840448379516603, 0.02072719955444336, 0.020680864334106444, 0.020791744232177733, 0.020817407608032225, 0.02105507278442383, 0.02081065559387207, 0.020846368789672852, 0.020953088760375976, 0.0208155517578125, 0.020851232528686522, 0.020740095138549804, 0.021255807876586916, 0.02073638343811035, 0.020819807052612306, 0.020833791732788084, 0.020789920806884767, 0.02087116813659668, 0.02089369583129883, 0.020926464080810548, 0.020832256317138673, 0.020774591445922853, 0.02078335952758789, 0.02079689598083496, 0.020852991104125976, 0.020736352920532226, 0.02085478401184082, 0.020813024520874024, 0.020936735153198244, 0.020805856704711915, 0.020637216567993163, 0.020603904724121092, 0.0206561279296875, 0.020612512588500977, 0.020865631103515626, 0.020772064208984375, 0.020766624450683592, 0.020784128189086915, 0.02078227233886719, 0.020943552017211913, 0.02061516761779785, 0.02066431999206543, 0.020703231811523438, 0.02073776054382324, 0.02078713607788086, 0.020703584671020507, 0.020733407974243164, 0.020683296203613283, 0.02075961685180664, 0.020421567916870116, 0.020508672714233397, 0.02062131118774414, 0.020568063735961914, 0.020506975173950195, 0.020524927139282226, 0.02035433578491211, 0.02099292755126953, 0.02040537643432617, 0.020578975677490233, 0.020668575286865234, 0.02034627151489258, 0.02052889633178711, 0.02048908805847168, 0.020565984725952148, 0.02048124885559082, 0.020529951095581055, 0.02047385597229004, 0.02043084716796875, 0.02049420738220215, 0.020414592742919922, 0.020377599716186523, 0.020297216415405273, 0.020465375900268555, 0.02056284713745117, 0.020678367614746094, 0.021090463638305666, 0.020776960372924806, 0.020776960372924806, 0.02066431999206543, 0.02190905570983887, 0.02086697578430176, 0.02080851173400879, 0.021142688751220703, 0.02078163146972656, 0.020692447662353515, 0.02073161506652832, 0.020607168197631837, 0.02069536018371582, 0.020676671981811525, 
0.020619520187377928, 0.020719327926635743, 0.020701087951660157, 0.02060073661804199, 0.020885791778564453, 0.02057961654663086, 0.020604991912841798, 0.02056278419494629, 0.020557823181152343, 0.020682207107543947, 0.02108451271057129, 0.02071104049682617, 0.02105196762084961, 0.02083430480957031, 0.020668607711791992, 0.02075164794921875, 0.020525663375854493, 0.020618207931518556, 0.020640735626220704, 0.02072985649108887, 0.020831775665283204, 0.020729600906372072, 0.020861183166503906, 0.020697120666503907, 0.020701087951660157, 0.02095884895324707, 0.02083091163635254, 0.02055936050415039, 0.020629152297973633, 0.020626272201538086, 0.020771135330200197, 0.020673215866088866, 0.020707712173461915, 0.02064188766479492, 0.020623584747314454, 0.020608831405639648, 0.020662784576416016, 0.020756479263305663, 0.020647136688232422, 0.020577056884765625, 0.020486143112182616, 0.020516864776611327, 0.020590591430664062, 0.020658048629760742, 0.020516992568969727, 0.020512767791748047, 0.020522560119628906, 0.020840896606445312, 0.02060038375854492, 0.020943296432495116, 0.02085862350463867, 0.020699392318725585, 0.020600831985473633, 0.02056345558166504, 0.020455455780029295, 0.020471584320068358, 0.02241846466064453, 0.022122272491455076, 0.02077680015563965, 0.020713600158691406, 0.020661247253417968, 0.020644575119018554, 0.02068502426147461, 0.020522016525268555, 0.020542400360107422, 0.02054524803161621, 0.020588415145874023, 0.020513471603393556, 0.020548927307128907, 0.020480415344238282, 0.02039193534851074, 0.02026198387145996, 0.020345535278320313, 0.02021785545349121, 0.020312288284301757, 0.02006166458129883, 0.019998655319213868, 0.020025951385498047, 0.020059871673583984, 0.020048160552978516, 0.0200581111907959, 0.019992576599121094, 0.020354080200195312, 0.020014047622680664, 0.020039680480957032, 0.020060159683227538, 0.02002124786376953, 0.02003558349609375, 0.020701120376586914, 0.02094393539428711, 0.020547615051269532, 0.020600799560546876, 0.020497472763061523, 0.020677568435668946, 0.021038623809814454, 0.020652511596679687, 0.021830751419067384, 0.02085980796813965, 0.020683839797973634, 0.020624000549316405, 0.020640064239501953, 0.020462751388549805, 0.020752511978149413, 0.020638431549072266, 0.02068889617919922, 0.020585664749145506, 0.020558111190795897, 0.020552223205566406, 0.020502527236938475, 0.020545536041259766, 0.020506431579589843, 0.020365728378295898, 0.020543264389038085, 0.020332544326782227, 0.020270687103271484, 0.020053823471069335, 0.020082944869995116, 0.020510784149169924, 0.02075574493408203, 0.020642816543579103, 0.020690176010131837, 0.020638463973999023, 0.020797439575195312, 0.020813631057739257, 0.020676031112670898, 0.02068524742126465, 0.020812095642089842, 0.02062851142883301, 0.021304224014282228, 0.023337024688720703, 0.021285152435302733, 0.020809440612792968, 0.020670047760009767, 0.020630975723266602, 0.020634336471557616, 0.020729984283447266, 0.0207030086517334, 0.020529375076293946, 0.02050262451171875, 0.02058857536315918, 0.020463615417480468, 0.020508672714233397, 0.02065760040283203, 0.020600896835327148, 0.021048864364624022, 0.022399967193603515, 0.020731903076171874, 0.02054707145690918, 0.020439552307128905, 0.020373504638671876, 0.020379648208618165, 0.020387840270996094, 0.020519264221191408, 0.02053388786315918, 0.020402175903320312, 0.020434944152832032, 0.020787200927734374, 0.02169753646850586, 0.020400352478027343, 0.020339168548583985, 0.020333887100219727, 0.020335039138793944, 0.020363040924072266, 
0.020291807174682618, 0.02024095916748047, 0.02032169532775879, 0.020318815231323242, 0.020328447341918944, 0.02040012741088867, 0.020357120513916017, 0.020449151992797853, 0.020512672424316408, 0.02057644844055176, 0.02062339210510254, 0.020532255172729493, 0.021046239852905272, 0.021840927124023437, 0.02273788833618164, 0.02079737663269043, 0.021033023834228514, 0.02147056007385254, 0.020992576599121095, 0.02087126350402832, 0.020835840225219726, 0.02077132797241211, 0.02083839988708496, 0.02093382453918457, 0.020892480850219726, 0.021200895309448242, 0.020840511322021485, 0.021061248779296875, 0.020924736022949218, 0.020703231811523438, 0.02068070411682129, 0.02084771156311035, 0.02078620719909668, 0.02069900894165039, 0.02081996726989746, 0.020731903076171874, 0.020745376586914062, 0.02070204734802246, 0.02078428840637207, 0.020693855285644533, 0.020559999465942384, 0.02067647933959961, 0.02067865562438965, 0.020715423583984375, 0.020737823486328126, 0.020750656127929687, 0.020872703552246095, 0.021527040481567384, 0.02091779136657715, 0.02080201530456543, 0.02079539108276367, 0.02069708824157715, 0.020659744262695314, 0.021062911987304686, 0.020812543869018554, 0.02072985649108887, 0.020711423873901368, 0.020625408172607423, 0.02062281608581543, 0.020630048751831054, 0.020571552276611327, 0.02046156883239746, 0.020515424728393555, 0.020548959732055665, 0.02051318359375, 0.020510976791381835, 0.02048409652709961, 0.020488191604614257, 0.020575616836547853, 0.021109376907348633, 0.0206561279296875, 0.020744192123413087, 0.020723648071289062, 0.020650047302246094, 0.020772863388061523, 0.0204737606048584, 0.020565183639526367, 0.02060380744934082, 0.020609024047851563, 0.020643743515014648, 0.020883455276489257, 0.020660320281982423, 0.020575359344482423, 0.02053619194030762, 0.02043084716796875, 0.02064739227294922, 0.020638240814208984, 0.020494335174560546, 0.02033568000793457, 0.020386207580566407, 0.0205296630859375, 0.020488224029541015, 0.02040950393676758, 0.02041929626464844, 0.02041632080078125, 0.020492128372192383, 0.02047433662414551, 0.020377599716186523, 0.02067865562438965, 0.020274431228637695, 0.020244287490844726, 0.02029657554626465, 0.02018515205383301, 0.020127424240112303, 0.020397632598876954, 0.020427167892456053, 0.02027667236328125, 0.020132768630981446, 0.02025062370300293, 0.020207359313964845, 0.02056163215637207, 0.019957408905029297, 0.020132415771484374, 0.02007276725769043, 0.019897920608520508, 0.020138240814208983, 0.01997724723815918, 0.020084768295288085, 0.020130752563476562, 0.01981644821166992, 0.019969823837280274, 0.019941312789916992, 0.019900991439819337, 0.019844831466674803, 0.01982464027404785, 0.0198656005859375, 0.01984716796875, 0.020006912231445313, 0.019859296798706055, 0.019888223648071288, 0.019934783935546874, 0.01990297508239746, 0.019855199813842775, 0.019856992721557616, 0.019819072723388672, 0.019868928909301756, 0.01991347122192383, 0.019908607482910155, 0.019990528106689453, 0.01994099235534668, 0.019930496215820312, 0.019861824035644533, 0.01993328094482422, 0.020034080505371095, 0.01981760025024414, 0.019850175857543947, 0.019820255279541017, 0.019855232238769532, 0.01978646469116211, 0.020036415100097658, 0.019960704803466796, 0.01997542381286621, 0.01987436866760254, 0.01987343978881836, 0.02000464057922363, 0.019945407867431642, 0.019960960388183593, 0.01997792053222656, 0.019874975204467772, 0.019858272552490234, 0.019865055084228516, 0.02010713577270508, 0.01987401580810547, 0.01994166374206543, 0.01993766403198242, 
0.019939104080200196, 0.020068063735961914, 0.019834720611572265, 0.02022809600830078, 0.019945344924926757, 0.019850976943969728, 0.019872608184814452, 0.01988198471069336, 0.0198656005859375, 0.01987513542175293, 0.019953792572021484, 0.019840639114379884, 0.019939584732055662, 0.019868352890014648]",tokens/s,49.03238620889902,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,806.907904,3458.072576,0.0,3055.550464,2937.680896,s,1,7.1281953125,7.1281953125,0.0,7.1281953125,7.1281953125,7.1281953125,7.1281953125,[7.1281953125],,kWh,4.1001234749652815e-06,4.3997781835463996e-07,2.580557619993873e-06,7.120658913313794e-06,,MB,1263.239168,3529.375744,0.0,3114.27072,2817.475072,s,10,0.42745565414428704,0.04274556541442871,0.000709270054638234,0.04263473701477051,0.043474876785278324,0.04398951930999756,0.04440123332977295,"[0.044504161834716796, 0.04245987319946289, 0.04336051177978516, 0.04212911987304688, 0.04280960083007813, 0.04283552169799805, 0.04282400131225586, 0.04195721435546875, 0.04214764785766602, 0.04242800140380859]",tokens/s,5988.925342734793,kWh,1.385422052409037e-06,1.5272077514202917e-07,9.139949386729807e-07,2.4521377662240468e-06,tokens/kWh,104398702.03304467,MB,1301.651456,3529.375744,0.0,3114.27072,2877.809152,s,10,12.133501098632811,1.2133501098632813,0.005051075499578927,1.2115252685546876,1.2199279418945312,1.2221140563964845,1.223862947998047,"[1.2074105224609375, 1.212034423828125, 1.2243001708984376, 1.2127139892578125, 1.2099710693359376, 1.21101611328125, 1.209778564453125, 1.219442138671875, 1.2094754638671874, 1.217358642578125]",tokens/s,51.92235900246365,kWh,3.543701938134122e-05,3.908309468026914e-06,2.3367135597326694e-05,6.271246444669483e-05,tokens/kWh,1004584.9825204936,,s,630,12.131182794570917,0.019255845705668132,0.00030365563005797336,0.019188015937805177,0.019428143310546875,0.0196509259223938,0.020543609008789068,"[0.019525920867919922, 0.019187583923339843, 0.019107967376708983, 0.01923843193054199, 0.019193376541137695, 0.019121088027954102, 0.019103647232055664, 0.0190501766204834, 0.01915126419067383, 0.019091232299804688, 0.01900156784057617, 0.019099647521972657, 0.019136287689208983, 0.019589344024658204, 0.01941075134277344, 0.019013824462890624, 0.01905379295349121, 0.01904515266418457, 0.019144704818725586, 0.019063968658447266, 0.019161184310913085, 0.01909427261352539, 0.019116031646728517, 0.019159040451049804, 0.019049535751342772, 0.019129280090332032, 0.01930624008178711, 0.019124256134033204, 0.019198495864868163, 0.019117759704589843, 0.01916054344177246, 0.01911833572387695, 0.01916543960571289, 0.019034143447875976, 0.019132415771484376, 0.019159040451049804, 0.019136255264282226, 0.019158304214477537, 0.019032255172729492, 0.019221120834350586, 0.019187871932983398, 0.019264671325683595, 0.01917804718017578, 0.01934774398803711, 0.019148799896240236, 0.019171295166015626, 0.019279199600219725, 
0.019167936325073243, 0.01923865509033203, 0.019077375411987306, 0.019179519653320314, 0.019174591064453125, 0.01939948844909668, 0.01908940887451172, 0.01916320037841797, 0.0191343994140625, 0.01910700798034668, 0.019096256256103516, 0.01905193519592285, 0.019131103515625, 0.01918156814575195, 0.019107423782348632, 0.019208608627319337, 0.019996639251708984, 0.0194703369140625, 0.019197696685791014, 0.019116287231445313, 0.019007007598876954, 0.019048927307128906, 0.019193567276000977, 0.01933545684814453, 0.019111711502075194, 0.0191092472076416, 0.019001472473144532, 0.019323455810546876, 0.019271455764770507, 0.01921183967590332, 0.019161888122558594, 0.019164960861206056, 0.019214591979980468, 0.019230592727661134, 0.01943087959289551, 0.019435455322265625, 0.01916089630126953, 0.019145631790161134, 0.01937331199645996, 0.019216896057128906, 0.01915705680847168, 0.019128448486328126, 0.019099712371826172, 0.019105119705200194, 0.019532447814941407, 0.019668991088867188, 0.019247039794921875, 0.019195104598999025, 0.01917184066772461, 0.019360095977783202, 0.019215871810913086, 0.019094015121459963, 0.019122175216674805, 0.019139680862426758, 0.019129247665405275, 0.019095552444458007, 0.019158143997192383, 0.019188608169555664, 0.019332895278930663, 0.019371360778808595, 0.01941593551635742, 0.019177471160888672, 0.0192225284576416, 0.019130367279052735, 0.01939455986022949, 0.0192774715423584, 0.01917580795288086, 0.019193824768066407, 0.01917647933959961, 0.019198944091796875, 0.01938627243041992, 0.019240095138549806, 0.019289024353027345, 0.01925734329223633, 0.01911555290222168, 0.019284448623657226, 0.019228128433227538, 0.019075616836547852, 0.019109792709350586, 0.021039424896240236, 0.01976675224304199, 0.019499551773071288, 0.01942527961730957, 0.019269632339477538, 0.019201215744018556, 0.019086143493652345, 0.019118080139160155, 0.01917318344116211, 0.019513023376464843, 0.019277664184570314, 0.019573408126831053, 0.019641439437866212, 0.019334047317504884, 0.01923072052001953, 0.019902271270751955, 0.020936895370483398, 0.019943424224853516, 0.01942527961730957, 0.01949875259399414, 0.019377792358398437, 0.01913510322570801, 0.019322879791259767, 0.01913987159729004, 0.019200576782226562, 0.019094911575317383, 0.019153696060180664, 0.019301759719848634, 0.019212671279907226, 0.01914031982421875, 0.019337247848510743, 0.019122688293457032, 0.019043872833251953, 0.022429695129394533, 0.019743104934692383, 0.019331167221069336, 0.01935740852355957, 0.01925107192993164, 0.019024255752563477, 0.019103776931762694, 0.019113183975219727, 0.019049247741699218, 0.0190382080078125, 0.019131872177124025, 0.0190797119140625, 0.019330623626708985, 0.019150432586669923, 0.019122655868530274, 0.019092927932739256, 0.019118688583374024, 0.01909702491760254, 0.020254720687866212, 0.020031455993652345, 0.020066272735595702, 0.019395551681518554, 0.019314687728881837, 0.019658687591552735, 0.019292224884033204, 0.019195903778076173, 0.01924095916748047, 0.01921433639526367, 0.019275423049926757, 0.01913270378112793, 0.02019171142578125, 0.019750911712646483, 0.019270975112915038, 0.019237567901611328, 0.019219936370849608, 0.01925779151916504, 0.019357791900634767, 0.019219615936279296, 0.0191046085357666, 0.01923481559753418, 0.01920351982116699, 0.019135040283203127, 0.019105791091918945, 0.019119583129882812, 0.01908995246887207, 0.019292160034179686, 0.019179519653320314, 0.019175424575805664, 0.019136512756347656, 0.01926144027709961, 0.019475616455078126, 0.01926553535461426, 
0.019251104354858398, 0.019370943069458007, 0.019199935913085938, 0.019307872772216798, 0.019245792388916015, 0.019209983825683594, 0.019099136352539063, 0.019341407775878908, 0.019386816024780273, 0.019347679138183593, 0.019523040771484375, 0.019182111740112303, 0.019134336471557618, 0.019120256423950197, 0.019292160034179686, 0.019315807342529297, 0.019120447158813475, 0.01915350341796875, 0.01924665641784668, 0.019327423095703126, 0.019207199096679686, 0.01928495979309082, 0.01909097671508789, 0.0190284481048584, 0.019098880767822266, 0.019069696426391603, 0.01903593635559082, 0.01917513656616211, 0.019275392532348633, 0.01915328025817871, 0.019190271377563475, 0.019197568893432618, 0.019359392166137697, 0.01912495994567871, 0.01921023941040039, 0.019156991958618166, 0.019381311416625975, 0.01914771270751953, 0.01923641586303711, 0.01930463981628418, 0.019300607681274413, 0.019797727584838866, 0.019472991943359375, 0.019244768142700194, 0.01917398452758789, 0.01907913589477539, 0.01897884750366211, 0.019263071060180666, 0.019055007934570312, 0.019203968048095703, 0.01922982406616211, 0.019112960815429687, 0.019294208526611328, 0.019458047866821288, 0.019345407485961915, 0.01929583930969238, 0.01917923164367676, 0.019120832443237305, 0.0190947208404541, 0.019185983657836914, 0.019423328399658202, 0.019061023712158204, 0.019134592056274415, 0.019056640625, 0.019271167755126953, 0.019177120208740236, 0.019111936569213867, 0.01915951919555664, 0.01915532875061035, 0.019240127563476563, 0.01918000030517578, 0.019361183166503905, 0.0191026554107666, 0.019107839584350587, 0.019068384170532228, 0.01908995246887207, 0.01930348777770996, 0.01909001541137695, 0.01920240020751953, 0.019603456497192383, 0.019189407348632812, 0.019156543731689454, 0.019370784759521486, 0.019175424575805664, 0.019146751403808594, 0.01919385528564453, 0.019072351455688478, 0.01918838310241699, 0.01924300765991211, 0.019310592651367187, 0.01912985610961914, 0.019120735168457033, 0.019072576522827147, 0.0191648006439209, 0.01907072067260742, 0.01909654426574707, 0.019144256591796874, 0.01914089584350586, 0.019194015502929686, 0.019148799896240236, 0.01901545524597168, 0.018968191146850586, 0.01939036750793457, 0.019528127670288085, 0.0198287353515625, 0.019519487380981446, 0.019267263412475585, 0.01924127960205078, 0.019281280517578124, 0.01925542449951172, 0.01952204895019531, 0.019175424575805664, 0.019249120712280274, 0.019210079193115234, 0.01941231918334961, 0.01919875144958496, 0.019169343948364257, 0.019154752731323242, 0.019151039123535156, 0.019154943466186524, 0.01910806465148926, 0.01905846405029297, 0.01907859230041504, 0.019167808532714842, 0.01927168083190918, 0.019107839584350587, 0.019123519897460937, 0.019137216567993165, 0.019177471160888672, 0.019333120346069335, 0.019096864700317382, 0.01928835105895996, 0.01906528091430664, 0.019100831985473632, 0.019354143142700196, 0.01904057693481445, 0.019064159393310548, 0.019223199844360352, 0.019515071868896484, 0.019194175720214843, 0.019130367279052735, 0.019130367279052735, 0.019118080139160155, 0.01924710464477539, 0.019070272445678712, 0.01918841552734375, 0.019101343154907226, 0.019109472274780274, 0.019155712127685548, 0.019070432662963866, 0.019161344528198242, 0.01932316780090332, 0.019341312408447265, 0.019252704620361327, 0.01926355171203613, 0.019226207733154296, 0.019206975936889647, 0.019314752578735352, 0.019176895141601563, 0.019216096878051758, 0.019227487564086914, 0.019384319305419923, 0.019396608352661132, 0.01916873550415039, 
0.019173919677734377, 0.019260543823242188, 0.019075071334838867, 0.020032447814941408, 0.01962393569946289, 0.019342432022094725, 0.019520416259765624, 0.01922467231750488, 0.019115583419799805, 0.019134815216064454, 0.01921023941040039, 0.01926044845581055, 0.019180511474609373, 0.019146751403808594, 0.01909760093688965, 0.01918976020812988, 0.01920204734802246, 0.019367040634155272, 0.0191343994140625, 0.01934636878967285, 0.0192225284576416, 0.019136512756347656, 0.019320831298828126, 0.01908121681213379, 0.01929737663269043, 0.019227327346801756, 0.019132640838623045, 0.019339263916015623, 0.019144704818725586, 0.019109439849853516, 0.019145151138305665, 0.019138559341430664, 0.01909052848815918, 0.019088287353515625, 0.019061983108520506, 0.019190208435058594, 0.019153247833251952, 0.01901158332824707, 0.019165184020996092, 0.019281919479370118, 0.0192346248626709, 0.019235008239746092, 0.019220319747924805, 0.01914896011352539, 0.019139999389648436, 0.01912892723083496, 0.01919795227050781, 0.019140607833862306, 0.019369983673095705, 0.019403839111328126, 0.0191026554107666, 0.01914665603637695, 0.01903830337524414, 0.019118080139160155, 0.01907904052734375, 0.019099231719970702, 0.019180063247680665, 0.01908691215515137, 0.01904684829711914, 0.019260543823242188, 0.01908620834350586, 0.019052576065063477, 0.01910745620727539, 0.01925155258178711, 0.019125919342041015, 0.019085664749145508, 0.019834815979003908, 0.019627744674682618, 0.019313472747802735, 0.019120256423950197, 0.01903740882873535, 0.019135263442993163, 0.0193939208984375, 0.022542976379394532, 0.0214466552734375, 0.019742271423339845, 0.019155391693115233, 0.019152671813964843, 0.019210336685180664, 0.020408447265625, 0.01908121681213379, 0.019083263397216797, 0.019169279098510742, 0.01911363220214844, 0.019164703369140626, 0.019131200790405273, 0.019132415771484376, 0.019382272720336914, 0.019679231643676756, 0.019533824920654298, 0.019592575073242188, 0.01923446464538574, 0.01911404800415039, 0.01929209518432617, 0.019206111907958984, 0.01906790351867676, 0.019326976776123047, 0.019322240829467773, 0.019237279891967773, 0.019314176559448244, 0.01919254493713379, 0.019105791091918945, 0.01927987289428711, 0.01920614433288574, 0.01977289581298828, 0.019196447372436524, 0.01902124786376953, 0.019118431091308594, 0.019471807479858397, 0.019188512802124025, 0.019091455459594727, 0.019201120376586913, 0.019114463806152344, 0.019095903396606446, 0.019220575332641602, 0.019122175216674805, 0.01913209533691406, 0.019105791091918945, 0.01906924819946289, 0.01942652893066406, 0.019168031692504882, 0.01915251159667969, 0.019437952041625975, 0.019200000762939453, 0.019187711715698243, 0.019149919509887696, 0.019250080108642577, 0.019111295700073243, 0.01912281608581543, 0.02059881591796875, 0.019899423599243165, 0.01938915252685547, 0.019205503463745117, 0.01912656021118164, 0.019199615478515626, 0.019084224700927733, 0.01903411293029785, 0.019140127182006837, 0.019108320236206056, 0.019316736221313476, 0.019378175735473634, 0.019140607833862306, 0.019214080810546874, 0.019161344528198242, 0.01907302474975586, 0.019148799896240236, 0.019029727935791017, 0.019027456283569336, 0.01910243225097656, 0.01912633514404297, 0.019326143264770508, 0.01922131156921387, 0.01908940887451172, 0.019091455459594727, 0.01906892776489258, 0.01910495948791504, 0.01899769592285156, 0.01915737533569336, 0.01903411293029785, 0.018960384368896483, 0.019179519653320314, 0.01900124740600586, 0.01903740882873535, 0.01908585548400879, 
0.019035839080810548, 0.019036767959594726, 0.01941100883483887, 0.019117343902587892, 0.019061567306518555, 0.019162944793701172, 0.019851167678833007, 0.01930668830871582, 0.01922991943359375, 0.019277824401855468, 0.019251392364501952, 0.019206592559814453, 0.01909312057495117, 0.019226751327514648, 0.01909984016418457, 0.01904627227783203, 0.019253599166870118, 0.01905254364013672, 0.0190382080078125, 0.019064191818237305, 0.01910028839111328, 0.01932195281982422, 0.019431392669677736, 0.019194400787353516, 0.019138816833496095, 0.019054752349853515, 0.019273727416992188, 0.01904025650024414, 0.02025929641723633, 0.019619327545166015, 0.01928447914123535, 0.019212287902832033, 0.019128320693969726, 0.01923436737060547, 0.019188159942626952, 0.019152896881103516, 0.01923807907104492, 0.019208127975463868, 0.01922751998901367, 0.01926553535461426, 0.019152128219604492, 0.01917001533508301, 0.019165216445922853, 0.019151935577392577, 0.019784032821655275, 0.01938252830505371, 0.019197471618652345, 0.019132608413696288, 0.019286624908447264, 0.019237920761108397, 0.019096576690673828, 0.019160224914550782, 0.019093599319458008, 0.019173408508300783, 0.01934351921081543, 0.019122751235961914, 0.019185663223266602, 0.019424928665161132, 0.01943587112426758, 0.019304447174072266, 0.01926553535461426, 0.019187711715698243, 0.019412927627563477, 0.01928550338745117, 0.019241535186767578, 0.01933465576171875, 0.019427839279174804, 0.019417087554931642, 0.019312639236450196, 0.019208192825317383, 0.019485727310180664, 0.019399648666381837, 0.01921433639526367, 0.019283967971801756, 0.01921023941040039, 0.019187711715698243, 0.019373504638671876, 0.019262016296386717, 0.019372032165527343, 0.019311712265014647, 0.019301279067993164, 0.019230335235595704, 0.019283647537231444, 0.01920787239074707, 0.019190719604492187, 0.019314079284667968, 0.01925596809387207, 0.019140607833862306, 0.019189056396484376, 0.020122304916381836, 0.020662111282348634]",tokens/s,51.93228151519936,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,922.222592,12647.79264,0.0,12245.270528,12234.440192,s,1,7.398658203125,7.398658203125,0.0,7.398658203125,7.398658203125,7.398658203125,7.398658203125,[7.398658203125],,kWh,9.35644310416895e-06,1.0048064185584025e-06,4.840003871998955e-06,1.5201253394726308e-05,,MB,1323.225088,12935.102464,0.0,12517.900288,12440.746496,s,10,1.7439638977050782,0.1743963897705078,0.0043022600962920035,0.17578302764892578,0.17703073425292967,0.1773337646484375,0.17757618896484376,"[0.16203318786621093, 0.17620573425292968, 0.1764721984863281, 0.1737040710449219, 0.17536032104492189, 0.17668031311035157, 0.17376771545410155, 0.1776367950439453, 0.17514016723632814, 
0.17696339416503906]",tokens/s,1467.9202954652687,kWh,5.178242691447168e-06,5.70962661955631e-07,3.4408799456842005e-06,9.190085299087e-06,tokens/kWh,27856107.06196956,MB,1349.083136,12998.017024,0.0,12580.814848,12543.681536,s,10,36.66371240234375,3.666371240234375,0.00349469966810928,3.665279174804687,3.67171259765625,3.67243857421875,3.67301935546875,"[3.6616396484375, 3.663314208984375, 3.664678466796875, 3.6658798828125, 3.6644814453125, 3.664269287109375, 3.668839111328125, 3.66589453125, 3.67316455078125, 3.67155126953125]",tokens/s,17.183202646978184,kWh,0.00010715087853980724,1.1819028502381853e-05,7.109354079291573e-05,0.00019006344783510475,tokens/kWh,331468.25819269335,,s,630,36.660847240447985,0.058191821016584114,0.000433248994672403,0.058120159149169924,0.058474699783325194,0.05858609733581543,0.06093741123199463,"[0.06036467361450195, 0.05832735824584961, 0.057699649810791016, 0.05770086288452148, 0.05762662506103516, 0.057843006134033204, 0.057831649780273435, 0.05792611312866211, 0.05788467025756836, 0.05799935913085937, 0.05789014434814453, 0.05808809661865234, 0.05780070495605469, 0.05773241424560547, 0.05768479919433594, 0.05776793670654297, 0.05814873504638672, 0.05815843200683594, 0.05806966400146484, 0.05789081573486328, 0.057879871368408206, 0.05779228973388672, 0.057783199310302735, 0.0579788818359375, 0.057847808837890625, 0.05789007949829102, 0.05799190521240234, 0.058099838256835935, 0.057996414184570313, 0.05799542236328125, 0.05797644805908203, 0.05806179046630859, 0.058028030395507815, 0.05818889617919922, 0.05818051147460938, 0.0582369270324707, 0.0583372802734375, 0.058093086242675784, 0.05795449447631836, 0.058049983978271484, 0.05799203109741211, 0.05861785507202148, 0.05829180908203125, 0.0582578239440918, 0.058036224365234375, 0.05818982315063476, 0.05806284713745117, 0.058187393188476565, 0.05821683120727539, 0.05819379043579102, 0.05849715042114258, 0.05847180938720703, 0.05835225677490234, 0.05845996856689453, 0.05823231887817383, 0.05855491256713867, 0.05835996627807617, 0.05852979278564453, 0.058234878540039066, 0.05819596862792969, 0.05809356689453125, 0.05816524887084961, 0.05837823867797851, 0.06093958282470703, 0.058493087768554684, 0.05801219177246094, 0.0579502067565918, 0.0576879997253418, 0.057945537567138675, 0.05759164810180664, 0.057635616302490235, 0.057673057556152346, 0.05790787124633789, 0.05781625747680664, 0.057953086853027344, 0.05783347320556641, 0.057888160705566405, 0.05778643035888672, 0.057685951232910156, 0.058065216064453126, 0.05835804748535156, 0.05828422546386719, 0.058261310577392575, 0.05795852661132812, 0.058027679443359376, 0.05802809524536133, 0.05829167938232422, 0.057879230499267575, 0.05813248062133789, 0.058103809356689455, 0.05809561538696289, 0.05799321746826172, 0.05801891326904297, 0.05810630416870117, 0.0580302734375, 0.05812022399902344, 0.058249473571777344, 0.058224639892578124, 0.059039169311523435, 0.05829251098632812, 0.058281631469726564, 0.058173248291015625, 0.05822560119628906, 0.05845334243774414, 0.05828457641601562, 0.058035648345947266, 0.05811667251586914, 0.05809356689453125, 0.05813638305664062, 0.05815311813354492, 0.058130462646484374, 0.058119457244873045, 0.058092254638671875, 0.05810176086425781, 0.05807718276977539, 0.05815500640869141, 0.058215873718261715, 0.05827423858642578, 0.05847052764892578, 0.05833065414428711, 0.058313182830810543, 0.05804553604125977, 0.05805353546142578, 0.05802188873291016, 0.058148639678955075, 0.05818185424804687, 0.06082963180541992, 
0.058264190673828126, 0.05768339157104492, 0.057791263580322265, 0.057614112854003904, 0.05788620758056641, 0.05776406478881836, 0.05794377517700195, 0.05791801452636719, 0.0579420166015625, 0.05787238311767578, 0.05806489562988281, 0.05790924835205078, 0.05794815826416016, 0.058044414520263675, 0.05815439987182617, 0.05807097625732422, 0.058311328887939454, 0.05802755355834961, 0.058126815795898436, 0.05798092651367188, 0.05797635269165039, 0.05801763153076172, 0.05808323287963867, 0.058090206146240234, 0.05813862228393555, 0.058019840240478515, 0.05791120147705078, 0.057948257446289064, 0.057950496673583984, 0.0580371208190918, 0.058116958618164065, 0.05814243316650391, 0.05820220947265625, 0.0582841911315918, 0.05800553512573242, 0.05834067153930664, 0.05826755142211914, 0.058168094635009764, 0.058156864166259765, 0.05801596832275391, 0.05815292739868164, 0.058044158935546875, 0.05834982299804688, 0.057974369049072265, 0.058048927307128906, 0.05795600128173828, 0.05809801483154297, 0.058003456115722656, 0.05847449493408203, 0.05820211029052735, 0.05842943954467773, 0.058331390380859376, 0.05851315307617187, 0.05839462280273437, 0.05824716949462891, 0.058657886505126954, 0.05836483383178711, 0.058277889251708986, 0.058310657501220706, 0.05837171173095703, 0.058693313598632814, 0.05846905517578125, 0.06105491256713867, 0.05856655883789062, 0.05797945785522461, 0.05792943954467773, 0.057877025604248046, 0.05786617660522461, 0.05786387252807617, 0.057769920349121096, 0.057586112976074216, 0.05777612686157227, 0.058111358642578125, 0.05787014389038086, 0.05773299026489258, 0.05796988677978516, 0.05782006454467774, 0.05797561645507812, 0.0582391357421875, 0.05837539291381836, 0.058200225830078126, 0.058181568145751955, 0.058006046295166015, 0.05799651336669922, 0.057891616821289064, 0.057988094329833983, 0.05777510452270508, 0.05790924835205078, 0.057845088958740236, 0.058090335845947264, 0.05795616149902344, 0.058130367279052735, 0.057905025482177734, 0.05803440093994141, 0.05800508880615234, 0.05822911834716797, 0.058324993133544924, 0.058506622314453124, 0.05842803192138672, 0.058439937591552735, 0.05820800018310547, 0.05848867034912109, 0.05838828659057617, 0.05828243255615234, 0.05812009429931641, 0.05847449493408203, 0.05804851150512695, 0.05818777465820312, 0.058314304351806644, 0.05814521789550781, 0.05805875015258789, 0.05840198516845703, 0.05832611083984375, 0.058252449035644534, 0.05830303955078125, 0.058350753784179685, 0.05839503860473633, 0.05851795196533203, 0.05852358245849609, 0.05839846420288086, 0.05833657455444336, 0.05812083053588867, 0.05808371353149414, 0.05841628646850586, 0.05826236724853515, 0.06118608093261719, 0.05853731155395508, 0.058014270782470706, 0.05782287979125977, 0.05762678527832031, 0.057739521026611326, 0.057839614868164066, 0.05785599899291992, 0.05789286422729492, 0.0584189453125, 0.05783587265014648, 0.05788422393798828, 0.05798332977294922, 0.05793900680541992, 0.05793868637084961, 0.05796198272705078, 0.058143329620361325, 0.05846774291992188, 0.0582437744140625, 0.05804646301269531, 0.05788668823242187, 0.05781939315795898, 0.05777913665771484, 0.05790806579589844, 0.05801574325561523, 0.057970687866210936, 0.05795840072631836, 0.058180831909179685, 0.05803907012939453, 0.05789286422729492, 0.05798451232910156, 0.05795686340332031, 0.05815203094482422, 0.05823123168945313, 0.05811862564086914, 0.05821440124511719, 0.05819126510620117, 0.058358367919921876, 0.058184864044189454, 0.05808415985107422, 0.058078273773193356, 0.05811299133300781, 
0.05817958450317383, 0.05812223815917969, 0.058070304870605466, 0.058075870513916016, 0.058028030395507815, 0.058019008636474606, 0.05804652786254883, 0.058288894653320315, 0.058234878540039066, 0.05849046325683594, 0.05833171081542969, 0.05829414367675781, 0.058444862365722654, 0.05829724884033203, 0.0582691535949707, 0.05853804779052734, 0.05834163284301758, 0.05857712173461914, 0.05823897552490234, 0.058267425537109375, 0.05854435348510742, 0.06108905410766602, 0.0586080322265625, 0.05788198471069336, 0.05782009506225586, 0.05778188705444336, 0.05797119903564453, 0.057864063262939455, 0.057927806854248046, 0.05791526412963867, 0.057948223114013674, 0.058084320068359375, 0.05822563171386719, 0.05793404769897461, 0.0580544319152832, 0.05801369476318359, 0.05816729736328125, 0.058258750915527346, 0.058286785125732425, 0.05815004730224609, 0.05808623886108399, 0.05783967971801758, 0.057836830139160154, 0.057899681091308594, 0.05799481582641602, 0.057858497619628906, 0.05801113510131836, 0.057868927001953126, 0.058019710540771485, 0.05798092651367188, 0.05799935913085937, 0.05807513427734375, 0.058007137298583984, 0.05806313705444336, 0.05822447967529297, 0.05831884765625, 0.05843289566040039, 0.05869456100463867, 0.05847062301635742, 0.058148639678955075, 0.05807833480834961, 0.05804297637939453, 0.05808771133422851, 0.057973758697509765, 0.057957374572753906, 0.05801359939575195, 0.0579666862487793, 0.058017791748046874, 0.05799910354614258, 0.05811382293701172, 0.05804694366455078, 0.058289249420166014, 0.05852048110961914, 0.058489887237548825, 0.05861228942871094, 0.05821686553955078, 0.05818163299560547, 0.05822054290771484, 0.05830246353149414, 0.058433536529541016, 0.058257408142089843, 0.05810992050170898, 0.0581099853515625, 0.0581541748046875, 0.06093209457397461, 0.05846745681762695, 0.057860992431640626, 0.05783347320556641, 0.057870334625244144, 0.058054656982421876, 0.057795902252197266, 0.057993694305419924, 0.05796230316162109, 0.05801590347290039, 0.05798345565795898, 0.05803219223022461, 0.05794934463500977, 0.05796716690063477, 0.058042144775390624, 0.05796793746948242, 0.058364734649658204, 0.058177631378173826, 0.058222015380859374, 0.05804870223999024, 0.05797529602050781, 0.05795008087158203, 0.05807820892333984, 0.05800838470458984, 0.058050464630126954, 0.058044769287109374, 0.058019710540771485, 0.057960704803466795, 0.05802988815307617, 0.05808083343505859, 0.05811238479614258, 0.058261566162109375, 0.05872844696044922, 0.05847654342651367, 0.05839990234375, 0.05825008010864258, 0.05810723114013672, 0.05875708770751953, 0.05826425552368164, 0.058234878540039066, 0.05812838363647461, 0.05806032180786133, 0.05818211364746094, 0.05809135818481445, 0.058593441009521484, 0.05844960021972656, 0.05828771209716797, 0.058257183074951174, 0.05810208129882812, 0.05935782241821289, 0.058525375366210934, 0.058458206176757815, 0.05838396835327148, 0.0582476806640625, 0.05857497787475586, 0.05816524887084961, 0.05810176086425781, 0.05836185455322265, 0.058173439025878904, 0.05817689514160156, 0.05806953430175781, 0.05822025680541992, 0.058243457794189456, 0.06101424026489258, 0.058439327239990235, 0.05791993713378906, 0.05781081771850586, 0.05780287933349609, 0.05789491271972656, 0.05791955184936524, 0.05790435028076172, 0.05787721633911133, 0.05804032135009766, 0.057929729461669924, 0.05795430374145508, 0.05792563247680664, 0.058038272857666016, 0.058028289794921875, 0.058080734252929686, 0.05820467376708984, 0.058307552337646486, 0.058110782623291016, 0.05814022445678711, 
0.0578724479675293, 0.05793939208984375, 0.05789766311645508, 0.058010112762451174, 0.05784960174560547, 0.058007137298583984, 0.05813468933105469, 0.05806655883789062, 0.058092159271240236, 0.05799673461914062, 0.057989696502685546, 0.05803519821166992, 0.058090496063232425, 0.058242591857910156, 0.058186206817626956, 0.05822671890258789, 0.05838409423828125, 0.05830665588378906, 0.058170848846435544, 0.05808812713623047, 0.05796799850463867, 0.05801385498046875, 0.05805065536499023, 0.05818217468261719, 0.058195808410644534, 0.05804851150512695, 0.05808736038208008, 0.05811001586914062, 0.0580404167175293, 0.058107807159423826, 0.05829955291748047, 0.059212638854980466, 0.05830863952636719, 0.058340415954589844, 0.05870665740966797, 0.0586212158203125, 0.05823958587646484, 0.05831097412109375, 0.05821152114868164, 0.058396575927734375, 0.0585266227722168, 0.0584007682800293, 0.058269664764404296, 0.061506145477294924, 0.058783039093017575, 0.05805043029785156, 0.057922367095947266, 0.05781520080566406, 0.05799305725097656, 0.05785968017578125, 0.057915199279785154, 0.05805321502685547, 0.05798294448852539, 0.05790307235717773, 0.05822675323486328, 0.05795865631103515, 0.0580333137512207, 0.05784841537475586, 0.058019840240478515, 0.058413055419921874, 0.05839049530029297, 0.05822262573242187, 0.058391582489013674, 0.058258590698242185, 0.05823443222045899, 0.058076736450195315, 0.05814259338378906, 0.05794192123413086, 0.05816985702514649, 0.057995742797851565, 0.05818975830078125, 0.05796585464477539, 0.05824995040893555, 0.05806854248046875, 0.05793836975097656, 0.05799731063842774, 0.05842073440551758, 0.05848115158081055, 0.05851932907104492, 0.05844400024414063, 0.05829759979248047, 0.0582110710144043, 0.05840224075317383, 0.058270015716552735, 0.05828838348388672, 0.058259456634521485, 0.05829759979248047, 0.05815987014770508, 0.05821839904785156, 0.05816867065429687, 0.05840054321289063, 0.058320960998535155, 0.0584918098449707, 0.05844543838500976, 0.0585456657409668, 0.058641281127929684, 0.05864191818237305, 0.05847296142578125, 0.058710014343261716, 0.05871820831298828, 0.05842534255981445, 0.05824300765991211, 0.058517566680908205, 0.05841097640991211, 0.05838585662841797, 0.058509376525878905, 0.06152803039550781, 0.058668609619140624, 0.05795270538330078, 0.057870334625244144, 0.05780188751220703, 0.058264415740966795, 0.05779203033447266, 0.058011871337890625, 0.05788467025756836, 0.0579238395690918, 0.058000446319580075, 0.05815119934082031, 0.0578443832397461, 0.05814476776123047, 0.05788604736328125, 0.05814300918579102, 0.058054943084716794, 0.05831257629394531, 0.05812041473388672, 0.058044448852539066, 0.05795836639404297, 0.058025089263916016, 0.05842585754394531, 0.05833692932128906, 0.05820694351196289, 0.05818703842163086, 0.0581618881225586, 0.05820172882080078, 0.05804889678955078, 0.05804032135009766, 0.05802560043334961, 0.058143104553222656, 0.058072929382324216, 0.0582841911315918, 0.058340415954589844, 0.05867411041259766, 0.05841299057006836, 0.0582492790222168, 0.05819596862792969, 0.05824723052978516, 0.058461280822753904, 0.05833529663085937, 0.05810671997070312, 0.058318782806396484, 0.05818163299560547, 0.058351070404052734, 0.05822022247314453, 0.05841187286376953, 0.058394207000732425, 0.05839708709716797, 0.05849087905883789, 0.05849257659912109, 0.058712192535400394, 0.05851363372802734, 0.05831433486938477, 0.05830607986450195, 0.05822252655029297, 0.05844678497314453, 0.05845142364501953, 0.058356414794921874, 0.05825519943237305, 
0.05833523178100586, 0.05854207992553711]",tokens/s,17.184545568955635,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.9584,3361.603584,0.0,2959.081472,2942.567424,s,1,7.27462158203125,7.27462158203125,0.0,7.27462158203125,7.27462158203125,7.27462158203125,7.27462158203125,[7.27462158203125],,kWh,6.0537026375110746e-06,6.604991502022048e-07,3.546113947996976e-06,1.0260315735710255e-05,,MB,1228.26752,3552.444416,0.0,3139.436544,3105.830912,s,10,2.390224822998047,0.2390224822998047,0.00107424738458269,0.23926183319091798,0.23976158142089843,0.24009872436523438,0.24036843872070313,"[0.23968666076660156, 0.23896989440917968, 0.2392821044921875, 0.2384141082763672, 0.2395382080078125, 0.2404358673095703, 0.2361840057373047, 0.23924156188964843, 0.23892098999023437, 0.23955142211914063]",tokens/s,1071.0289573467842,kWh,7.241062424593546e-06,7.983265881059993e-07,4.806704387365662e-06,1.2846093400065207e-05,tokens/kWh,19928237.48258755,MB,1258.811392,3594.387456,0.0,3181.379584,3162.0096,s,10,13.430729980468751,1.3430729980468752,0.004247876913244177,1.3441656494140624,1.3470467895507812,1.3477643615722654,1.348338419189453,"[1.3448695068359375, 1.343515869140625, 1.34255419921875, 1.3461380615234375, 1.341357421875, 1.34848193359375, 1.3329671630859374, 1.33914306640625, 1.3468873291015624, 1.3448154296875]",tokens/s,46.90735357766548,kWh,3.93085921166585e-05,4.3356724393718664e-06,2.4448652350233776e-05,6.809291690626413e-05,tokens/kWh,925206.3630454402,,s,630,13.428271265029895,0.021314716293698268,0.0002777882043361118,0.02125070381164551,0.021519564247131348,0.021778947448730466,0.02257164154052735,"[0.021823295593261717, 0.02182681655883789, 0.021461536407470703, 0.021444416046142577, 0.021445024490356446, 0.02121708869934082, 0.021203327178955077, 0.021136703491210936, 0.021301952362060547, 0.021462976455688478, 0.021216575622558593, 0.021276992797851564, 0.021163936614990234, 0.02134809684753418, 0.021359392166137695, 0.02124985694885254, 0.021373119354248047, 0.02126643180847168, 0.021159135818481445, 0.02124799919128418, 0.022753503799438475, 0.022856256484985352, 0.021368831634521485, 0.021180416107177736, 0.021217279434204102, 0.02120070457458496, 0.021297119140625, 0.02119702339172363, 0.021217279434204102, 0.021251552581787108, 0.02129769515991211, 0.021468544006347658, 0.021176959991455076, 0.021168127059936523, 0.02131315231323242, 0.021240192413330077, 0.021292640686035157, 0.02126678466796875, 0.02127187156677246, 0.02131839942932129, 0.021190399169921874, 0.02122755241394043, 0.021395679473876952, 0.021219327926635743, 0.02123904037475586, 0.021158559799194336, 0.021243999481201172, 0.021222496032714845, 0.021496543884277342, 0.02135264015197754, 0.021307071685791015, 0.02192758369445801, 0.02126643180847168, 0.021151807785034178, 0.021215839385986326, 0.0211231689453125, 0.021234975814819337, 0.021254783630371095, 0.021188608169555666, 
0.021154144287109374, 0.021163679122924803, 0.02129302406311035, 0.021275775909423828, 0.021708799362182618, 0.021518335342407227, 0.021297311782836913, 0.021221216201782227, 0.021176319122314453, 0.021782720565795898, 0.021259744644165038, 0.02118252754211426, 0.02125008010864258, 0.021303007125854492, 0.021213727951049803, 0.02119647979736328, 0.021137727737426757, 0.021202943801879884, 0.02125004768371582, 0.021153888702392577, 0.021241695404052734, 0.02121721649169922, 0.021139583587646484, 0.021501535415649413, 0.021236127853393554, 0.021452768325805664, 0.02139548873901367, 0.021210527420043944, 0.021379680633544923, 0.02126576042175293, 0.021193023681640624, 0.021247840881347655, 0.021252607345581053, 0.022486623764038087, 0.02131395149230957, 0.02127667236328125, 0.02127872085571289, 0.021630783081054688, 0.021303487777709962, 0.02141798400878906, 0.021389312744140625, 0.02133318328857422, 0.021472063064575195, 0.021456895828247072, 0.02125132751464844, 0.021322496414184572, 0.021196800231933592, 0.02122457695007324, 0.021162879943847655, 0.021243904113769533, 0.021302431106567384, 0.021263200759887694, 0.021196352005004884, 0.021381568908691407, 0.021733375549316408, 0.02124393653869629, 0.021188575744628905, 0.021164031982421876, 0.021139455795288087, 0.02134121513366699, 0.021185312271118164, 0.02137107276916504, 0.021319679260253906, 0.021238975524902344, 0.021349088668823242, 0.021184608459472655, 0.021323776245117186, 0.021628511428833007, 0.021567007064819337, 0.02158016014099121, 0.021345951080322265, 0.021332639694213867, 0.021182655334472656, 0.02115123176574707, 0.02122742462158203, 0.021131872177124023, 0.021336063385009766, 0.02126950454711914, 0.021234687805175782, 0.02127052879333496, 0.021341663360595703, 0.02133203125, 0.02124028778076172, 0.021249408721923827, 0.021248607635498046, 0.021390399932861327, 0.021302175521850587, 0.021237823486328126, 0.021325536727905273, 0.021285152435302733, 0.021331968307495116, 0.021313535690307618, 0.021210336685180665, 0.021136383056640624, 0.021278495788574218, 0.02145280075073242, 0.02161577606201172, 0.022360448837280274, 0.02138083267211914, 0.02149862480163574, 0.021265920639038087, 0.02131337547302246, 0.02122368049621582, 0.021242271423339842, 0.021145503997802736, 0.021243999481201172, 0.02126438331604004, 0.021159936904907226, 0.021208160400390624, 0.02132371139526367, 0.0213035831451416, 0.02126304054260254, 0.021250015258789064, 0.021282848358154298, 0.02126198387145996, 0.021326175689697267, 0.021202943801879884, 0.021188608169555666, 0.02147123146057129, 0.0214300479888916, 0.021233024597167968, 0.02124435234069824, 0.02115011215209961, 0.02127248001098633, 0.02111497688293457, 0.021140480041503908, 0.02110927963256836, 0.021115360260009767, 0.021614591598510743, 0.021170175552368165, 0.021774335861206053, 0.021555200576782226, 0.02132771110534668, 0.021188608169555666, 0.02119500732421875, 0.021206079483032228, 0.022452287673950196, 0.022176544189453126, 0.021384735107421875, 0.021262815475463867, 0.02149510383605957, 0.021254240036010744, 0.021164640426635743, 0.021321727752685548, 0.021200895309448242, 0.021276031494140625, 0.021145696640014647, 0.021175968170166017, 0.02115059280395508, 0.021298624038696288, 0.02142620849609375, 0.022606367111206054, 0.02225065612792969, 0.021316448211669923, 0.021301248550415038, 0.02119481658935547, 0.021163808822631837, 0.021219488143920898, 0.02123980712890625, 0.021309440612792968, 0.021247840881347655, 0.021141632080078125, 0.0211757755279541, 0.021224000930786132, 
0.021071487426757813, 0.021143936157226564, 0.021094400405883788, 0.021141504287719725, 0.02123776054382324, 0.021138687133789063, 0.021139904022216795, 0.021178592681884767, 0.02123481559753418, 0.021220191955566407, 0.021178304672241213, 0.021186752319335936, 0.02121116828918457, 0.021208255767822266, 0.021193504333496094, 0.02108006477355957, 0.021215232849121093, 0.021222911834716796, 0.02120140838623047, 0.021379072189331053, 0.022814367294311525, 0.02229702377319336, 0.02142608070373535, 0.02136012840270996, 0.021586431503295898, 0.021288192749023438, 0.021236480712890624, 0.021215232849121093, 0.021149696350097655, 0.021696512222290038, 0.02149740791320801, 0.021322175979614257, 0.021207040786743164, 0.02110873603820801, 0.02123161506652832, 0.021217023849487305, 0.021192960739135742, 0.021194080352783203, 0.021101215362548827, 0.02113871955871582, 0.021147424697875977, 0.021214143753051758, 0.021341184616088867, 0.021176639556884765, 0.021318336486816407, 0.021209087371826172, 0.021157888412475585, 0.021144832611083984, 0.02110745620727539, 0.021223072052001954, 0.021140960693359374, 0.021212032318115234, 0.02122137641906738, 0.021232927322387695, 0.021118976593017577, 0.02120982360839844, 0.021207040786743164, 0.021321727752685548, 0.021394624710083007, 0.021268800735473634, 0.021178848266601564, 0.02120911979675293, 0.021311487197875977, 0.02126438331604004, 0.021129024505615233, 0.02129964828491211, 0.02127039909362793, 0.021276063919067383, 0.02127257537841797, 0.02130940818786621, 0.021228031158447267, 0.021245887756347656, 0.021237823486328126, 0.02127257537841797, 0.021317792892456056, 0.021274431228637695, 0.021680160522460936, 0.02147327995300293, 0.021427648544311523, 0.021481472015380858, 0.021684255599975586, 0.0213591365814209, 0.021313407897949218, 0.021368959426879882, 0.02124348831176758, 0.021277088165283203, 0.021335456848144533, 0.021356767654418945, 0.02128291130065918, 0.021374847412109373, 0.021418399810791015, 0.021639167785644533, 0.02190118408203125, 0.021770336151123046, 0.021585920333862304, 0.021878751754760742, 0.021542943954467774, 0.021460224151611328, 0.021572351455688477, 0.021397504806518555, 0.021417343139648437, 0.021383039474487303, 0.02164352035522461, 0.021339935302734377, 0.021451488494873047, 0.021574880599975584, 0.02140444755554199, 0.021234687805175782, 0.02132192039489746, 0.021273408889770508, 0.02143027114868164, 0.021379072189331053, 0.021499519348144532, 0.021434335708618163, 0.02149622344970703, 0.02140598487854004, 0.021423839569091798, 0.021426048278808594, 0.02162633514404297, 0.021445280075073243, 0.021493759155273438, 0.021477184295654296, 0.021432512283325194, 0.021489664077758788, 0.02146303939819336, 0.021337791442871092, 0.021425952911376955, 0.021401311874389647, 0.021513023376464845, 0.021329824447631835, 0.02141360092163086, 0.021387903213500977, 0.021306655883789063, 0.021391839981079103, 0.021338111877441408, 0.021286144256591796, 0.02143075180053711, 0.021257951736450197, 0.021288671493530274, 0.02125667190551758, 0.021277055740356446, 0.021350400924682617, 0.021230592727661132, 0.021224416732788087, 0.02125827217102051, 0.021329311370849608, 0.021254751205444337, 0.021260095596313477, 0.021214719772338866, 0.02126844787597656, 0.021195199966430663, 0.021274784088134765, 0.021231359481811523, 0.021143936157226564, 0.021294464111328126, 0.02166169548034668, 0.021430112838745116, 0.021298751831054688, 0.021248607635498046, 0.021267616271972656, 0.021239871978759765, 0.021166879653930663, 0.02111894416809082, 
0.02112335968017578, 0.02106915283203125, 0.02104547119140625, 0.020998336791992187, 0.02103932762145996, 0.02098092842102051, 0.021097055435180666, 0.020993696212768555, 0.021063936233520507, 0.021014623641967774, 0.020991424560546874, 0.020951328277587892, 0.021106975555419922, 0.0210631046295166, 0.02104911994934082, 0.021599008560180665, 0.02121232032775879, 0.02102729606628418, 0.021318016052246095, 0.021177440643310546, 0.021076095581054687, 0.0210579833984375, 0.021147647857666017, 0.02110908889770508, 0.021125024795532226, 0.021272607803344726, 0.02114156723022461, 0.020963327407836914, 0.021069887161254883, 0.021047231674194335, 0.021032543182373048, 0.021039520263671875, 0.021090303421020508, 0.021020448684692383, 0.021175519943237305, 0.021041887283325195, 0.021169696807861328, 0.021213151931762694, 0.02109644889831543, 0.02109663963317871, 0.021073888778686524, 0.021033599853515626, 0.021118303298950196, 0.020990623474121093, 0.022097888946533202, 0.021184223175048828, 0.02113158416748047, 0.021140512466430665, 0.02113225555419922, 0.021725183486938478, 0.021143552780151367, 0.02117136001586914, 0.02116281509399414, 0.021188575744628905, 0.021098495483398438, 0.0218090877532959, 0.02170719909667969, 0.021395456314086913, 0.021222848892211914, 0.02125267219543457, 0.02123529624938965, 0.0212109432220459, 0.021234272003173828, 0.021179967880249024, 0.021143999099731445, 0.021194751739501954, 0.021198848724365234, 0.02127257537841797, 0.021135360717773437, 0.021187999725341796, 0.021172832489013672, 0.02127257537841797, 0.021153791427612305, 0.021216991424560547, 0.021111007690429687, 0.02132383918762207, 0.02109769630432129, 0.02120579147338867, 0.02107187271118164, 0.021135360717773437, 0.021133312225341795, 0.02105379295349121, 0.021049312591552734, 0.02112233543395996, 0.02243971252441406, 0.02125008010864258, 0.021043359756469728, 0.021072320938110352, 0.021126592636108398, 0.02104355239868164, 0.02107734489440918, 0.021136159896850585, 0.021128448486328125, 0.02110550308227539, 0.021124351501464845, 0.02123583984375, 0.021181312561035157, 0.02110438346862793, 0.021358272552490235, 0.02113158416748047, 0.021182464599609374, 0.02124959945678711, 0.021180864334106445, 0.021278335571289064, 0.02157401657104492, 0.021438432693481446, 0.021339616775512694, 0.021293632507324217, 0.021823488235473632, 0.02128895950317383, 0.021202943801879884, 0.02122547149658203, 0.021515552520751952, 0.021314144134521484, 0.021245183944702147, 0.02118511962890625, 0.0213035831451416, 0.021202943801879884, 0.02179075241088867, 0.021493696212768556, 0.021428255081176757, 0.02117532730102539, 0.02106262397766113, 0.02116419219970703, 0.021052831649780272, 0.021151424407958985, 0.021191423416137695, 0.02116156768798828, 0.021262752532958985, 0.021181535720825196, 0.021291648864746094, 0.02134864044189453, 0.021833728790283204, 0.021190784454345704, 0.021374624252319337, 0.02122096061706543, 0.021289600372314452, 0.021448223114013672, 0.02137049674987793, 0.02134223937988281, 0.021352672576904298, 0.021371488571166993, 0.021311487197875977, 0.021348352432250976, 0.022285791397094728, 0.021383712768554688, 0.021368383407592773, 0.02127663993835449, 0.02128124809265137, 0.021805055618286134, 0.02123366355895996, 0.021378816604614256, 0.021467391967773437, 0.021276384353637694, 0.021358400344848632, 0.021380767822265626, 0.0214085750579834, 0.02136649513244629, 0.021546304702758787, 0.02149679946899414, 0.021473024368286135, 0.021535167694091795, 0.021458751678466798, 0.021811264038085938, 
0.021342144012451172, 0.021315584182739256, 0.021341920852661133, 0.02140176010131836, 0.02131350326538086, 0.02138332748413086, 0.02128486442565918, 0.02128486442565918, 0.021370880126953123, 0.021235712051391603, 0.02128656005859375, 0.021295455932617186, 0.02127052879333496, 0.021420032501220702, 0.021409791946411134, 0.02148294448852539, 0.02138310432434082, 0.022052896499633788, 0.021530624389648437, 0.02129305648803711, 0.0212193603515625, 0.021204992294311522, 0.021288192749023438, 0.021144224166870118, 0.021239072799682616, 0.021268640518188477, 0.021219680786132813, 0.021197439193725586, 0.02112886428833008, 0.021180416107177736, 0.021163423538208007, 0.021145727157592772, 0.02110905647277832, 0.021144992828369142, 0.02109667205810547, 0.021211679458618165, 0.02106710433959961, 0.02111350440979004, 0.02116099166870117, 0.02112816047668457, 0.021134592056274413, 0.021240543365478516, 0.02109414482116699, 0.021178367614746094, 0.021166528701782227, 0.0211060791015625, 0.021051679611206055, 0.023422239303588867, 0.02188172721862793, 0.021562944412231444, 0.02128121566772461, 0.02126028823852539, 0.021113983154296877, 0.02121232032775879, 0.021102304458618163, 0.021069759368896483, 0.021102655410766603, 0.021106687545776368, 0.021073919296264648, 0.021042688369750977, 0.021084287643432616, 0.02114748764038086, 0.02190355110168457, 0.02353596878051758, 0.021376031875610352, 0.021301631927490235, 0.021254751205444337, 0.021127168655395507, 0.021129312515258788, 0.021153696060180666, 0.02107366371154785, 0.021190975189208986, 0.02115167999267578, 0.021221311569213867, 0.021274879455566408, 0.021236608505249024, 0.021309471130371092, 0.02306972885131836, 0.022228639602661134, 0.021258464813232424]",tokens/s,46.915942310508335,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 634, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 102, in __init__ self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 217659 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, 
config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 239383 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = 
GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 215842 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 634, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 102, in __init__ self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=config.attention_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 217305 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.270272,813.563904,0.0,411.041792,391.374848,s,1,7.48321875,7.48321875,0.0,7.48321875,7.48321875,7.48321875,7.48321875,[7.48321875],,kWh,4.3827517416730184e-06,4.763208172630246e-07,8.98889607997555e-07,5.757962166933598e-06,,MB,1221.48864,886.964224,0.0,473.956352,454.832128,s,14,0.34915372467041017,0.024939551762172153,0.0004752968524060241,0.024781711578369142,0.024976924896240236,0.02555138111114502,0.02640233402252197,"[0.02661507225036621, 0.024744543075561523, 0.024754207611083986, 0.024972959518432616, 0.024935104370117187, 0.024758975982666017, 0.024804447174072267, 0.02470159912109375, 0.024667264938354493, 0.02473695945739746, 0.024819711685180663, 0.02497862434387207, 0.02493257522583008, 0.024731679916381834]",tokens/s,10264.819610282493,kWh,8.251415942679199e-07,9.096519061581378e-08,5.454987100960368e-07,1.4616054949797704e-06,tokens/kWh,175149861.49086913,MB,1253.00736,914.2272,0.0,501.219328,454.834688,s,14,9.788775939941406,0.6991982814243861,0.008938843571091176,0.69572412109375,0.7119116760253906,0.7126324066162109,0.7127987469482422,"[0.6892632446289062, 0.696510498046875, 0.7033971557617188, 0.7125204467773437, 0.7104912109375, 0.6886373291015625, 0.689298095703125, 0.691288330078125, 0.6941023559570313, 0.6922561645507812, 0.71284033203125, 0.7027774658203125, 0.71045556640625, 0.694937744140625]",tokens/s,90.10319629455932,kWh,2.0258209736981944e-05,2.2337409699881696e-06,7.996512091760668e-06,3.0488462798730772e-05,tokens/kWh,2066355.4084669258,,s,882,9.781736865043635,0.011090404608893017,0.00023912303103861167,0.011000895977020263,0.011398863887786865,0.011475065422058105,0.01182962532043457,"[0.010652447700500488, 0.011087871551513672, 0.010956031799316406, 0.010957568168640137, 0.010894368171691895, 0.010953472137451173, 0.01092630386352539, 0.010939871788024903, 0.010838560104370117, 0.010973183631896973, 0.010954751968383789, 0.010925215721130371, 
0.010921983718872071, 0.01090236759185791, 0.010941439628601075, 0.010869759559631348, 0.01094041633605957, 0.010948479652404785, 0.010863776206970216, 0.010844191551208497, 0.010955360412597656, 0.010951168060302734, 0.010910911560058594, 0.010905887603759766, 0.010920191764831543, 0.010939711570739747, 0.010899999618530273, 0.010887776374816895, 0.010919615745544434, 0.010938367843627929, 0.010889216423034668, 0.010969056129455566, 0.010923295974731446, 0.01092627239227295, 0.010965567588806152, 0.01090336036682129, 0.010911935806274415, 0.01081481647491455, 0.010854496002197265, 0.010884767532348632, 0.01090227222442627, 0.010885503768920899, 0.010958623886108399, 0.010926207542419434, 0.010967328071594238, 0.010903136253356933, 0.010950752258300781, 0.011019583702087402, 0.010951487541198731, 0.010960512161254883, 0.010952287673950196, 0.011190848350524902, 0.010936415672302247, 0.010862079620361328, 0.01108022403717041, 0.010985216140747071, 0.010952799797058106, 0.011007776260375976, 0.011083328247070312, 0.010912256240844726, 0.010905471801757813, 0.010914143562316894, 0.010983327865600585, 0.010799008369445801, 0.011063039779663086, 0.010981504440307616, 0.011155200004577637, 0.011139455795288087, 0.010983424186706543, 0.011016096115112305, 0.010946656227111816, 0.010977279663085938, 0.010991616249084473, 0.010922080039978027, 0.010990912437438966, 0.011012351989746094, 0.01102064037322998, 0.011139072418212891, 0.011098367691040039, 0.011107456207275391, 0.011181983947753906, 0.011113280296325684, 0.01099283218383789, 0.011010815620422363, 0.011061311721801758, 0.011005279541015624, 0.01202783966064453, 0.010973823547363282, 0.010977279663085938, 0.010960000038146973, 0.010934271812438966, 0.010910592079162597, 0.010921983718872071, 0.010954879760742187, 0.010911744117736816, 0.01090454387664795, 0.010971712112426759, 0.010911711692810059, 0.01094694423675537, 0.010893312454223633, 0.010940095901489259, 0.01096121597290039, 0.010901439666748046, 0.010916064262390137, 0.010893152236938477, 0.01093609619140625, 0.010882816314697265, 0.010968864440917969, 0.010922176361083984, 0.010929951667785645, 0.011674336433410645, 0.012572863578796386, 0.011906496047973633, 0.010930560111999511, 0.010990943908691406, 0.010957504272460937, 0.010950624465942383, 0.01112185573577881, 0.010976063728332519, 0.011011615753173828, 0.010962431907653808, 0.010951456069946288, 0.010967552185058594, 0.0109933443069458, 0.010931296348571777, 0.010984095573425293, 0.010590208053588868, 0.010911840438842774, 0.010935296058654785, 0.010914719581604004, 0.010889280319213867, 0.010890975952148437, 0.010889439582824708, 0.01097663974761963, 0.010936960220336914, 0.010926176071166992, 0.01088912010192871, 0.01099071979522705, 0.010904159545898438, 0.010966815948486329, 0.010907551765441895, 0.010887776374816895, 0.010889504432678223, 0.010886240005493163, 0.01083456039428711, 0.010859647750854491, 0.010888192176818847, 0.010880191802978516, 0.01085091209411621, 0.010912159919738769, 0.010939583778381348, 0.010916192054748535, 0.01096617603302002, 0.011109375953674316, 0.011157312393188477, 0.011127167701721192, 0.011134048461914063, 0.011789024353027344, 0.013529279708862304, 0.011413311958312989, 0.01129043197631836, 0.011348159790039062, 0.011208703994750976, 0.011259967803955079, 0.011278271675109863, 0.011208703994750976, 0.011286527633666991, 0.01126585578918457, 0.011264320373535155, 0.011370271682739257, 0.011305055618286132, 0.0113438720703125, 0.011305279731750488, 0.011356927871704102, 
0.011320096015930176, 0.011245087623596191, 0.011178624153137208, 0.011308704376220703, 0.01131760025024414, 0.011236960411071778, 0.011289216041564942, 0.011250783920288086, 0.011194144248962402, 0.011389856338500976, 0.011347968101501465, 0.011370304107666016, 0.011314528465270996, 0.011270912170410156, 0.011241567611694337, 0.011143168449401856, 0.01141759967803955, 0.011507136344909668, 0.01138649559020996, 0.011393983840942383, 0.011367775917053223, 0.011338399887084962, 0.011280384063720703, 0.011306528091430665, 0.0114335355758667, 0.011404191970825196, 0.011348223686218262, 0.011346943855285644, 0.01130684757232666, 0.011328415870666504, 0.011356191635131836, 0.01137660789489746, 0.011227328300476075, 0.011242464065551757, 0.011240608215332031, 0.01123465633392334, 0.01132579231262207, 0.011243519783020019, 0.011202848434448243, 0.011185888290405274, 0.011167584419250488, 0.011210687637329101, 0.011268575668334961, 0.01126159954071045, 0.011182175636291505, 0.011386879920959473, 0.011380224227905274, 0.01125222396850586, 0.011263999938964844, 0.011268095970153809, 0.0114236478805542, 0.011198559761047363, 0.011171839714050292, 0.011161824226379395, 0.011284159660339356, 0.011259008407592773, 0.011251999855041503, 0.011217599868774413, 0.011194527626037598, 0.011192000389099122, 0.011204928398132323, 0.01120035171508789, 0.01136025619506836, 0.011349056243896484, 0.01125062370300293, 0.011293855667114258, 0.01122704029083252, 0.01127519989013672, 0.01128649616241455, 0.01130303955078125, 0.011286432266235352, 0.011272192001342773, 0.0112424955368042, 0.011300959587097169, 0.011276384353637696, 0.011318079948425293, 0.011284031867980958, 0.012271391868591308, 0.011229215621948242, 0.01147059154510498, 0.01139907169342041, 0.011669504165649413, 0.011412799835205078, 0.011558688163757325, 0.01135536003112793, 0.011325087547302246, 0.011288448333740235, 0.011414943695068359, 0.011324064254760742, 0.011308223724365234, 0.011317248344421387, 0.011302847862243653, 0.011305600166320802, 0.011354528427124023, 0.011931232452392578, 0.012299839973449706, 0.011465727806091308, 0.011398943901062011, 0.01135206413269043, 0.011398143768310547, 0.011362879753112794, 0.011378239631652833, 0.011377535820007324, 0.011314208030700684, 0.011283424377441406, 0.011313152313232423, 0.011463935852050782, 0.01136511993408203, 0.011350015640258788, 0.011265439987182617, 0.011301471710205077, 0.01125376033782959, 0.011220000267028808, 0.011502880096435547, 0.011349856376647949, 0.011143232345581055, 0.011197504043579101, 0.01113161563873291, 0.011051008224487305, 0.010987520217895508, 0.011072896003723144, 0.011041760444641113, 0.011245535850524903, 0.01109779167175293, 0.01141759967803955, 0.011130751609802246, 0.011116671562194824, 0.011018239974975585, 0.01092147159576416, 0.01092249584197998, 0.010991616249084473, 0.010977279663085938, 0.01174937629699707, 0.010917856216430663, 0.010977472305297852, 0.011040608406066894, 0.011228256225585938, 0.01087782382965088, 0.010894559860229492, 0.010902560234069825, 0.010982751846313477, 0.010631168365478515, 0.010901823997497558, 0.01090732765197754, 0.010943743705749511, 0.0108984956741333, 0.010909279823303223, 0.011007360458374024, 0.01091596794128418, 0.01092240047454834, 0.010933759689331055, 0.010937024116516113, 0.010938367843627929, 0.010884799957275391, 0.010961440086364746, 0.010887136459350586, 0.010940383911132813, 0.010882719993591309, 0.010936863899230957, 0.010922847747802734, 0.010847392082214356, 0.01088918399810791, 0.010908512115478516, 
0.01093008041381836, 0.010871199607849122, 0.01087497615814209, 0.01094700813293457, 0.010870783805847169, 0.010878080368041993, 0.010942720413208007, 0.01112332820892334, 0.011145183563232422, 0.010945568084716797, 0.010993760108947754, 0.010940383911132813, 0.01084124755859375, 0.010870559692382813, 0.010843199729919433, 0.010877247810363769, 0.01098582363128662, 0.010887680053710937, 0.010903327941894532, 0.010852543830871583, 0.01090726375579834, 0.01107100772857666, 0.01096339225769043, 0.010901727676391602, 0.010987327575683594, 0.010950847625732422, 0.010962719917297364, 0.010866911888122558, 0.010900799751281737, 0.010955455780029297, 0.010981568336486816, 0.010941408157348632, 0.010953568458557128, 0.010953984260559081, 0.010918911933898925, 0.010862336158752442, 0.010959872245788574, 0.010912768363952637, 0.010883071899414062, 0.010993023872375489, 0.010909472465515137, 0.01070531177520752, 0.01093222427368164, 0.01085961627960205, 0.010967840194702148, 0.010958399772644043, 0.010943039894104004, 0.010923680305480956, 0.011003999710083008, 0.010973183631896973, 0.010957311630249024, 0.01088646411895752, 0.010899904251098633, 0.010909695625305176, 0.010905376434326172, 0.011102432250976563, 0.010953824043273925, 0.011213727951049805, 0.011011072158813476, 0.011025312423706055, 0.011197664260864259, 0.010912639617919922, 0.010905599594116211, 0.010911744117736816, 0.010947872161865234, 0.01094320011138916, 0.010883071899414062, 0.010965056419372558, 0.01088044834136963, 0.010957599639892578, 0.01083516788482666, 0.01084671974182129, 0.010855711936950684, 0.010907903671264648, 0.01086633586883545, 0.01087775993347168, 0.010868736267089844, 0.010853887557983399, 0.010916352272033691, 0.010926079750061036, 0.010889216423034668, 0.010862943649291992, 0.010906496047973634, 0.010881823539733887, 0.010868736267089844, 0.010888287544250488, 0.010898112297058105, 0.010970623970031738, 0.010867424011230468, 0.01095680046081543, 0.010963168144226074, 0.010899328231811523, 0.010886783599853516, 0.010946847915649415, 0.010973024368286132, 0.010975392341613769, 0.010940128326416016, 0.011047231674194335, 0.010993632316589355, 0.010946080207824707, 0.010908127784729004, 0.0109486083984375, 0.010968607902526855, 0.01099830436706543, 0.010599616050720215, 0.010920767784118652, 0.010898591995239258, 0.010935392379760742, 0.010878080368041993, 0.010922368049621583, 0.010895584106445312, 0.010893343925476074, 0.010921983718872071, 0.010897024154663086, 0.010903231620788574, 0.010846624374389649, 0.01089798355102539, 0.010948320388793946, 0.010895487785339355, 0.010905535697937011, 0.010922112464904786, 0.010870431900024414, 0.010922112464904786, 0.010886848449707031, 0.010902112007141113, 0.010934271812438966, 0.011161120414733886, 0.011098496437072754, 0.011073375701904297, 0.011112256050109863, 0.01089964771270752, 0.010889247894287109, 0.010960351943969727, 0.010959360122680664, 0.010903231620788574, 0.01089891242980957, 0.011031392097473145, 0.011035712242126464, 0.01092089557647705, 0.011032575607299805, 0.01096025562286377, 0.010967679977416992, 0.010907072067260742, 0.010965888023376464, 0.01100870418548584, 0.011617280006408692, 0.011037856101989747, 0.010994527816772461, 0.01101414394378662, 0.011210751533508301, 0.010927935600280762, 0.010918239593505859, 0.01100755214691162, 0.010924384117126465, 0.010956543922424317, 0.010925663948059081, 0.010960736274719239, 0.010938400268554688, 0.010875616073608398, 0.010882783889770507, 0.010897695541381836, 0.010864992141723633, 0.01086633586883545, 
0.011341823577880859, 0.011188223838806152, 0.011049216270446777, 0.010995455741882324, 0.010621408462524413, 0.010913951873779297, 0.01090675163269043, 0.010969280242919922, 0.010877856254577637, 0.011058688163757324, 0.010909024238586426, 0.010894559860229492, 0.01085001564025879, 0.010909567832946778, 0.011004032135009766, 0.01088646411895752, 0.011141119956970215, 0.011139776229858398, 0.011413503646850585, 0.011288576126098633, 0.010906847953796386, 0.01093507194519043, 0.010935903549194336, 0.010901984214782714, 0.01083795166015625, 0.011068448066711426, 0.010982368469238281, 0.010999808311462403, 0.010856448173522949, 0.010962080001831055, 0.010928128242492676, 0.010895999908447265, 0.010870207786560058, 0.010866815567016602, 0.010872608184814453, 0.010942975997924804, 0.010950816154479981, 0.01090991973876953, 0.010944255828857422, 0.010866944313049316, 0.010908864021301269, 0.011229344367980957, 0.011328160285949707, 0.011515040397644043, 0.01112559986114502, 0.011111968040466309, 0.011038751602172851, 0.011000255584716796, 0.010972607612609863, 0.011057184219360352, 0.011084320068359375, 0.01105510425567627, 0.010913344383239746, 0.01106726360321045, 0.011120415687561036, 0.011074175834655762, 0.011027968406677247, 0.01118393611907959, 0.011068608283996582, 0.011040608406066894, 0.011060864448547363, 0.01117638397216797, 0.011166879653930664, 0.01108460807800293, 0.010946335792541504, 0.010983424186706543, 0.011065312385559082, 0.01065167999267578, 0.011458527565002442, 0.011534720420837403, 0.01194547176361084, 0.0110349760055542, 0.01098793601989746, 0.011132575988769532, 0.01097763156890869, 0.010922752380371093, 0.01089027214050293, 0.010913151741027832, 0.011020799636840821, 0.011034624099731445, 0.010919936180114746, 0.010952639579772949, 0.010934335708618164, 0.010936032295227051, 0.010991904258728027, 0.010944512367248535, 0.010996800422668457, 0.011028736114501954, 0.0109202880859375, 0.010961055755615234, 0.010873023986816405, 0.010847519874572754, 0.010879712104797363, 0.010889216423034668, 0.010883071899414062, 0.010862591743469239, 0.0110632963180542, 0.010893312454223633, 0.01096451187133789, 0.010885055541992188, 0.010890975952148437, 0.01090595245361328, 0.010899328231811523, 0.011047519683837891, 0.010868736267089844, 0.01105510425567627, 0.010870783805847169, 0.011003904342651367, 0.010997759819030761, 0.010979328155517578, 0.010891263961791992, 0.010934271812438966, 0.010908896446228028, 0.010922783851623536, 0.010876928329467773, 0.011372032165527344, 0.011007488250732422, 0.010904576301574707, 0.0109518404006958, 0.01095577621459961, 0.01093619155883789, 0.011083744049072265, 0.010870783805847169, 0.010907967567443848, 0.011014080047607422, 0.010965824127197266, 0.010903552055358886, 0.010900511741638183, 0.01089731216430664, 0.010903807640075683, 0.011006560325622559, 0.011529215812683105, 0.011807488441467286, 0.01100153636932373, 0.011005632400512695, 0.010969632148742676, 0.010897664070129395, 0.010968576431274414, 0.01131760025024414, 0.01099779224395752, 0.0109649600982666, 0.0110448637008667, 0.011008000373840332, 0.010960895538330078, 0.010965312004089355, 0.011120320320129395, 0.011132736206054688, 0.011163488388061523, 0.011067744255065917, 0.011085951805114746, 0.011134847640991211, 0.011165792465209962, 0.011153311729431152, 0.011102208137512207, 0.011257856369018555, 0.01144217586517334, 0.011356160163879395, 0.011829248428344727, 0.01142579174041748, 0.011521344184875488, 0.011831232070922851, 0.011686880111694336, 0.0114334716796875, 
0.011475135803222656, 0.011466848373413085, 0.011488320350646973, 0.01147590446472168, 0.011531488418579101, 0.011481216430664062, 0.01147372817993164, 0.011406399726867677, 0.011450143814086914, 0.011420255661010742, 0.011444671630859375, 0.011462271690368652, 0.011448672294616699, 0.01141977596282959, 0.011507264137268066, 0.01146070384979248, 0.011464544296264648, 0.011516287803649902, 0.01143171215057373, 0.011370719909667969, 0.011353856086730956, 0.011387136459350586, 0.011433440208435059, 0.011391584396362304, 0.011425375938415527, 0.011187744140625, 0.011138912200927734, 0.011082719802856446, 0.011118623733520508, 0.011257120132446289, 0.011096351623535157, 0.01122713565826416, 0.011202560424804688, 0.011124735832214355, 0.011175935745239257, 0.0111595516204834, 0.011202688217163087, 0.011333151817321778, 0.0111080961227417, 0.011167327880859374, 0.011115455627441406, 0.01106287956237793, 0.01107196807861328, 0.01128275203704834, 0.011267423629760742, 0.011325407981872558, 0.011380767822265625, 0.011338047981262207, 0.011216192245483399, 0.011186911582946777, 0.011184224128723145, 0.011177887916564941, 0.011187968254089355, 0.011104063987731934, 0.011149279594421387, 0.011177696228027344, 0.011076352119445802, 0.010991616249084473, 0.011036640167236328, 0.011081119537353516, 0.011096159934997558, 0.011039456367492676, 0.011046624183654786, 0.011063520431518554, 0.011202688217163087, 0.0110316162109375, 0.010912447929382325, 0.010936032295227051, 0.011004287719726563, 0.011042207717895507, 0.010918463706970215, 0.011079615592956542, 0.011106016159057618, 0.010998368263244629, 0.010931360244750976, 0.010916255950927734, 0.010969120025634766, 0.01095206356048584, 0.010926943778991699, 0.011001728057861327, 0.01106707191467285, 0.01112451171875, 0.011153023719787597, 0.011242400169372559, 0.011333503723144532, 0.01147539234161377, 0.011331520080566405, 0.011359999656677246, 0.011300864219665528, 0.011340864181518555, 0.011377471923828125, 0.011384767532348633, 0.011335455894470214, 0.011108127593994141, 0.011356351852416992, 0.011357248306274414, 0.011477984428405762, 0.0114585599899292, 0.011447711944580078, 0.01142745590209961, 0.0113919677734375, 0.011333760261535644, 0.011433631896972656, 0.011418848037719727, 0.01151692771911621, 0.011378368377685548, 0.011404704093933106, 0.011410304069519043, 0.01139408016204834, 0.011354432106018067, 0.01130726432800293, 0.011499199867248535, 0.0114204158782959, 0.01140121555328369, 0.011400256156921387, 0.011487968444824219, 0.011493599891662598, 0.011449343681335449, 0.011492351531982421, 0.011613183975219727, 0.011528544425964356, 0.011502240180969238, 0.011568672180175781, 0.011513664245605468, 0.011636544227600097, 0.011408415794372559, 0.011347776412963867, 0.011243519783020019, 0.011124704360961914, 0.011375871658325196, 0.011282464027404786, 0.01109068775177002, 0.011150783538818359, 0.011112319946289062, 0.01115817642211914, 0.011179936408996583, 0.011063424110412598, 0.011059200286865235, 0.010996064186096192, 0.01095849609375, 0.01098259162902832, 0.010965824127197266, 0.010991328239440918, 0.011022624015808105, 0.01091932773590088, 0.010974911689758301, 0.010963935852050782, 0.010876864433288573, 0.01118342399597168, 0.01107040023803711, 0.011060992240905761, 0.010948320388793946, 0.011632927894592285, 0.010989151954650878, 0.010911775588989257, 0.01095299243927002, 0.010725631713867187, 0.010949824333190918, 0.01104364776611328, 0.010981696128845215, 0.010987199783325196, 0.010901632308959961, 0.010994591712951661, 
0.011049535751342774, 0.011139295578002929, 0.011245247840881347, 0.011069791793823242, 0.011098272323608398, 0.011105536460876465, 0.011212800025939941, 0.011236096382141113, 0.011079520225524902, 0.011028639793395996, 0.011058591842651367, 0.011103967666625976, 0.011188223838806152, 0.011153599739074708, 0.011397824287414551, 0.011188384056091308, 0.011191583633422852, 0.011072064399719238, 0.011114687919616699, 0.011087679862976074, 0.011057151794433593, 0.010991744041442871, 0.011060992240905761, 0.011071104049682617, 0.01109062385559082, 0.011063167572021484, 0.011075136184692382, 0.011039104461669922, 0.010958847999572753, 0.01100595188140869, 0.01102847957611084, 0.011034048080444336, 0.0109552001953125, 0.010925439834594727, 0.011082528114318847, 0.011024479866027831, 0.01094643211364746, 0.010913791656494141, 0.010971487998962403, 0.01098089599609375, 0.01091977596282959, 0.01092131233215332, 0.011066304206848145, 0.010970720291137695, 0.010968992233276367, 0.01093887996673584, 0.010901215553283692, 0.010886560440063477, 0.010857343673706055, 0.010903552055358886, 0.010893088340759277, 0.010905247688293457, 0.010869312286376952, 0.010919936180114746, 0.010964127540588378, 0.010898271560668945]",tokens/s,90.16803581702818,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.249792,813.563904,0.0,411.041792,391.374848,s,1,7.228544921875,7.228544921875,0.0,7.228544921875,7.228544921875,7.228544921875,7.228544921875,[7.228544921875],,kWh,4.339828587467309e-06,4.716263721967338e-07,9.063896139888117e-07,5.717844573652854e-06,,MB,1344.733184,889.061376,0.0,473.956352,454.832128,s,17,0.17702256107330322,0.010413091827841366,9.311530951650476e-05,0.010417183876037598,0.010512742614746093,0.010544077110290527,0.01062941707611084,"[0.010327296257019043, 0.010453184127807618, 0.01048259162902832, 0.010373344421386719, 0.01044863986968994, 0.010255999565124511, 0.010384160041809082, 0.01036524772644043, 0.01031283187866211, 0.01029971218109131, 0.010509632110595702, 0.010439616203308105, 0.010362560272216798, 0.01042240047454834, 0.010417183876037598, 0.010650752067565918, 0.01051740837097168]",tokens/s,24584.43699838848,kWh,2.962083987309926e-07,3.2666701513307126e-08,1.7150803342741917e-07,5.003831336717189e-07,tokens/kWh,511607971.51878273,MB,1377.81248,916.324352,0.0,501.219328,454.834688,s,17,10.322815124511717,0.6072244190889245,0.006037663715614185,0.6075043334960938,0.614862841796875,0.6166523559570312,0.6172914575195312,"[0.6059036254882812, 0.6099108276367188, 0.613802978515625, 0.6127144775390625, 0.5969122314453125, 0.6022156372070312, 0.6036224365234375, 0.5999121704101562, 0.5970960693359375, 0.6045029296875, 0.6077559814453125, 0.605859619140625, 0.6075043334960938, 0.6078861083984375, 0.61645263671875, 0.6174512329101562, 
0.6133118286132813]",tokens/s,103.75076828188955,kWh,1.656038897997227e-05,1.8263270678938328e-06,6.781120985461139e-06,2.5167837033327244e-05,tokens/kWh,2503194.8481140994,,s,1071,10.312918652534469,0.009629242439341256,0.0002006644959957485,0.00961900806427002,0.009798303604125976,0.009898288249969482,0.010411151885986326,"[0.009303872108459472, 0.009488608360290527, 0.009494496345520019, 0.009552000045776367, 0.009484416007995605, 0.009475520133972168, 0.009521663665771484, 0.009408320426940918, 0.009602047920227052, 0.009467103958129882, 0.009490495681762695, 0.009528863906860351, 0.00958892822265625, 0.009512928009033203, 0.00948799991607666, 0.009496671676635742, 0.00948624038696289, 0.009374112129211425, 0.009357312202453612, 0.009487456321716308, 0.009484895706176758, 0.00945529556274414, 0.009554240226745605, 0.009521471977233886, 0.009559200286865235, 0.009603360176086425, 0.009548128128051758, 0.01011734390258789, 0.009598976135253906, 0.009588735580444336, 0.009512031555175781, 0.009534367561340332, 0.00984067153930664, 0.009752544403076172, 0.009607168197631836, 0.009547455787658692, 0.00962934398651123, 0.010065919876098632, 0.009695903778076172, 0.009687040328979492, 0.009638208389282227, 0.009587455749511718, 0.009653087615966796, 0.009588831901550294, 0.009705727577209472, 0.009793536186218262, 0.009831551551818848, 0.009749247550964355, 0.009942527770996093, 0.009652607917785645, 0.009727968215942382, 0.00967683219909668, 0.009633312225341797, 0.009687552452087403, 0.009734047889709472, 0.009609279632568359, 0.009569631576538087, 0.009585311889648437, 0.009692383766174316, 0.009634495735168457, 0.009754752159118652, 0.009652192115783692, 0.009695232391357422, 0.009491999626159668, 0.009887519836425782, 0.009785728454589844, 0.0096046724319458, 0.009611871719360352, 0.009641823768615722, 0.00960921573638916, 0.00969647979736328, 0.009617280006408692, 0.009610143661499024, 0.009719807624816895, 0.009646080017089843, 0.009815872192382813, 0.009709759712219239, 0.009668767929077149, 0.009617247581481934, 0.009745984077453614, 0.009693632125854492, 0.009617216110229492, 0.009677248001098633, 0.009618687629699706, 0.009725695610046386, 0.009652992248535157, 0.009858655929565429, 0.009731807708740235, 0.009632543563842773, 0.009696288108825684, 0.009593728065490723, 0.009584575653076173, 0.009685279846191407, 0.00961513614654541, 0.009803775787353516, 0.009695232391357422, 0.00991436767578125, 0.009721856117248535, 0.00963379192352295, 0.009639936447143555, 0.009588735580444336, 0.009765952110290528, 0.009678879737854003, 0.009882528305053711, 0.009561087608337402, 0.009634816169738769, 0.009641983985900878, 0.009627360343933106, 0.009551168441772461, 0.009565152168273926, 0.010145792007446289, 0.009538944244384766, 0.009538240432739258, 0.009610272407531739, 0.009500831604003905, 0.00946662425994873, 0.010143232345581055, 0.00956486415863037, 0.009623359680175782, 0.009537599563598632, 0.009635775566101075, 0.009576448440551758, 0.009520447731018067, 0.009785568237304688, 0.009632255554199219, 0.009729855537414551, 0.009477472305297852, 0.00960374355316162, 0.009641983985900878, 0.009504704475402833, 0.009549407958984376, 0.009556447982788086, 0.009621472358703613, 0.00956761646270752, 0.009798303604125976, 0.010055839538574219, 0.009738080024719238, 0.009885343551635742, 0.009752287864685058, 0.009670495986938477, 0.00973465633392334, 0.009684864044189454, 0.009800095558166504, 0.009678848266601562, 0.00966857624053955, 0.009868703842163085, 0.009757311820983886, 
0.009832159996032715, 0.009660703659057617, 0.009819295883178711, 0.009803936004638672, 0.009916768074035645, 0.00975712013244629, 0.009764320373535156, 0.00978377628326416, 0.009801471710205078, 0.009742560386657715, 0.009731103897094726, 0.009953408241271973, 0.009769824028015137, 0.009820159912109374, 0.009725728034973145, 0.009813376426696777, 0.00973846435546875, 0.009715616226196289, 0.009831135749816894, 0.009817824363708497, 0.009834783554077148, 0.009719103813171386, 0.00975692844390869, 0.009687711715698242, 0.009689087867736817, 0.009754400253295898, 0.009648127555847168, 0.009717791557312012, 0.009709024429321289, 0.009705216407775878, 0.009700096130371094, 0.009678720474243164, 0.009699392318725585, 0.00961894416809082, 0.009665087699890136, 0.00970751953125, 0.009744383811950684, 0.009842271804809571, 0.009752991676330567, 0.009889311790466308, 0.009629471778869629, 0.00967091178894043, 0.009571711540222167, 0.009601152420043946, 0.009686816215515136, 0.009710016250610352, 0.009681535720825194, 0.009622655868530273, 0.009732928276062012, 0.0097259521484375, 0.009750528335571289, 0.00972390365600586, 0.00968511962890625, 0.009711487770080567, 0.009654272079467773, 0.009620863914489746, 0.009658368110656738, 0.009845376014709473, 0.009919872283935547, 0.010321632385253906, 0.010047679901123046, 0.009689824104309082, 0.009756671905517577, 0.009664159774780273, 0.009727904319763184, 0.009671104431152345, 0.009750207901000977, 0.00958291244506836, 0.009555968284606933, 0.00975222396850586, 0.009533791542053223, 0.009609408378601074, 0.009662272453308106, 0.00962342357635498, 0.009594176292419434, 0.00963257598876953, 0.009518752098083496, 0.009550175666809081, 0.009526816368103028, 0.009643839836120606, 0.01044486427307129, 0.009809856414794921, 0.009648256301879883, 0.009588895797729492, 0.009642271995544434, 0.009578783988952637, 0.009623488426208497, 0.009629216194152832, 0.009648480415344239, 0.009524383544921876, 0.00975692844390869, 0.010353119850158691, 0.010485600471496583, 0.010536959648132324, 0.009664223670959473, 0.009561823844909669, 0.009642560005187989, 0.009677087783813477, 0.009547776222229003, 0.009531392097473144, 0.009626879692077636, 0.009580960273742676, 0.009582528114318848, 0.00956441593170166, 0.009570464134216309, 0.009439231872558594, 0.00966220760345459, 0.009654080390930176, 0.009710111618041993, 0.009649824142456055, 0.009533696174621582, 0.00943513584136963, 0.009385055541992187, 0.009448639869689942, 0.009419839859008789, 0.009396896362304688, 0.00946726417541504, 0.009405055999755859, 0.009405983924865722, 0.009425375938415527, 0.00941427230834961, 0.009613696098327637, 0.009395199775695801, 0.009517215728759765, 0.009374431610107422, 0.009314432144165039, 0.009311840057373047, 0.009392831802368164, 0.009317119598388672, 0.009331904411315917, 0.009395520210266113, 0.009397791862487794, 0.009395392417907714, 0.009381664276123048, 0.009326560020446777, 0.009277440071105958, 0.00930851173400879, 0.009318304061889649, 0.009408255577087403, 0.009375743865966797, 0.009315711975097656, 0.009321120262145996, 0.009317888259887695, 0.009314784049987793, 0.00930611228942871, 0.009371552467346191, 0.009314592361450196, 0.009305439949035644, 0.00938646411895752, 0.00932863998413086, 0.009291808128356933, 0.009525216102600098, 0.00940777587890625, 0.009409215927124024, 0.009322527885437012, 0.009259008407592773, 0.009323807716369628, 0.009345888137817383, 0.009377280235290527, 0.00936793613433838, 0.009356767654418945, 0.00927295970916748, 0.010396703720092774, 
0.010055551528930664, 0.01028611183166504, 0.009947135925292968, 0.010670944213867188, 0.00945580768585205, 0.009176447868347168, 0.009446016311645508, 0.009506815910339356, 0.009588735580444336, 0.0097259521484375, 0.009418656349182129, 0.009410655975341797, 0.009401439666748047, 0.009391008377075195, 0.009412351608276368, 0.009473695755004882, 0.009385727882385253, 0.00938646411895752, 0.00935155200958252, 0.009348192214965821, 0.00942585563659668, 0.00947606372833252, 0.009506367683410644, 0.009422400474548339, 0.00949295997619629, 0.009439711570739745, 0.009396160125732422, 0.009425951957702636, 0.009484288215637206, 0.00946025562286377, 0.009437631607055665, 0.009702431678771972, 0.009859295845031738, 0.009561023712158202, 0.00945248031616211, 0.009512160301208496, 0.009616448402404785, 0.00953609561920166, 0.009578335762023925, 0.009498271942138672, 0.009503232002258302, 0.009545663833618163, 0.009485407829284668, 0.009510111808776856, 0.009549535751342774, 0.009492192268371582, 0.009626239776611327, 0.009451199531555175, 0.009476096153259277, 0.009615360260009765, 0.009558015823364258, 0.00976467227935791, 0.009795231819152832, 0.009842656135559081, 0.009683008193969727, 0.00971827220916748, 0.009764543533325195, 0.00964236831665039, 0.00962992000579834, 0.00961030387878418, 0.009679231643676758, 0.009724224090576172, 0.009926624298095703, 0.009670656204223632, 0.00955951976776123, 0.009630240440368653, 0.009675935745239258, 0.009694047927856445, 0.009421183586120605, 0.009670880317687989, 0.009779520034790039, 0.009789440155029297, 0.009777503967285156, 0.00969257640838623, 0.009613568305969239, 0.009617183685302734, 0.009637311935424804, 0.009593631744384766, 0.009689087867736817, 0.009635135650634766, 0.009699168205261231, 0.00955628776550293, 0.009532256126403809, 0.009582271575927734, 0.009514047622680663, 0.009585247993469239, 0.009517120361328126, 0.009488672256469726, 0.009531519889831542, 0.009727871894836426, 0.010027008056640625, 0.009644031524658203, 0.009557439804077148, 0.00957868766784668, 0.009591168403625488, 0.009760800361633301, 0.009441247940063477, 0.0094203519821167, 0.009552191734313965, 0.009541760444641114, 0.00960307216644287, 0.009529343605041504, 0.00952950382232666, 0.009703519821166993, 0.00968614387512207, 0.009740639686584472, 0.009648415565490722, 0.009556256294250488, 0.009574144363403321, 0.00948630428314209, 0.009463680267333985, 0.009396448135375976, 0.009398176193237304, 0.009413760185241699, 0.009365983963012695, 0.009354751586914062, 0.009457856178283692, 0.009425215721130371, 0.00942905616760254, 0.009630047798156738, 0.009500672340393066, 0.009416704177856445, 0.009411744117736816, 0.009525504112243652, 0.009644639968872071, 0.009629695892333985, 0.00970137596130371, 0.009546015739440918, 0.009541407585144043, 0.009545536041259765, 0.009457311630249023, 0.009253439903259277, 0.009517312049865723, 0.009546624183654786, 0.009562848091125488, 0.009926272392272948, 0.009724479675292969, 0.009584704399108886, 0.009560064315795898, 0.009489855766296386, 0.009560640335083009, 0.009537407875061035, 0.0094967041015625, 0.009553983688354492, 0.009531328201293945, 0.009555232048034669, 0.009462495803833008, 0.009753631591796876, 0.009761759757995606, 0.009449472427368164, 0.00960921573638916, 0.009550975799560548, 0.009412927627563477, 0.009558591842651367, 0.009504384040832519, 0.009424768447875976, 0.009503232002258302, 0.009566304206848144, 0.009676511764526368, 0.009498815536499023, 0.009455679893493652, 0.009490367889404296, 0.009392416000366211, 
0.009374624252319335, 0.009577280044555663, 0.009355263710021973, 0.009432671546936035, 0.009481632232666015, 0.009393088340759278, 0.009408576011657715, 0.009394335746765136, 0.009373184204101562, 0.009494144439697266, 0.009436896324157715, 0.009396639823913575, 0.009493087768554688, 0.009459456443786622, 0.00958899211883545, 0.009578111648559571, 0.009488415718078614, 0.009502047538757325, 0.009427264213562011, 0.009584511756896973, 0.009488960266113282, 0.009412256240844727, 0.009461983680725098, 0.009531776428222656, 0.009601280212402344, 0.009651776313781738, 0.00948243236541748, 0.009469951629638672, 0.009511263847351074, 0.009487263679504395, 0.0095480318069458, 0.009261311531066895, 0.009431039810180664, 0.009433088302612304, 0.009508128166198731, 0.009446111679077149, 0.009385631561279298, 0.009341183662414551, 0.009323616027832032, 0.009372672080993653, 0.009453375816345215, 0.00936569595336914, 0.009395872116088867, 0.009530048370361328, 0.009399456024169922, 0.00937168025970459, 0.009406240463256835, 0.009481120109558105, 0.009479007720947265, 0.009468799591064453, 0.009424736022949218, 0.009473952293395996, 0.009568575859069825, 0.009564160346984863, 0.009431039810180664, 0.009412128448486328, 0.009480671882629394, 0.009399616241455078, 0.009407168388366699, 0.00942080020904541, 0.00935321617126465, 0.009412256240844727, 0.009545920372009277, 0.009410655975341797, 0.009347295761108399, 0.009410400390625, 0.009426912307739259, 0.009526975631713867, 0.009611616134643555, 0.009576479911804199, 0.009582559585571289, 0.009422335624694824, 0.009422592163085938, 0.00949465560913086, 0.009431103706359863, 0.009583104133605956, 0.009486175537109376, 0.009867487907409668, 0.009721920013427735, 0.009533375740051269, 0.009540767669677734, 0.009448543548583984, 0.009519071578979492, 0.009592608451843261, 0.009500543594360351, 0.009487744331359863, 0.009504863739013672, 0.00946448040008545, 0.009485695838928223, 0.00958902359008789, 0.009396575927734374, 0.009482144355773926, 0.009463520050048828, 0.009439616203308106, 0.009318911552429199, 0.009461248397827148, 0.00943727970123291, 0.009544320106506347, 0.009613023757934571, 0.009684991836547852, 0.009600255966186523, 0.009607359886169434, 0.009605695724487304, 0.009633983612060547, 0.009711008071899414, 0.00955840015411377, 0.009672575950622559, 0.009592415809631348, 0.009564512252807618, 0.009650431632995605, 0.009672672271728515, 0.009604960441589356, 0.009666720390319824, 0.009687040328979492, 0.009543007850646972, 0.009669280052185059, 0.009560064315795898, 0.009603039741516113, 0.009640000343322753, 0.00960099220275879, 0.00956169605255127, 0.009550432205200195, 0.0095927677154541, 0.009609248161315918, 0.009530976295471191, 0.00946611213684082, 0.009467967987060547, 0.009587871551513671, 0.00951529598236084, 0.009454079627990723, 0.009437408447265625, 0.009392224311828613, 0.009501824378967285, 0.009597503662109375, 0.00956230354309082, 0.009583680152893067, 0.009488191604614258, 0.009599552154541016, 0.00966489601135254, 0.009494400024414063, 0.009557600021362305, 0.00956879997253418, 0.009601152420043946, 0.009604991912841797, 0.009664640426635743, 0.009633664131164552, 0.009672639846801757, 0.009628735542297363, 0.009628095626831056, 0.009531968116760254, 0.009627360343933106, 0.009697792053222656, 0.009719584465026855, 0.009587807655334473, 0.009593376159667969, 0.009778783798217774, 0.009654656410217285, 0.009349408149719239, 0.009581855773925781, 0.009609567642211913, 0.009664159774780273, 0.009562111854553223, 
0.009587424278259277, 0.009682111740112305, 0.009659199714660645, 0.009654272079467773, 0.009552096366882324, 0.009598464012145995, 0.009636223793029785, 0.009572223663330078, 0.009662272453308106, 0.009648351669311524, 0.00964406394958496, 0.00964844799041748, 0.009545375823974609, 0.009566207885742188, 0.009619359970092773, 0.009598688125610351, 0.009775487899780274, 0.009626784324645997, 0.01000864028930664, 0.009866175651550292, 0.009725728034973145, 0.00961900806427002, 0.009601535797119141, 0.009745920181274414, 0.009668224334716797, 0.00962019157409668, 0.009564319610595702, 0.009527296066284179, 0.009607168197631836, 0.009573920249938965, 0.009555520057678223, 0.009556639671325683, 0.009513216018676758, 0.00958198356628418, 0.00968172836303711, 0.009776127815246583, 0.009526047706604004, 0.009545727729797364, 0.009715231895446777, 0.009678879737854003, 0.009622079849243164, 0.009586560249328612, 0.00951852798461914, 0.009632320404052734, 0.009661855697631836, 0.009740096092224121, 0.00990835189819336, 0.009720479965209961, 0.009572416305541993, 0.009643679618835449, 0.009778495788574218, 0.009580608367919921, 0.009591360092163086, 0.009646431922912597, 0.00962559986114502, 0.009683103561401367, 0.009645919799804687, 0.009699423789978028, 0.0095382080078125, 0.009627264022827148, 0.010127743721008301, 0.009733759880065918, 0.009629695892333985, 0.009683327674865723, 0.009667840003967285, 0.00963046360015869, 0.009634079933166504, 0.009692640304565429, 0.009636096000671387, 0.009650176048278808, 0.009598976135253906, 0.009642047882080078, 0.009688544273376464, 0.009693663597106933, 0.009781248092651367, 0.009655839920043945, 0.009728480339050292, 0.00966652774810791, 0.009668064117431641, 0.00975881576538086, 0.009638591766357422, 0.00972812843322754, 0.009594528198242188, 0.009611583709716797, 0.00966419219970703, 0.009575615882873536, 0.009652671813964843, 0.009606592178344727, 0.009581503868103027, 0.009576736450195312, 0.00963145637512207, 0.009612735748291015, 0.009617504119873046, 0.009549983978271484, 0.009627967834472657, 0.00987667179107666, 0.009524031639099121, 0.009482239723205567, 0.009498623847961426, 0.009551008224487305, 0.009487520217895508, 0.009511712074279786, 0.009431103706359863, 0.009425408363342285, 0.009530752182006837, 0.009604063987731934, 0.009727999687194825, 0.00956208038330078, 0.009490816116333008, 0.009479840278625489, 0.009498335838317872, 0.009565535545349122, 0.009585599899291992, 0.00953929615020752, 0.009566495895385742, 0.009515007972717286, 0.009479392051696777, 0.00962230396270752, 0.009489664077758789, 0.009427712440490723, 0.009453568458557129, 0.009482175827026366, 0.009516544342041015, 0.009504608154296875, 0.00952188777923584, 0.009443327903747559, 0.009436415672302247, 0.009374400138854981, 0.009345120429992675, 0.009457632064819336, 0.009371135711669922, 0.009398847579956056, 0.009331680297851563, 0.009383071899414062, 0.009434399604797363, 0.009341471672058106, 0.009314240455627442, 0.010098688125610352, 0.01158681583404541, 0.011430624008178712, 0.010174752235412598, 0.009592639923095702, 0.009564160346984863, 0.009471072196960448, 0.009475104331970215, 0.009355135917663574, 0.0093820161819458, 0.009734016418457031, 0.009911487579345703, 0.010739551544189454, 0.009422880172729492, 0.009685952186584473, 0.009479776382446289, 0.009429408073425292, 0.009441280364990234, 0.009459712028503419, 0.009408576011657715, 0.009562047958374023, 0.009502400398254394, 0.009533984184265137, 0.009588512420654297, 0.009498623847961426, 
0.009614848136901855, 0.009671168327331543, 0.009642111778259277, 0.009897312164306641, 0.009836799621582031, 0.009482527732849121, 0.00958579158782959, 0.009705951690673828, 0.009526080131530761, 0.009528927803039551, 0.009537535667419434, 0.009469951629638672, 0.009523360252380372, 0.00948624038696289, 0.009488320350646973, 0.009785599708557129, 0.009728927612304688, 0.009761631965637207, 0.009534751892089844, 0.009570112228393554, 0.009669535636901856, 0.009676799774169922, 0.009400768280029297, 0.009700032234191894, 0.009634783744812012, 0.009678879737854003, 0.009576383590698243, 0.009624064445495606, 0.009637632369995118, 0.009578432083129883, 0.009664447784423828, 0.00962816047668457, 0.009748576164245605, 0.009712703704833984, 0.00957753562927246, 0.009673760414123536, 0.00960540771484375, 0.009632320404052734, 0.00956816005706787, 0.00975692844390869, 0.009640064239501953, 0.009592255592346192, 0.009636128425598144, 0.009612319946289062, 0.00957539176940918, 0.009775103569030762, 0.009632991790771485, 0.009622143745422364, 0.009547967910766602, 0.009621472358703613, 0.009631744384765625, 0.009584511756896973, 0.009672863960266113, 0.009635168075561524, 0.009679488182067871, 0.009658368110656738, 0.009598719596862792, 0.009793791770935058, 0.009648127555847168, 0.009650176048278808, 0.009652223587036133, 0.009615360260009765, 0.009600352287292481, 0.010057472229003906, 0.009848928451538086, 0.009667743682861328, 0.009823904037475586, 0.009588735580444336, 0.009613568305969239, 0.009677632331848145, 0.009632703781127929, 0.009649696350097657, 0.009499103546142577, 0.0095447359085083, 0.009630687713623047, 0.009641983985900878, 0.009625472068786622, 0.009519519805908204, 0.009590496063232421, 0.009638943672180177, 0.0096627197265625, 0.009613056182861329, 0.009489376068115235, 0.0096212797164917, 0.00961731243133545, 0.009384384155273437, 0.009540863990783691, 0.00974828815460205, 0.009573216438293458, 0.009561727523803711, 0.009509056091308594, 0.009541407585144043, 0.010455007553100587, 0.009554176330566407, 0.010824959754943847, 0.00978172779083252, 0.009719200134277343, 0.009652992248535157, 0.009701151847839356, 0.00965283203125, 0.00981760025024414, 0.009937151908874512, 0.009664544105529785, 0.009611231803894043, 0.009594207763671874, 0.009650848388671874, 0.00961945629119873, 0.00974028778076172, 0.00988099193572998, 0.009708127975463866, 0.009818112373352051, 0.009899264335632324, 0.009739007949829102, 0.009766816139221191, 0.00977235221862793, 0.009623807907104492, 0.009718303680419921, 0.0109486083984375, 0.009735520362854005, 0.00973305606842041, 0.009739520072937012, 0.009679488182067871, 0.009827360153198243, 0.00967148780822754, 0.009680864334106445, 0.009772735595703125, 0.00967516803741455, 0.009793472290039063, 0.009712672233581543, 0.00969212818145752, 0.00976857566833496, 0.009723648071289063, 0.010463744163513184, 0.009772959709167481, 0.009768351554870606, 0.009724127769470214, 0.00973680019378662, 0.010067935943603515, 0.009824224472045898, 0.009764703750610351, 0.009760383605957032, 0.009690943717956544, 0.009689408302307129, 0.009863391876220703, 0.009623776435852051, 0.009725407600402832, 0.0096278076171875, 0.009818431854248046, 0.009780608177185058, 0.00971225643157959, 0.009971712112426758, 0.00982572841644287, 0.009883711814880371, 0.009675264358520508, 0.009772159576416016, 0.009748448371887207, 0.009812895774841308, 0.010133503913879394, 0.009800736427307128, 0.009720800399780273, 0.009759872436523438, 0.009839424133300781, 0.00976467227935791, 
0.009746687889099121, 0.010003840446472168, 0.010002176284790039, 0.00973027229309082, 0.009808544158935548, 0.009719296455383301, 0.009728511810302735, 0.009797087669372559, 0.009718303680419921, 0.009672703742980958, 0.009734111785888672, 0.009730079650878905, 0.009652000427246094, 0.009685215950012207, 0.010210975646972657, 0.009763104438781738, 0.009889856338500976, 0.009741408348083496, 0.009709952354431152, 0.009721952438354492, 0.009695039749145509, 0.009714207649230956, 0.009695551872253417, 0.009737119674682618, 0.00964083194732666, 0.009959487915039063, 0.009738176345825196, 0.009744383811950684, 0.009676799774169922, 0.009764863967895507, 0.0097259521484375, 0.009691264152526856, 0.009723744392395019, 0.009697600364685059, 0.009766624450683594, 0.009785344123840332, 0.009969663619995118, 0.009772192001342774, 0.009769951820373535, 0.010101663589477538, 0.010306367874145508, 0.009668800354003906, 0.009578559875488281, 0.009771039962768555, 0.009934880256652833, 0.009791359901428223, 0.00968841552734375, 0.009783935546875, 0.00956278419494629, 0.009597984313964844, 0.009726943969726563, 0.009789440155029297, 0.009710687637329102, 0.009759743690490723, 0.009647456169128418, 0.009656928062438964, 0.009721823692321777, 0.009727328300476075, 0.009635744094848632, 0.009642304420471192, 0.009805888175964355, 0.00961315155029297, 0.009677023887634277, 0.009615551948547364, 0.009753984451293945, 0.009628416061401367, 0.00963535976409912, 0.00967728042602539, 0.00971571159362793, 0.009805824279785156, 0.009695232391357422, 0.009831711769104004, 0.009757408142089844, 0.009684896469116211, 0.00961734390258789, 0.009620991706848145, 0.009684991836547852, 0.009718432426452636, 0.009756671905517577, 0.009776255607604981, 0.009628543853759766, 0.009658368110656738, 0.009605119705200196, 0.009736191749572755, 0.009678848266601562, 0.009661727905273438, 0.00976137638092041, 0.009705087661743165, 0.009739007949829102, 0.009703167915344238, 0.009731743812561035, 0.00969059181213379, 0.009849504470825195, 0.009893759727478028, 0.009710944175720215, 0.009685919761657715, 0.009670656204223632, 0.009948384284973145, 0.009720671653747558, 0.009773088455200194, 0.009791487693786622, 0.009762304306030273, 0.00970304012298584, 0.009656224250793457, 0.010111776351928711, 0.009990240097045898, 0.009867199897766113, 0.00971996784210205, 0.009743647575378418, 0.009755359649658204, 0.009808992385864258]",tokens/s,103.85032948328288,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.204736,1333.6576,0.0,931.135488,917.648384,s,1,7.32265869140625,7.32265869140625,0.0,7.32265869140625,7.32265869140625,7.32265869140625,7.32265869140625,[7.32265869140625],,kWh,5.41717052920679e-06,5.903661378679718e-07,1.9183348680118506e-06,7.925871535086612e-06,,MB,1333.993472,1467.875328,0.0,1050.673152,1018.330112,s,10,0.24341530036926268,0.024341530036926268,0.0002706625480827537,0.0242399206161499,0.024653299713134763,0.024785145950317382,0.024890622940063476,"[0.024239776611328125, 0.024183456420898437, 0.024099008560180664, 0.02407644844055176, 0.024624000549316405, 0.024348703384399414, 0.02461440086364746, 0.0249169921875, 0.02407244873046875, 0.02424006462097168]",tokens/s,10517.005283219512,kWh,7.146740239100448e-07,7.88157113541851e-08,4.7294862611735186e-07,1.266438361381582e-06,tokens/kWh,202141697.3825119,MB,1373.007872,1484.652544,0.0,1067.450368,1032.767488,s,10,13.970494995117189,1.3970494995117186,0.021547635118359953,1.3995083618164061,1.4239273315429688,1.4288072937011718,1.4327112634277344,"[1.404370361328125, 1.372952880859375, 1.3702789306640626, 1.39901904296875, 1.3999976806640626, 1.41591357421875, 1.433687255859375, 1.4228428955078125, 1.3740086669921876, 1.3774237060546874]",tokens/s,45.09503780790807,kWh,4.1074298936089776e-05,4.530078558407494e-06,1.707314874408228e-05,6.267752623857955e-05,tokens/kWh,1005144.9663184371,,s,630,13.964617408752442,0.02216605937897213,0.0005317623545357554,0.022151216506958008,0.022710658836364746,0.022829525566101076,0.023482625942230228,"[0.022708223342895507, 0.023187456130981447, 0.022089632034301757, 0.022209888458251954, 0.022569919586181642, 0.022121759414672853, 0.02208412742614746, 0.0219238395690918, 0.022105823516845702, 0.022227392196655274, 0.02241702461242676, 0.022597856521606445, 0.02261305618286133, 0.022631359100341798, 0.022765567779541016, 0.022603776931762694, 0.022541791915893554, 0.022958208084106445, 0.02255504035949707, 0.022611967086791994, 0.022620447158813478, 0.022810335159301757, 0.022789663314819335, 0.022673471450805664, 0.02276563262939453, 0.022770015716552735, 0.02277110481262207, 0.022653696060180663, 0.022644575119018556, 0.022685855865478517, 0.022764671325683595, 0.02261222457885742, 0.022436159133911133, 0.022456480026245118, 0.022617408752441406, 0.022606367111206054, 0.02228236770629883, 0.022134431838989256, 0.022149728775024413, 0.022418399810791016, 0.02212124824523926, 0.02220854377746582, 0.02209548759460449, 0.02196518325805664, 0.02192300796508789, 0.021766143798828123, 0.021649791717529298, 0.02177084732055664, 0.021732704162597656, 0.021735872268676758, 0.021653568267822266, 0.021636831283569337, 0.021645599365234375, 0.021635072708129883, 0.021707904815673827, 0.021839807510375977, 0.021683135986328126, 0.021823488235473632, 0.021618688583374023, 0.02184806442260742, 0.02281260871887207, 0.021794879913330078, 0.022016000747680665, 0.02147532844543457, 0.022040416717529297, 0.02213699150085449, 0.021933696746826173, 0.02204470443725586, 0.02206572723388672, 0.022015775680541992, 0.02200115203857422, 0.02197983932495117, 0.021933439254760743, 0.021972543716430665, 0.021902463912963868, 0.022039295196533203, 0.021949440002441405, 0.022261920928955077, 0.022188192367553712, 0.022722240447998046, 0.022398880004882812, 0.022219903945922853, 0.022119392395019533, 0.022046720504760742, 0.02186854362487793, 0.021658912658691406, 0.02161942481994629, 0.02162483215332031, 0.02153267288208008, 0.021550464630126952, 
0.021655296325683592, 0.021572479248046873, 0.022160608291625975, 0.021957408905029296, 0.021731327056884766, 0.02164735984802246, 0.02170217514038086, 0.021579519271850586, 0.02175984001159668, 0.021661983489990235, 0.021566272735595703, 0.021567264556884767, 0.021470912933349608, 0.021754112243652344, 0.021493824005126953, 0.02168544006347656, 0.02163408088684082, 0.021595935821533203, 0.021622016906738283, 0.021624799728393554, 0.02158255958557129, 0.021632287979125978, 0.02155913543701172, 0.021705663681030274, 0.02164735984802246, 0.02160825538635254, 0.021656896591186522, 0.02163596725463867, 0.02166579246520996, 0.021574752807617188, 0.021598400115966795, 0.02156003189086914, 0.021575231552124024, 0.021584064483642577, 0.021565696716308595, 0.02154857635498047, 0.021227455139160155, 0.021515552520751952, 0.021721887588500976, 0.021718624114990235, 0.021582239151000975, 0.021540864944458008, 0.021506048202514647, 0.02148761558532715, 0.021553152084350585, 0.02152038383483887, 0.021671552658081055, 0.021543296813964843, 0.021587488174438476, 0.021588447570800782, 0.021569055557250978, 0.021545024871826173, 0.02160620880126953, 0.021662303924560547, 0.021827583312988282, 0.021712928771972655, 0.02155926322937012, 0.021671232223510743, 0.021568096160888672, 0.021534528732299805, 0.021647647857666017, 0.02159775924682617, 0.02179862403869629, 0.021709535598754885, 0.021705984115600586, 0.021801727294921875, 0.021872703552246093, 0.02197420883178711, 0.021908416748046874, 0.021945280075073244, 0.022036800384521483, 0.02208211135864258, 0.022040735244750975, 0.02193414306640625, 0.022173471450805664, 0.022162975311279295, 0.02205948829650879, 0.022105472564697266, 0.02211311912536621, 0.022209856033325197, 0.022053472518920897, 0.02196006393432617, 0.02170899200439453, 0.021681535720825196, 0.021637760162353515, 0.02192006492614746, 0.021760000228881835, 0.0216760311126709, 0.021598207473754884, 0.021639167785644533, 0.021656831741333007, 0.021604448318481444, 0.02167875289916992, 0.0216856632232666, 0.02163350486755371, 0.021899391174316406, 0.021628639221191407, 0.02171036720275879, 0.021731103897094727, 0.021375295639038085, 0.0216878719329834, 0.021600255966186522, 0.02161942481994629, 0.023228128433227538, 0.02166988754272461, 0.021614591598510743, 0.021568511962890623, 0.0217955207824707, 0.021720607757568358, 0.02164406394958496, 0.021651456832885742, 0.021645023345947267, 0.021649696350097655, 0.021648416519165038, 0.021671968460083006, 0.02205526351928711, 0.02206175994873047, 0.02231491279602051, 0.022312959671020507, 0.02237830352783203, 0.022431936264038086, 0.022468288421630858, 0.022377824783325194, 0.02232419204711914, 0.022351776123046875, 0.022452320098876953, 0.022374591827392577, 0.022355775833129882, 0.022204416275024414, 0.022529151916503905, 0.022391679763793946, 0.022376447677612304, 0.022347776412963868, 0.02232636833190918, 0.02243097686767578, 0.022244064331054688, 0.022320064544677734, 0.022388864517211914, 0.022419071197509764, 0.022451520919799805, 0.022346687316894532, 0.022365535736083984, 0.022423423767089844, 0.02229852867126465, 0.02231590461730957, 0.022327327728271486, 0.022300640106201173, 0.02235331153869629, 0.022286880493164064, 0.02233660888671875, 0.02231190490722656, 0.02246156883239746, 0.02229862403869629, 0.022340192794799804, 0.02248918342590332, 0.022397216796875, 0.022396480560302735, 0.022393535614013672, 0.022357088088989258, 0.02243548774719238, 0.022348352432250976, 0.02233798408508301, 0.021902463912963868, 0.02230393600463867, 
0.022315263748168945, 0.022421855926513672, 0.02229862403869629, 0.022363359451293946, 0.022336191177368164, 0.022435935974121093, 0.022421600341796875, 0.02243574333190918, 0.02249728012084961, 0.022370527267456055, 0.02244710350036621, 0.02239583969116211, 0.02229408073425293, 0.022456607818603515, 0.022323200225830078, 0.023586816787719726, 0.022673408508300782, 0.022500959396362305, 0.023110048294067383, 0.022460416793823244, 0.022529535293579102, 0.022409311294555666, 0.022452991485595705, 0.02239148712158203, 0.022284767150878907, 0.022265727996826173, 0.022186975479125976, 0.022412960052490234, 0.022375551223754883, 0.021908416748046874, 0.02175939178466797, 0.021901376724243166, 0.021881311416625977, 0.021654624938964844, 0.021733407974243165, 0.021936960220336914, 0.02216556739807129, 0.02197737693786621, 0.02214908790588379, 0.022179584503173828, 0.02232966423034668, 0.022240959167480468, 0.022200319290161134, 0.02235759925842285, 0.022370880126953124, 0.02198646354675293, 0.022131391525268555, 0.021787839889526366, 0.022004159927368164, 0.02183616065979004, 0.021755104064941407, 0.021725343704223632, 0.02190572738647461, 0.022097663879394533, 0.02201033592224121, 0.021895263671875, 0.02188870429992676, 0.022091615676879884, 0.021906944274902345, 0.021989728927612303, 0.02199385643005371, 0.021777151107788086, 0.02184009552001953, 0.021716991424560548, 0.02191155242919922, 0.021815296173095702, 0.02195609664916992, 0.021912063598632812, 0.0217325439453125, 0.021694528579711915, 0.02164169692993164, 0.021635360717773437, 0.021730527877807618, 0.022020896911621093, 0.021739519119262696, 0.021952447891235353, 0.022849376678466798, 0.022079647064208983, 0.02215670394897461, 0.02211702346801758, 0.022226720809936523, 0.022542560577392578, 0.022503007888793947, 0.022485311508178712, 0.022589727401733397, 0.022495040893554686, 0.022573055267333983, 0.022632064819335936, 0.022747295379638672, 0.023208160400390625, 0.02847158432006836, 0.02270947265625, 0.022499488830566405, 0.022619680404663087, 0.022514495849609375, 0.022574432373046877, 0.022600351333618166, 0.022644672393798828, 0.02262166404724121, 0.02260383987426758, 0.02248784065246582, 0.02240835189819336, 0.022534751892089845, 0.022501312255859374, 0.022609344482421877, 0.022475391387939452, 0.022597055435180664, 0.022946367263793944, 0.022998464584350585, 0.022454208374023437, 0.022507999420166017, 0.02256057548522949, 0.022541696548461915, 0.02276655960083008, 0.022482847213745116, 0.02236591911315918, 0.02261952018737793, 0.022487199783325196, 0.02235919952392578, 0.02246147155761719, 0.022442655563354494, 0.022649055480957032, 0.022441375732421876, 0.02237273597717285, 0.022238847732543945, 0.02313049507141113, 0.022769664764404295, 0.022575103759765625, 0.02248089599609375, 0.022365663528442385, 0.02236988830566406, 0.022519807815551757, 0.02241187286376953, 0.022487232208251953, 0.022356096267700194, 0.022369983673095704, 0.022503904342651367, 0.022331520080566405, 0.022497024536132813, 0.022379936218261717, 0.022719072341918944, 0.02255036735534668, 0.022306079864501952, 0.022440736770629882, 0.02242953681945801, 0.022926624298095704, 0.023496768951416017, 0.02281564712524414, 0.02258732795715332, 0.022687135696411134, 0.022712991714477538, 0.02261756706237793, 0.0226079044342041, 0.022843807220458985, 0.02271039962768555, 0.022662239074707033, 0.02273164749145508, 0.022722560882568358, 0.02273695945739746, 0.023285696029663086, 0.023828479766845705, 0.022802335739135742, 0.022799680709838867, 0.022778656005859373, 
0.023061504364013673, 0.022852031707763672, 0.02279225540161133, 0.02270627212524414, 0.022583135604858397, 0.022768192291259766, 0.022838495254516603, 0.022829856872558594, 0.022775264739990236, 0.02277238464355469, 0.02273628807067871, 0.02319817543029785, 0.02515660858154297, 0.022715391159057616, 0.022646656036376955, 0.022587167739868165, 0.02268185615539551, 0.02258127975463867, 0.022646848678588866, 0.022599679946899414, 0.02304614448547363, 0.022863231658935546, 0.022975072860717774, 0.022478687286376954, 0.023621471405029296, 0.02372435188293457, 0.023138303756713868, 0.022674911499023436, 0.022799936294555664, 0.023329759597778322, 0.02263599967956543, 0.022690656661987305, 0.022593376159667968, 0.022572032928466795, 0.022574079513549804, 0.02269919967651367, 0.022537919998168947, 0.02252899169921875, 0.02285875129699707, 0.022510591506958007, 0.022530176162719726, 0.022484800338745118, 0.022829120635986327, 0.022503423690795898, 0.022450080871582033, 0.0226711368560791, 0.022701791763305664, 0.02261404800415039, 0.022591232299804687, 0.02252044868469238, 0.02253228759765625, 0.022542144775390623, 0.022752960205078124, 0.022431615829467774, 0.02344799995422363, 0.02259382438659668, 0.022704383850097657, 0.022475872039794922, 0.0225849609375, 0.022392959594726564, 0.02235475158691406, 0.022414527893066406, 0.02240390396118164, 0.02251366424560547, 0.022603551864624025, 0.022560991287231446, 0.022491199493408203, 0.02242348861694336, 0.0224420166015625, 0.022425376892089843, 0.022759136199951173, 0.022564640045166017, 0.0224385929107666, 0.022501375198364256, 0.02251366424560547, 0.022440256118774413, 0.0224901123046875, 0.02259833526611328, 0.022485088348388672, 0.02231088066101074, 0.02222892761230469, 0.022152704238891603, 0.022034944534301756, 0.0220153923034668, 0.021885536193847657, 0.021821216583251955, 0.021486112594604492, 0.02168390464782715, 0.021578048706054686, 0.021556991577148438, 0.021618080139160157, 0.021611360549926757, 0.021592063903808592, 0.02162073516845703, 0.02165894317626953, 0.021651647567749024, 0.021641727447509765, 0.021700544357299803, 0.021749343872070313, 0.02164784049987793, 0.021712896347045898, 0.021558847427368164, 0.021674432754516602, 0.021581087112426758, 0.021689056396484375, 0.021648895263671874, 0.02163520050048828, 0.021637504577636718, 0.021932031631469725, 0.02188809585571289, 0.021719968795776368, 0.021780223846435548, 0.0217476806640625, 0.021716543197631836, 0.021639328002929687, 0.02169264030456543, 0.021805408477783204, 0.021716991424560548, 0.02206515121459961, 0.02168832015991211, 0.021806400299072267, 0.021774303436279296, 0.02217788887023926, 0.022223167419433594, 0.02233580780029297, 0.022294431686401366, 0.022200416564941407, 0.02222492790222168, 0.02251094436645508, 0.022487871170043944, 0.02218988800048828, 0.022650623321533205, 0.022377023696899413, 0.021776063919067383, 0.021715999603271485, 0.021688671112060548, 0.021684511184692383, 0.02162723159790039, 0.021626880645751953, 0.02161369514465332, 0.021692352294921877, 0.021584480285644532, 0.021624160766601563, 0.021642175674438477, 0.021712575912475586, 0.02186038398742676, 0.021750112533569337, 0.021705951690673828, 0.021637439727783203, 0.0214517765045166, 0.021606752395629883, 0.02165171241760254, 0.021690784454345705, 0.021792768478393554, 0.02192793655395508, 0.02168940734863281, 0.0217523193359375, 0.021799360275268555, 0.021696704864501953, 0.021699455261230467, 0.021623743057250976, 0.021651456832885742, 0.021712896347045898, 0.02167398452758789, 
0.02161180877685547, 0.021639904022216796, 0.02164531135559082, 0.021806175231933594, 0.021610815048217772, 0.021477632522583008, 0.021696863174438478, 0.021651679992675782, 0.02160111999511719, 0.02168454360961914, 0.021612447738647463, 0.02161123275756836, 0.021716991424560548, 0.02202134323120117, 0.021715744018554688, 0.021695775985717772, 0.021922527313232423, 0.02203647994995117, 0.021788448333740235, 0.02183907127380371, 0.021914911270141602, 0.021882240295410155, 0.02181769561767578, 0.021795072555541993, 0.02183772850036621, 0.021808992385864256, 0.02180246353149414, 0.021964448928833008, 0.02204147148132324, 0.021803007125854493, 0.021833824157714843, 0.02155660820007324, 0.021469728469848633, 0.021581823348999024, 0.02169606399536133, 0.021791488647460937, 0.0218919677734375, 0.022038751602172852, 0.022148000717163087, 0.022378335952758788, 0.022423744201660156, 0.022509504318237304, 0.022486047744750978, 0.022354623794555665, 0.023001375198364257, 0.02240483283996582, 0.022380863189697266, 0.02235696029663086]",tokens/s,45.114017918252614,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.470976,3361.603584,0.0,2959.081472,2942.567424,s,1,7.147583984375,7.147583984375,0.0,7.147583984375,7.147583984375,7.147583984375,7.147583984375,[7.147583984375],,kWh,5.761196495829305e-06,6.283274877993356e-07,1.9227793159881656e-06,8.312303299616807e-06,,MB,1345.16736,3556.63872,0.0,3141.533696,3105.830912,s,10,0.28855046653747557,0.02885504665374756,0.0012550584409207724,0.028532000541687013,0.029589280509948728,0.03093654508590698,0.032014356746673585,"[0.03228380966186523, 0.028086143493652342, 0.02781507110595703, 0.02781260871887207, 0.028941280364990236, 0.028394176483154298, 0.028669824600219728, 0.029289888381958007, 0.028041343688964843, 0.029216320037841796]",tokens/s,8871.93159213804,kWh,1.035492461214524e-06,1.141959224595695e-07,6.837397116879257e-07,1.833428095362019e-06,tokens/kWh,139629146.43208387,MB,1377.591296,3598.58176,0.0,3183.476736,3163.048448,s,10,11.09806982421875,1.109806982421875,0.0036724458448768294,1.1088184204101563,1.1154548461914062,1.115927313232422,1.1163052868652343,"[1.1119915771484374, 1.1163997802734376, 1.1082696533203125, 1.1081903076171875, 1.1100640869140626, 1.115349853515625, 1.1035675048828124, 1.1093671875, 1.1079283447265624, 1.1069415283203126]",tokens/s,56.766627889219365,kWh,3.2173906293373334e-05,3.547628171257534e-06,2.1256916729712772e-05,5.6978451194343635e-05,tokens/kWh,1105681.159797024,,s,630,11.095657752990716,0.017612155163477337,0.00020689108512658607,0.017571952819824217,0.017754485702514648,0.017852357482910156,0.018587411804199223,"[0.017704959869384765, 0.01764352035522461, 0.01765564727783203, 0.01767580795288086, 0.01759881591796875, 0.01754252815246582, 0.01759539222717285, 0.018323392868041993, 0.017692960739135743, 0.017847999572753907, 0.01762918472290039, 0.017657855987548828, 
0.017588224411010742, 0.01784003257751465, 0.01744905662536621, 0.017469440460205078, 0.01741414451599121, 0.017360895156860352, 0.01740595245361328, 0.01783523178100586, 0.01757187271118164, 0.01747158432006836, 0.01750668716430664, 0.0174881591796875, 0.017469440460205078, 0.017449024200439454, 0.01757382392883301, 0.017500160217285156, 0.01760870361328125, 0.017528831481933595, 0.017651679992675782, 0.017622304916381837, 0.01766092872619629, 0.017644832611083985, 0.01761948776245117, 0.017686016082763673, 0.01761529541015625, 0.01756979179382324, 0.019197439193725584, 0.018533952713012697, 0.017615039825439452, 0.0178404483795166, 0.01775660705566406, 0.017686399459838867, 0.01759859275817871, 0.017604127883911132, 0.017975519180297852, 0.01780531120300293, 0.017504512786865236, 0.017504447937011718, 0.01769862365722656, 0.01760870361328125, 0.017661951065063478, 0.017691680908203125, 0.01752137565612793, 0.017460575103759767, 0.01746012878417969, 0.017451007843017577, 0.01745510482788086, 0.017493951797485353, 0.017524799346923827, 0.017467391967773437, 0.0174653434753418, 0.017537055969238283, 0.017549280166625977, 0.01761859130859375, 0.01772774314880371, 0.017707103729248046, 0.01796054458618164, 0.01768489646911621, 0.017534975051879884, 0.017487871170043946, 0.017498111724853514, 0.01747724723815918, 0.017654144287109375, 0.017589599609375, 0.017516672134399416, 0.01751299285888672, 0.017530879974365234, 0.017448671340942384, 0.017606943130493165, 0.017540992736816405, 0.017569984436035156, 0.017586111068725586, 0.017885183334350584, 0.017829248428344727, 0.01766873550415039, 0.017635168075561522, 0.017704320907592774, 0.017619743347167968, 0.017745344161987305, 0.01763795280456543, 0.01790355110168457, 0.017623104095458985, 0.01827840042114258, 0.017680063247680664, 0.017723487854003905, 0.01764784049987793, 0.017682527542114256, 0.01957468795776367, 0.01785638427734375, 0.017698272705078125, 0.017703584671020508, 0.01758995246887207, 0.01773980712890625, 0.01768684768676758, 0.01796236801147461, 0.01781616020202637, 0.01785036849975586, 0.01785398483276367, 0.017744543075561524, 0.017655616760253907, 0.017720800399780273, 0.017637920379638673, 0.017691648483276368, 0.01764419174194336, 0.01764748764038086, 0.01759008026123047, 0.017657535552978516, 0.017781728744506835, 0.017733856201171874, 0.0178439998626709, 0.017670143127441407, 0.017715200424194336, 0.017752063751220702, 0.017727487564086913, 0.017817440032958983, 0.01777619171142578, 0.0176627197265625, 0.01797324752807617, 0.01763737678527832, 0.01761676788330078, 0.017545343399047852, 0.01762099266052246, 0.017504255294799806, 0.01757155227661133, 0.017635616302490234, 0.01756550407409668, 0.01763462448120117, 0.017617792129516602, 0.017581375122070312, 0.01759228706359863, 0.017666784286499024, 0.017547264099121093, 0.01764556884765625, 0.017565696716308594, 0.017475616455078124, 0.017567712783813475, 0.017522495269775392, 0.0174716796875, 0.017571840286254883, 0.01773535919189453, 0.017492511749267577, 0.017518112182617188, 0.01759052848815918, 0.017550815582275392, 0.017502431869506837, 0.01747999954223633, 0.01753232002258301, 0.017581663131713866, 0.017525983810424806, 0.017546367645263673, 0.017662303924560547, 0.01767030334472656, 0.017737888336181642, 0.017543359756469725, 0.017704704284667968, 0.017547328948974608, 0.01747260856628418, 0.017474464416503906, 0.017493696212768556, 0.01752284812927246, 0.01758019256591797, 0.017553823471069336, 0.01751932716369629, 0.017508415222167967, 0.017476415634155272, 
0.017508607864379883, 0.017604352951049805, 0.017468671798706054, 0.017550079345703126, 0.017573408126831055, 0.01757436752319336, 0.017549184799194335, 0.01761702346801758, 0.01762713623046875, 0.017573888778686524, 0.01780944061279297, 0.017666015625, 0.017618719100952147, 0.017608095169067382, 0.017789024353027344, 0.017560287475585936, 0.017479679107666016, 0.017534975051879884, 0.017547264099121093, 0.017639328002929687, 0.017578079223632814, 0.017530879974365234, 0.017528831481933595, 0.017559551239013673, 0.017555776596069335, 0.017491584777832032, 0.017596351623535157, 0.01760268783569336, 0.01748102378845215, 0.017518751144409178, 0.017550880432128907, 0.017542015075683592, 0.017531007766723634, 0.017555807113647463, 0.017557407379150392, 0.01753878402709961, 0.017536256790161135, 0.017527584075927735, 0.017516544342041016, 0.017526784896850587, 0.017537248611450194, 0.017775936126708983, 0.017564128875732422, 0.017592063903808595, 0.01759052848815918, 0.017596416473388672, 0.017451007843017577, 0.01754521560668945, 0.01754521560668945, 0.017496320724487306, 0.01755625534057617, 0.017539936065673827, 0.017526239395141602, 0.017641504287719725, 0.017771135330200194, 0.017590272903442384, 0.017554752349853514, 0.017517120361328124, 0.017646879196166993, 0.017551456451416016, 0.017537471771240234, 0.017551008224487304, 0.017609600067138673, 0.017600288391113283, 0.0186092472076416, 0.01773151969909668, 0.01758847999572754, 0.017545888900756836, 0.017500223159790038, 0.01751420783996582, 0.017516799926757812, 0.017520063400268553, 0.017522592544555664, 0.01757254409790039, 0.017845407485961914, 0.017594560623168946, 0.017449119567871093, 0.017532960891723633, 0.017612800598144532, 0.017596063613891603, 0.01757219123840332, 0.017580320358276367, 0.017513919830322265, 0.01754307174682617, 0.017488256454467773, 0.01754457664489746, 0.01764761543273926, 0.01745692825317383, 0.017595008850097658, 0.017536352157592774, 0.017517152786254882, 0.017467008590698243, 0.0175230712890625, 0.01751078414916992, 0.017512096405029296, 0.017687904357910157, 0.01764240074157715, 0.017657855987548828, 0.01763737678527832, 0.01755340766906738, 0.01761075210571289, 0.018915327072143554, 0.018992223739624024, 0.017625343322753905, 0.017660064697265623, 0.017569343566894532, 0.017477855682373047, 0.017561311721801757, 0.017527807235717775, 0.01744895935058594, 0.017428224563598632, 0.017414207458496093, 0.017521856307983398, 0.01762323188781738, 0.017478464126586914, 0.017477632522583008, 0.01749420738220215, 0.017526592254638672, 0.017494016647338868, 0.01800396728515625, 0.018309375762939454, 0.01792140769958496, 0.017672672271728515, 0.017577823638916017, 0.017602624893188475, 0.017504255294799806, 0.0174653434753418, 0.017504480361938475, 0.01749567985534668, 0.0179017276763916, 0.01757798385620117, 0.017526784896850587, 0.017502208709716797, 0.017497152328491212, 0.0175031681060791, 0.017465408325195313, 0.017516447067260743, 0.017440799713134766, 0.017514400482177735, 0.017560863494873048, 0.017660640716552736, 0.017667423248291014, 0.01756166458129883, 0.01760873603820801, 0.017754688262939453, 0.017502208709716797, 0.01761484718322754, 0.017551679611206055, 0.01748899269104004, 0.01751878356933594, 0.017596832275390627, 0.017682592391967775, 0.01766793632507324, 0.01756480026245117, 0.01763212776184082, 0.017573888778686524, 0.01761075210571289, 0.01905254364013672, 0.017751808166503905, 0.018402816772460938, 0.017642240524291992, 0.017695871353149414, 0.017784767150878907, 0.018092992782592774, 
0.017690624237060547, 0.017780736923217775, 0.017757535934448242, 0.017916576385498047, 0.017669727325439453, 0.01762339210510254, 0.017618911743164063, 0.017839391708374022, 0.01754604721069336, 0.01760207939147949, 0.017934816360473633, 0.017833471298217773, 0.017682144165039063, 0.017746719360351562, 0.017663999557495116, 0.017762304306030274, 0.01757209587097168, 0.017550111770629883, 0.017543647766113283, 0.01757993507385254, 0.017986143112182617, 0.01765990447998047, 0.017618688583374023, 0.01749177551269531, 0.01757638359069824, 0.017526784896850587, 0.017563615798950195, 0.017498048782348632, 0.017593759536743164, 0.017480159759521486, 0.017506528854370117, 0.01744895935058594, 0.017541120529174805, 0.017663936614990234, 0.017611904144287108, 0.019082176208496095, 0.017597440719604493, 0.01760630416870117, 0.01755174446105957, 0.017505407333374023, 0.017504831314086915, 0.017469343185424806, 0.017460704803466797, 0.017478559494018556, 0.017529983520507813, 0.017463903427124023, 0.017520959854125977, 0.017436256408691408, 0.017455488204956054, 0.017395744323730467, 0.017483360290527345, 0.017358848571777344, 0.017411775588989258, 0.01753891181945801, 0.01745392036437988, 0.01738956832885742, 0.017485824584960938, 0.017413631439208984, 0.017439231872558594, 0.017403583526611328, 0.01744927978515625, 0.017442943572998047, 0.017489152908325194, 0.01740985679626465, 0.017426687240600584, 0.017465471267700195, 0.017446720123291015, 0.01735744094848633, 0.017585407257080077, 0.017373952865600586, 0.01744486427307129, 0.017526016235351563, 0.017593055725097655, 0.017455135345458984, 0.017500160217285156, 0.01766806411743164, 0.017612831115722656, 0.017624544143676757, 0.01752118492126465, 0.017538463592529297, 0.01756528091430664, 0.017456127166748048, 0.017726463317871095, 0.01747865676879883, 0.01748534393310547, 0.01751299285888672, 0.017561376571655272, 0.017714656829833985, 0.01753107261657715, 0.017555967330932617, 0.01760223960876465, 0.017557247161865235, 0.017676992416381834, 0.017609952926635742, 0.01768704032897949, 0.01771900749206543, 0.017525184631347657, 0.01749350357055664, 0.017589759826660157, 0.01758639907836914, 0.017988224029541016, 0.017809600830078126, 0.01762441635131836, 0.017670848846435546, 0.017692127227783204, 0.01768707275390625, 0.017704000473022462, 0.01781235122680664, 0.017643871307373046, 0.01806496047973633, 0.017645824432373048, 0.01753887939453125, 0.017689855575561523, 0.017628000259399413, 0.017607839584350585, 0.017636192321777343, 0.017657632827758788, 0.017682655334472656, 0.017688575744628905, 0.017626527786254884, 0.01770966339111328, 0.01767430305480957, 0.01762268829345703, 0.017577247619628908, 0.017605375289916993, 0.017631488800048827, 0.017589248657226563, 0.0175765438079834, 0.017650079727172852, 0.017680383682250975, 0.017727487564086913, 0.0176680965423584, 0.017709056854248048, 0.017550975799560546, 0.017463680267333984, 0.01754521560668945, 0.017500160217285156, 0.017473024368286134, 0.017543167114257813, 0.017420799255371093, 0.01743667221069336, 0.017489919662475584, 0.017514495849609374, 0.017528831481933595, 0.017432735443115233, 0.01754284858703613, 0.017490079879760742, 0.017539072036743163, 0.017512447357177736, 0.01751203155517578, 0.01749033546447754, 0.017496063232421876, 0.017427488327026366, 0.017599456787109374, 0.017520063400268553, 0.017573951721191406, 0.017574399948120118, 0.017634464263916017, 0.01766864013671875, 0.017625375747680663, 0.017479711532592774, 0.0174815673828125, 0.01753718376159668, 
0.01755388832092285, 0.017490144729614257, 0.017541120529174805, 0.017498111724853514, 0.01744076728820801, 0.017440576553344727, 0.017493408203125, 0.01753987121582031, 0.01762816047668457, 0.017439680099487306, 0.017409183502197265, 0.017486751556396483, 0.017541120529174805, 0.017551359176635743, 0.01764496040344238, 0.017485855102539062, 0.017504831314086915, 0.01753411293029785, 0.01749440002441406, 0.01757244873046875, 0.017497983932495118, 0.017510015487670897, 0.0175263671875, 0.017703584671020508, 0.017752384185791014, 0.017624383926391603, 0.017543392181396486, 0.017447263717651366, 0.017600448608398437, 0.017522687911987304, 0.017586240768432616, 0.017502143859863283, 0.01749996757507324, 0.017508319854736328, 0.01754729652404785, 0.017572032928466798, 0.01751219177246094, 0.01751862335205078, 0.017578079223632814, 0.017555583953857423, 0.01773750305175781, 0.01754956817626953, 0.017616384506225585, 0.01761039924621582, 0.0175665283203125, 0.017913055419921876, 0.01759097671508789, 0.01768662452697754, 0.017625247955322266, 0.017641311645507814, 0.01764556884765625, 0.017650976181030273, 0.017742816925048827, 0.017706304550170898, 0.017708959579467772, 0.01762563133239746, 0.017592191696166992, 0.017647743225097656, 0.0179748477935791, 0.017629663467407228, 0.01759228706359863, 0.01761667251586914, 0.01762086486816406, 0.01775446319580078, 0.017696735382080078, 0.017764352798461915, 0.017651391983032227, 0.01782316780090332, 0.01815011215209961, 0.017518304824829103, 0.017680831909179687, 0.01760051155090332, 0.017549152374267577, 0.017559488296508788, 0.017516767501831055, 0.017481279373168946, 0.017557952880859377, 0.017499776840209962, 0.01776063919067383, 0.017596416473388672, 0.01783296012878418, 0.017646623611450196, 0.017579456329345704, 0.017527551651000978, 0.017567520141601563, 0.01764352035522461, 0.017508544921875, 0.017454912185668945, 0.017514495849609374, 0.01756979179382324, 0.017689727783203126, 0.01762393569946289, 0.017489919662475584, 0.017604608535766602, 0.017508352279663086, 0.017604768753051756, 0.017499456405639647, 0.017545984268188475, 0.017520416259765626, 0.017440479278564455, 0.0176944637298584, 0.01751913642883301, 0.017522687911987304, 0.017434623718261717, 0.017456863403320314, 0.01750815963745117, 0.01759280014038086, 0.017565696716308594, 0.017515968322753907, 0.017559104919433594, 0.01746227264404297, 0.017405887603759766, 0.01751862335205078, 0.01742198371887207, 0.017447103500366212, 0.017414335250854493, 0.017534208297729493, 0.017449728012084963, 0.017567743301391603, 0.017459199905395507, 0.017485824584960938, 0.017508352279663086, 0.017514495849609374, 0.01740595245361328, 0.017612800598144532, 0.017577951431274413]",tokens/s,56.778968315798124,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 
111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,891.04384,6182.273024,0.0,5779.750912,5773.960192,s,1,7.17562744140625,7.17562744140625,0.0,7.17562744140625,7.17562744140625,7.17562744140625,7.17562744140625,[7.17562744140625],,kWh,5.883056520815444e-06,6.416738406537162e-07,2.1852795259874602e-06,8.71000988745662e-06,,MB,1258.139648,6498.942976,0.0,6085.935104,6038.345728,s,10,2.0956273956298825,0.2095627395629883,0.014331462672824181,0.2155265884399414,0.21899039001464846,0.2209185043334961,0.22246099578857423,"[0.1876812744140625, 0.2180411834716797, 0.21478096008300782, 0.21282473754882814, 0.216876953125, 0.216272216796875, 0.17630776977539062, 0.21143376159667968, 0.22284661865234376, 0.21856192016601564]",tokens/s,1221.5912071671216,kWh,5.714127417788626e-06,6.298131371800396e-07,3.8045756932306803e-06,1.0148516248199346e-05,tokens/kWh,25225362.381956294,MB,1263.43168,6519.914496,0.0,6106.906624,6086.544896,s,10,17.949836669921872,1.7949836669921875,0.0029659739519153567,1.794391845703125,1.7973954956054687,1.800190362548828,1.8024262561035156,"[1.7927791748046875, 1.7946810302734375, 1.792347412109375, 1.7923717041015625, 1.79388427734375, 1.7967744140625, 1.8029852294921875, 1.79507421875, 1.7941026611328126, 1.7948365478515624]",tokens/s,35.097812397127626,kWh,5.3070500908882195e-05,5.853815249923275e-06,3.5111066550368375e-05,9.403538270917386e-05,tokens/kWh,669960.5848879464,,s,630,17.94743587112426,0.028487993446229,0.0003592993373021704,0.028431696891784666,0.028614499282836915,0.02873092041015625,0.030125853271484374,"[0.029904895782470704, 0.02911846351623535, 0.02852579116821289, 
0.028414751052856447, 0.02832793617248535, 0.028254207611083985, 0.028157056808471678, 0.028239839553833006, 0.028234464645385742, 0.028306655883789063, 0.02823664093017578, 0.028229536056518553, 0.02829516792297363, 0.02846121597290039, 0.029011999130249023, 0.028368127822875976, 0.028332576751708985, 0.028304800033569336, 0.028242399215698242, 0.028254623413085937, 0.028247232437133788, 0.028387168884277343, 0.028296127319335937, 0.028280319213867186, 0.028346879959106445, 0.02831123161315918, 0.028345951080322264, 0.028361120223999024, 0.0283787841796875, 0.028385856628417968, 0.028413536071777344, 0.02849843215942383, 0.028472320556640625, 0.02861926460266113, 0.02852521514892578, 0.028487264633178713, 0.028535039901733398, 0.02846454429626465, 0.028405696868896484, 0.028425983428955078, 0.02845699119567871, 0.028437376022338867, 0.02842032051086426, 0.028368640899658203, 0.028370912551879884, 0.028425792694091796, 0.02848579216003418, 0.028462591171264647, 0.02844735908508301, 0.028434368133544923, 0.028543264389038085, 0.02852409553527832, 0.028903871536254882, 0.028553056716918945, 0.028573856353759766, 0.02849078369140625, 0.028424543380737306, 0.02849782371520996, 0.02842207908630371, 0.02845907211303711, 0.028441055297851563, 0.02854528045654297, 0.028442079544067383, 0.030184671401977538, 0.02933590316772461, 0.028703487396240235, 0.0283536319732666, 0.028317792892456055, 0.028323328018188477, 0.02832217597961426, 0.02830780792236328, 0.028358943939208986, 0.028327455520629884, 0.02824678421020508, 0.028288415908813477, 0.02833030319213867, 0.028357791900634765, 0.02838812828063965, 0.028332096099853515, 0.028366207122802734, 0.028816255569458007, 0.028393024444580077, 0.028378847122192383, 0.02841779136657715, 0.028270624160766603, 0.028303295135498046, 0.028364639282226562, 0.02836195182800293, 0.02838038444519043, 0.02837753677368164, 0.028360960006713867, 0.02888591957092285, 0.02839232063293457, 0.028442720413208007, 0.0284487361907959, 0.028464223861694334, 0.028513343811035156, 0.02863849639892578, 0.02870425605773926, 0.02859519958496094, 0.028504064559936523, 0.028551071166992188, 0.028498016357421874, 0.028422143936157225, 0.028442176818847656, 0.028456640243530274, 0.028435359954833983, 0.028380544662475585, 0.028378591537475586, 0.028439552307128906, 0.02843343925476074, 0.028550111770629882, 0.028385215759277344, 0.02841132736206055, 0.0284202880859375, 0.028493535995483397, 0.02842083168029785, 0.02846303939819336, 0.02863724708557129, 0.028438623428344727, 0.028428192138671874, 0.028428192138671874, 0.0285383358001709, 0.02856368064880371, 0.028549280166625977, 0.028418304443359375, 0.030083967208862306, 0.029138944625854493, 0.02865692710876465, 0.028346527099609376, 0.02828268814086914, 0.02821993637084961, 0.0282108154296875, 0.028267103195190428, 0.028341983795166014, 0.028324127197265625, 0.028315168380737304, 0.028390047073364257, 0.028264255523681642, 0.028207103729248048, 0.02832793617248535, 0.028341440200805663, 0.02832876777648926, 0.02834748840332031, 0.02830611228942871, 0.028276960372924806, 0.028553216934204102, 0.028279039382934572, 0.028314559936523438, 0.028261184692382812, 0.028329984664916992, 0.02838105583190918, 0.028376703262329103, 0.028382879257202148, 0.02833500862121582, 0.028345951080322264, 0.028371295928955077, 0.02838662338256836, 0.028396223068237306, 0.02854911994934082, 0.02855504035949707, 0.028696800231933595, 0.02854115104675293, 0.028536800384521485, 0.028426048278808593, 0.028454656600952147, 0.028391679763793944, 
0.02857369613647461, 0.028387327194213868, 0.028415456771850586, 0.028447263717651366, 0.028447872161865236, 0.028474239349365233, 0.028374303817749025, 0.028387264251708986, 0.028400415420532225, 0.02839756774902344, 0.028450815200805665, 0.02845891189575195, 0.028511455535888672, 0.02847427177429199, 0.028495840072631836, 0.028415584564208986, 0.028510623931884766, 0.028495872497558594, 0.02845302391052246, 0.028551359176635743, 0.028523199081420897, 0.028619808197021486, 0.03014041519165039, 0.02936556816101074, 0.028644031524658203, 0.02832784080505371, 0.028262527465820312, 0.02830668830871582, 0.028250848770141602, 0.028253215789794922, 0.028231935501098634, 0.028328672409057617, 0.028299264907836914, 0.02831577682495117, 0.028270463943481445, 0.028233728408813476, 0.028309503555297853, 0.02831942367553711, 0.028309152603149413, 0.028274335861206056, 0.02829974365234375, 0.02836944007873535, 0.028417312622070312, 0.02832044792175293, 0.028360607147216797, 0.02835980796813965, 0.028389663696289064, 0.028375776290893554, 0.02838118362426758, 0.028368736267089845, 0.028342655181884766, 0.02839561653137207, 0.028380863189697264, 0.02835043144226074, 0.0283536319732666, 0.028441600799560547, 0.028575103759765626, 0.02856812858581543, 0.02855936050415039, 0.028528640747070313, 0.02845088005065918, 0.028476991653442384, 0.02837881660461426, 0.028449472427368165, 0.02842624092102051, 0.02842982482910156, 0.028447519302368163, 0.028395231246948243, 0.028424192428588867, 0.028429536819458007, 0.028439327239990233, 0.028460416793823242, 0.02850598335266113, 0.028457599639892577, 0.028469375610351563, 0.028503263473510742, 0.028432735443115233, 0.028449216842651368, 0.028450559616088868, 0.028498176574707032, 0.02858403205871582, 0.028536767959594728, 0.028499935150146486, 0.028534879684448244, 0.028420000076293944, 0.03009769630432129, 0.029147167205810547, 0.028661663055419923, 0.02839257621765137, 0.02826691246032715, 0.028281375885009764, 0.028288991928100585, 0.028314943313598632, 0.028549888610839843, 0.02831782341003418, 0.028343711853027344, 0.028313823699951172, 0.028281055450439452, 0.028239648818969728, 0.028295711517333986, 0.028325599670410158, 0.02831702423095703, 0.028400415420532225, 0.02838310432434082, 0.028454912185668944, 0.028343711853027344, 0.028338783264160155, 0.028319904327392578, 0.028365760803222655, 0.028398143768310548, 0.028422496795654298, 0.028370399475097657, 0.0283503360748291, 0.02841580772399902, 0.02842710494995117, 0.02833967971801758, 0.028383647918701172, 0.02844585609436035, 0.02858083152770996, 0.028536447525024412, 0.028630943298339845, 0.02858995246887207, 0.02857865524291992, 0.02852854347229004, 0.028502944946289063, 0.028387519836425783, 0.02851696014404297, 0.028472543716430664, 0.02848454475402832, 0.02845270347595215, 0.02850217628479004, 0.02841708755493164, 0.02838172721862793, 0.028408031463623046, 0.028500160217285155, 0.028457088470458983, 0.028442432403564453, 0.02839353561401367, 0.028461183547973633, 0.028540800094604492, 0.02858755111694336, 0.02849225616455078, 0.028466751098632812, 0.028517919540405272, 0.02850003242492676, 0.028494016647338867, 0.028614336013793946, 0.02860256004333496, 0.030132831573486327, 0.0295280647277832, 0.028733440399169922, 0.028524192810058593, 0.028301792144775392, 0.028335071563720702, 0.028273151397705077, 0.028281248092651368, 0.02825395202636719, 0.02829689598083496, 0.028361215591430664, 0.028422208786010743, 0.028379135131835938, 0.02836591911315918, 0.028364896774291992, 0.028350656509399413, 
0.028304000854492188, 0.028397375106811524, 0.028323007583618165, 0.028433408737182617, 0.028434431076049805, 0.02836908721923828, 0.028359552383422852, 0.028371904373168947, 0.02836275291442871, 0.02843155288696289, 0.028386112213134765, 0.028388832092285158, 0.028418336868286133, 0.028431840896606445, 0.02844745635986328, 0.02840323257446289, 0.028415712356567382, 0.028545856475830078, 0.028649728775024415, 0.028623903274536133, 0.02857040023803711, 0.028604352951049804, 0.02851840019226074, 0.02853068733215332, 0.028384639739990235, 0.02845260810852051, 0.028450815200805665, 0.02837945556640625, 0.028400192260742186, 0.02847065544128418, 0.028406272888183592, 0.028473472595214842, 0.028465024948120116, 0.028432512283325197, 0.028684288024902343, 0.02878463935852051, 0.028589855194091796, 0.028516895294189454, 0.028608224868774415, 0.028528703689575195, 0.028561119079589845, 0.028526016235351562, 0.028508928298950194, 0.028612607955932616, 0.028548736572265625, 0.028617088317871093, 0.029487104415893556, 0.034301952362060545, 0.030795743942260742, 0.02954204750061035, 0.02893814468383789, 0.028921472549438478, 0.02848851203918457, 0.02832793617248535, 0.028337215423583983, 0.028369855880737305, 0.028325887680053712, 0.028284927368164063, 0.028387584686279298, 0.028373855590820313, 0.028402368545532228, 0.02830086326599121, 0.028395807266235352, 0.028550592422485352, 0.028457056045532225, 0.028466079711914064, 0.028725183486938477, 0.028427999496459962, 0.028325151443481446, 0.028316511154174804, 0.028382720947265624, 0.0283920955657959, 0.02838118362426758, 0.02833625602722168, 0.028489599227905272, 0.02836627197265625, 0.02839193534851074, 0.028342655181884766, 0.028310400009155273, 0.028386112213134765, 0.02835660743713379, 0.028442623138427735, 0.028603744506835938, 0.028491935729980468, 0.028602975845336914, 0.028604320526123047, 0.028563135147094725, 0.028551488876342773, 0.02854707145690918, 0.028495872497558594, 0.02846633529663086, 0.028492639541625977, 0.02846918487548828, 0.028546464920043944, 0.028432640075683593, 0.02840617561340332, 0.028387327194213868, 0.02850147247314453, 0.028500511169433595, 0.028356639862060547, 0.028464511871337892, 0.028530975341796876, 0.02852895927429199, 0.028462656021118166, 0.02857414436340332, 0.02854092788696289, 0.028669376373291016, 0.02854710388183594, 0.028453695297241212, 0.028615968704223633, 0.030108768463134764, 0.029218048095703126, 0.028775775909423828, 0.028315967559814453, 0.028331775665283204, 0.028289535522460937, 0.028404640197753905, 0.028227487564086915, 0.028343231201171874, 0.028338048934936525, 0.02824355125427246, 0.028316095352172853, 0.028342111587524414, 0.02826630401611328, 0.028342784881591795, 0.028311487197875976, 0.028257791519165038, 0.02830182456970215, 0.02830156707763672, 0.02833305549621582, 0.02839792060852051, 0.028363168716430662, 0.028301536560058595, 0.028300416946411132, 0.02837174415588379, 0.028525888442993166, 0.028371519088745117, 0.028411775588989257, 0.02838083267211914, 0.028425695419311524, 0.028418560028076172, 0.028436191558837892, 0.0284628791809082, 0.028566207885742188, 0.0286680965423584, 0.028649599075317382, 0.02874982452392578, 0.028618751525878908, 0.028642528533935546, 0.02853971290588379, 0.028510175704956054, 0.028523839950561524, 0.028569984436035156, 0.028492128372192383, 0.02844803237915039, 0.02842470359802246, 0.028727840423583985, 0.02855491256713867, 0.028483135223388672, 0.02843283271789551, 0.02852239990234375, 0.028552831649780272, 0.028499584197998046, 
0.028484319686889647, 0.028448896408081056, 0.028528032302856447, 0.028530431747436524, 0.028509023666381837, 0.028511775970458984, 0.028460832595825197, 0.02862067222595215, 0.028537343978881836, 0.02850396728515625, 0.030232032775878905, 0.02931331253051758, 0.02872457695007324, 0.02837558364868164, 0.02837958335876465, 0.02828927993774414, 0.028266176223754883, 0.02837299156188965, 0.02838528060913086, 0.028303455352783204, 0.028400896072387695, 0.028334560394287108, 0.028388736724853515, 0.02828780746459961, 0.028291072845458985, 0.028296287536621095, 0.028250175476074217, 0.028303615570068358, 0.028361312866210936, 0.02854297637939453, 0.02833203125, 0.028284927368164063, 0.028379135131835938, 0.028297216415405273, 0.028401279449462892, 0.02837936019897461, 0.0283668155670166, 0.02836499214172363, 0.02839756774902344, 0.02839347267150879, 0.028331552505493164, 0.028371423721313477, 0.02843449592590332, 0.028553152084350587, 0.028536575317382813, 0.028622367858886718, 0.02849180793762207, 0.028611263275146483, 0.02855062484741211, 0.02857219123840332, 0.028395711898803713, 0.028419679641723632, 0.02833020782470703, 0.028589696884155275, 0.028471807479858398, 0.028523712158203124, 0.028467615127563475, 0.028470687866210938, 0.02846512031555176, 0.028476320266723632, 0.028512287139892577, 0.028461023330688475, 0.028533920288085938, 0.028496992111206054, 0.028555103302001953, 0.02850543975830078, 0.028418752670288087, 0.028520320892333983, 0.028487680435180664, 0.02846112060546875, 0.02851577568054199, 0.028516063690185545, 0.02848643112182617, 0.030154367446899415, 0.029409120559692383, 0.02871993637084961, 0.028372703552246095, 0.028337631225585937, 0.02831987190246582, 0.028252384185791016, 0.028670143127441407, 0.028258304595947265, 0.028325632095336915, 0.028250335693359375, 0.028335391998291017, 0.028347135543823242, 0.028268543243408203, 0.028243967056274414, 0.02837718391418457, 0.028285951614379884, 0.02834547233581543, 0.028306560516357424, 0.02832828712463379, 0.028316192626953125, 0.028241695404052733, 0.028309503555297853, 0.028327392578125, 0.02845939254760742, 0.028356767654418944, 0.028370784759521483, 0.028401824951171876, 0.028440351486206054, 0.028498144149780275, 0.028432224273681642, 0.028407455444335938, 0.028465919494628907, 0.028525951385498047, 0.028545408248901366, 0.028618335723876953, 0.028664224624633788, 0.028628992080688476, 0.02855353546142578, 0.028550783157348634, 0.028567104339599608, 0.028555744171142577, 0.028481504440307618, 0.028481439590454103, 0.02856559944152832, 0.028471359252929686, 0.028473535537719728, 0.0285483512878418, 0.028473920822143554, 0.02846089553833008, 0.02849510383605957, 0.028452959060668945, 0.02849056053161621, 0.028516288757324218, 0.028544832229614257, 0.028507423400878907, 0.028468191146850588, 0.028497983932495117, 0.02847702407836914, 0.02853852844238281, 0.028475231170654296, 0.028557952880859376, 0.028476863861083983]",tokens/s,35.10250737341318,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in 
_sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,891.625472,11731.337216,0.0,11328.815104,11314.254848,s,1,7.52739501953125,7.52739501953125,0.0,7.52739501953125,7.52739501953125,7.52739501953125,7.52739501953125,[7.52739501953125],,kWh,7.3615748499984575e-06,7.948287845474198e-07,2.298612949980239e-06,1.0455016584526117e-05,,MB,1216.598016,12184.322048,0.0,11771.314176,11713.906688,s,10,3.7588512878417966,0.3758851287841797,0.012953368573984944,0.37577534484863284,0.38157590026855465,0.3945351119995117,0.4049024813842773,"[0.3513080749511719, 0.37062600708007815, 0.3755125427246094, 0.3713526611328125, 0.37683657836914064, 0.3786960754394531, 0.40749432373046873, 0.3784300537109375, 0.3725568237304687, 0.37603814697265625]",tokens/s,681.0591332198896,kWh,1.0743790470833877e-05,1.1848536126158198e-06,7.120769585499834e-06,1.904941366894953e-05,tokens/kWh,13438733.83448431,MB,1221.869568,12289.179648,0.0,11876.171776,11829.476864,s,10,33.137520507812496,3.3137520507812495,0.00504468508139212,3.31397802734375,3.3197455810546876,3.3207935668945314,3.3216319555664064,"[3.3068212890625, 3.321841552734375, 3.318758544921875, 3.3195126953125, 3.3121728515625, 3.309017822265625, 3.3086474609375, 3.308986083984375, 3.315783203125, 3.31597900390625]",tokens/s,19.011681934726266,kWh,9.689161045874698e-05,1.06874104933491e-05,6.448956548050124e-05,0.0001720685864325973,tokens/kWh,366133.0711557763,,s,630,33.13445639038088,0.05259437522282676,0.00029282804703407224,0.05255743980407715,0.05287781715393066,0.05299591159820557,0.05386064651489258,"[0.05357638549804687, 0.05243494415283203, 0.05223014450073242, 0.05225676727294922, 0.05230969619750977, 0.052187232971191405, 0.05228976058959961, 0.05211545562744141, 0.052133472442626956, 0.052151840209960935, 0.052091777801513674, 0.05220048141479492, 0.05208982467651367, 0.052133888244628904, 0.052101119995117184, 0.05206000137329102, 0.052137950897216796, 0.0524917106628418, 0.05253401565551758, 0.05254348754882812, 0.052498432159423826, 0.05244723129272461, 0.052498432159423826, 0.052295425415039065, 0.05239628982543945, 0.05237571334838867, 0.05226633453369141, 0.052219425201416016, 0.05221449661254883, 0.05226521682739258, 0.05227308654785156, 0.05245753479003906, 0.05246691131591797, 0.052405086517333985, 0.05235500717163086, 0.05242675018310547, 0.05253529739379883, 0.05255168151855469, 0.05294899368286133, 0.05258649444580078, 0.05259196853637695, 0.05255974578857422, 0.05254633712768555, 0.052547584533691405, 0.05281916809082031, 0.05279199981689453, 0.05270127868652344, 0.05285683059692383, 0.05272681427001953, 0.05277385711669922, 0.052686847686767575, 0.05267273712158203, 0.05289350509643555, 0.052620288848876956, 0.052587486267089846, 0.05299971389770508, 0.05273811340332031, 0.052844158172607424, 0.052730655670166014, 
0.05284454345703125, 0.052512767791748044, 0.0524716796875, 0.05248422241210938, 0.05389388656616211, 0.052700286865234376, 0.052400222778320314, 0.05241632080078125, 0.05239091110229492, 0.0524738883972168, 0.05237247848510742, 0.05232915115356445, 0.05240854263305664, 0.05262422561645508, 0.05257516860961914, 0.05248716735839844, 0.05252399826049805, 0.052465087890625, 0.05243337631225586, 0.05242611312866211, 0.052350719451904296, 0.052433727264404296, 0.05262963104248047, 0.05268051147460937, 0.05241062545776367, 0.05240019226074219, 0.052294593811035156, 0.0525035514831543, 0.052400127410888675, 0.052459358215332035, 0.052546878814697266, 0.0525382080078125, 0.05244518280029297, 0.05258879852294922, 0.05259408187866211, 0.05272172927856445, 0.05267814254760742, 0.052701438903808594, 0.05275907135009766, 0.052795230865478514, 0.05274844741821289, 0.05286502456665039, 0.05283951950073242, 0.05283955383300781, 0.053192577362060546, 0.05309167861938477, 0.05269152069091797, 0.05264617538452149, 0.052756191253662106, 0.05284249496459961, 0.05320851135253906, 0.05276752090454102, 0.052868545532226564, 0.05288995361328125, 0.052962337493896484, 0.05286624145507812, 0.05317814254760742, 0.05292851257324219, 0.05356911849975586, 0.05277328109741211, 0.052908031463623044, 0.053087745666503906, 0.05288547134399414, 0.053301921844482424, 0.053162879943847656, 0.05289263916015625, 0.05284662246704101, 0.05414374542236328, 0.05294899368286133, 0.052569313049316405, 0.05262771224975586, 0.05240886306762695, 0.052485504150390626, 0.05254412841796875, 0.05263257598876953, 0.052499454498291014, 0.05249155044555664, 0.05233737564086914, 0.05252505493164063, 0.0524318733215332, 0.05236550521850586, 0.052388481140136715, 0.052510913848876954, 0.05238579177856445, 0.05249955368041992, 0.052640670776367186, 0.05262851333618164, 0.052628448486328125, 0.052531200408935545, 0.05240188980102539, 0.05248886489868164, 0.05242841720581055, 0.0525021743774414, 0.05245708847045898, 0.052437889099121095, 0.0523875846862793, 0.05239004898071289, 0.05247174453735352, 0.052397598266601564, 0.05263407897949219, 0.05268479919433594, 0.052709537506103514, 0.05273110580444336, 0.052757118225097654, 0.05284662246704101, 0.05272777557373047, 0.05285273742675781, 0.052819969177246094, 0.05287526321411133, 0.05270502471923828, 0.0527262077331543, 0.05284828948974609, 0.05285833740234375, 0.05277356719970703, 0.05275033569335937, 0.05284454345703125, 0.05276841735839844, 0.05279328155517578, 0.052689247131347657, 0.05290371322631836, 0.05289334487915039, 0.05288351821899414, 0.05280403137207031, 0.05298543930053711, 0.05292700958251953, 0.05292236709594727, 0.05284454345703125, 0.05281951904296875, 0.05273846435546875, 0.05267609786987305, 0.0540324478149414, 0.053030017852783204, 0.052700031280517576, 0.052625438690185544, 0.05246271896362305, 0.052728031158447264, 0.052510784149169924, 0.05253337478637695, 0.0524312629699707, 0.052565185546875, 0.052407135009765626, 0.05254143905639649, 0.05248819351196289, 0.052614208221435546, 0.05253011322021484, 0.052529312133789065, 0.05250851058959961, 0.05262150573730469, 0.053407550811767575, 0.05294681549072266, 0.05276873779296875, 0.05252316665649414, 0.052590335845947266, 0.05260927963256836, 0.05261916732788086, 0.05253532791137695, 0.05254950332641602, 0.05255311965942383, 0.05251356887817383, 0.05244927978515625, 0.052482048034667966, 0.052857856750488284, 0.052825088500976565, 0.05268479919433594, 0.05275875091552734, 0.05282588958740234, 0.052794559478759766, 
0.05295523071289063, 0.05277974319458008, 0.05292851257324219, 0.05275801467895508, 0.052734462738037106, 0.05278310394287109, 0.052667743682861326, 0.0525871696472168, 0.05257033538818359, 0.05259772872924805, 0.052650432586669925, 0.05267526245117188, 0.05264352035522461, 0.05264108657836914, 0.0527367057800293, 0.05272576141357422, 0.05266841506958008, 0.05264096069335938, 0.05272454452514649, 0.05271340942382813, 0.05275859069824219, 0.052870174407958985, 0.05261616134643555, 0.05253324890136719, 0.05254326248168945, 0.0525805778503418, 0.05382627105712891, 0.05268051147460937, 0.052361408233642576, 0.05244927978515625, 0.05232825469970703, 0.052346046447753904, 0.052153022766113284, 0.05219535827636719, 0.05223561477661133, 0.05224534225463867, 0.05209004974365234, 0.05221033477783203, 0.05233427047729492, 0.05227372741699219, 0.05227315139770508, 0.05221376037597656, 0.052125823974609374, 0.05232217788696289, 0.05241759872436524, 0.052746463775634765, 0.052948768615722654, 0.05236601638793945, 0.05230387115478516, 0.052283649444580076, 0.05235302352905274, 0.052393985748291017, 0.05329919815063477, 0.052340896606445315, 0.05222995376586914, 0.052322334289550784, 0.0522507209777832, 0.05236316680908203, 0.05250371170043945, 0.05247881698608398, 0.05246105575561524, 0.052523521423339846, 0.052440608978271484, 0.05264003372192383, 0.0525456657409668, 0.05310060882568359, 0.05266563034057617, 0.052853473663330076, 0.052703231811523435, 0.05266195297241211, 0.05275052642822266, 0.05318576049804687, 0.05264886474609375, 0.05284659194946289, 0.052864574432373045, 0.05289353561401367, 0.05284012985229492, 0.052835487365722654, 0.05287705612182617, 0.05282815933227539, 0.05286707305908203, 0.052805633544921876, 0.05278515243530273, 0.05272934341430664, 0.05279929733276367, 0.05268764877319336, 0.05255574417114258, 0.05261033630371094, 0.05263216018676758, 0.05365142440795898, 0.0527011833190918, 0.05229779052734375, 0.05230732727050781, 0.05231209564208984, 0.052262462615966794, 0.05228643035888672, 0.052359169006347656, 0.05234483337402344, 0.052364288330078126, 0.052232353210449216, 0.0522371826171875, 0.052269023895263673, 0.052426624298095706, 0.05219907379150391, 0.05232073593139648, 0.05227084732055664, 0.052344097137451175, 0.052437984466552734, 0.052457473754882813, 0.05250835037231445, 0.0523526725769043, 0.05238441467285156, 0.05224204635620117, 0.052469215393066405, 0.052370208740234375, 0.05245145416259766, 0.05224566268920899, 0.052281822204589844, 0.05242099380493164, 0.052391937255859375, 0.052340736389160154, 0.05244633483886719, 0.052499393463134765, 0.05254067230224609, 0.05257651138305664, 0.052746688842773434, 0.05264595031738281, 0.05254867172241211, 0.052704193115234374, 0.05276870346069336, 0.05267359924316406, 0.0525035514831543, 0.05257209777832031, 0.05262540817260742, 0.05279334259033203, 0.052617374420166015, 0.05254316711425781, 0.05268204879760742, 0.05252796936035156, 0.05261907196044922, 0.052604225158691405, 0.05266435241699219, 0.052720481872558594, 0.05266390228271484, 0.05270774459838867, 0.05328851318359375, 0.052756927490234376, 0.0526192626953125, 0.052674015045166014, 0.05270991897583008, 0.0526005744934082, 0.052545791625976564, 0.054061344146728516, 0.052746688842773434, 0.052295425415039065, 0.052312255859375, 0.052194847106933596, 0.05242022323608399, 0.052247200012207035, 0.052348129272460936, 0.05230672073364258, 0.052372608184814456, 0.0522322883605957, 0.05240003204345703, 0.05223011016845703, 0.05239104080200195, 0.05229750442504883, 
0.0522784652709961, 0.052316638946533205, 0.05219363021850586, 0.052555774688720705, 0.052610271453857424, 0.05254995346069336, 0.0523985595703125, 0.05235059356689453, 0.052357505798339844, 0.0523686408996582, 0.05227315139770508, 0.05247375869750977, 0.05237436676025391, 0.05238547134399414, 0.05233081436157227, 0.05236479949951172, 0.0525497932434082, 0.052545440673828124, 0.052550079345703125, 0.05251881790161133, 0.052549854278564456, 0.052589504241943356, 0.052679359436035154, 0.05257392120361328, 0.0526976318359375, 0.05262851333618164, 0.05259670257568359, 0.052534080505371096, 0.052491775512695314, 0.05252742385864258, 0.05253398513793945, 0.05250831985473633, 0.05252719879150391, 0.0527740478515625, 0.05260515213012695, 0.052683296203613283, 0.05260287857055664, 0.052754432678222656, 0.05252268981933594, 0.05263980865478515, 0.05273379135131836, 0.05282665634155274, 0.05273126220703125, 0.052652542114257815, 0.052655872344970704, 0.05254121780395508, 0.05245180892944336, 0.05256345748901367, 0.05387468719482422, 0.05278841781616211, 0.05241740798950195, 0.05225641632080078, 0.052328510284423826, 0.0530676155090332, 0.05232064056396484, 0.05224857711791992, 0.05220742416381836, 0.0522059211730957, 0.052285057067871094, 0.05229385757446289, 0.05210521697998047, 0.052203712463378904, 0.052272480010986326, 0.05230409622192383, 0.05229593658447266, 0.05234207916259766, 0.05301273727416992, 0.052773311614990236, 0.05277920150756836, 0.05238150405883789, 0.05227481460571289, 0.05221209716796875, 0.05252268981933594, 0.0524147834777832, 0.052335777282714845, 0.052302047729492186, 0.05228607940673828, 0.05229590225219727, 0.052299552917480466, 0.052305374145507816, 0.05243548965454101, 0.052496383666992184, 0.05258601760864258, 0.052528671264648434, 0.052698143005371095, 0.05277999877929687, 0.05259360122680664, 0.0526929931640625, 0.05262745666503906, 0.05265190505981445, 0.05255795288085938, 0.05249008178710937, 0.05255190277099609, 0.05251391983032227, 0.052480831146240234, 0.05254143905639649, 0.05252924728393555, 0.05246464157104492, 0.05246044921875, 0.05257529449462891, 0.05266732788085938, 0.05264579010009766, 0.052604801177978514, 0.05261648178100586, 0.05274832153320313, 0.05289433670043946, 0.05284483337402344, 0.05272108840942383, 0.05252166366577148, 0.05248806381225586, 0.052700382232666015, 0.05394428634643555, 0.05274214553833008, 0.05237680053710937, 0.052268928527832034, 0.05217283248901367, 0.052259872436523434, 0.052313953399658206, 0.052383743286132815, 0.05242038345336914, 0.05231024169921875, 0.05229363250732422, 0.052457473754882813, 0.05236105728149414, 0.052394176483154295, 0.052254688262939455, 0.05233868789672851, 0.05233571243286133, 0.05240063858032227, 0.052574176788330075, 0.052789791107177735, 0.05255158233642578, 0.05246879959106445, 0.052404510498046876, 0.05239260864257812, 0.05255750274658203, 0.052523326873779294, 0.05244313430786133, 0.05255737686157227, 0.052469696044921875, 0.05238211059570313, 0.05259382247924805, 0.05260502243041992, 0.05265903854370117, 0.0527564811706543, 0.05274214553833008, 0.05280339050292969, 0.052815937042236326, 0.05287129592895508, 0.052813793182373045, 0.05294307327270508, 0.05271532821655273, 0.052711166381835935, 0.052733760833740234, 0.05266070556640625, 0.052719776153564456, 0.052741950988769534, 0.052805633544921876, 0.05279852676391601, 0.05297248077392578, 0.054046718597412106, 0.05259632110595703, 0.05264835357666016, 0.052789249420166016, 0.0526376953125, 0.05262099075317383, 0.052729728698730466, 
0.052689342498779296, 0.05286876678466797, 0.052703582763671875, 0.05268070220947266, 0.05269913482666016, 0.0527209587097168, 0.05245721435546875, 0.053721408843994144, 0.05282774353027344, 0.05237382507324219, 0.052328639984130856, 0.052363071441650394, 0.052348926544189454, 0.05220761489868164, 0.05227110290527344, 0.05228467178344726, 0.05238800048828125, 0.05220412826538086, 0.05233625411987305, 0.052203136444091795, 0.052257537841796875, 0.05224576187133789, 0.052462337493896484, 0.0522751350402832, 0.052426815032958984, 0.052475296020507815, 0.052620033264160156, 0.052458911895751956, 0.052294078826904296, 0.052227264404296876, 0.05226153564453125, 0.05237571334838867, 0.05240739059448242, 0.05230684661865234, 0.05231820678710938, 0.05256809616088867, 0.052424095153808595, 0.0523309440612793, 0.05245759963989258, 0.05255987167358398, 0.05257830429077148, 0.05264998245239258, 0.05266841506958008, 0.052877311706542966, 0.05290140914916992, 0.052776992797851564, 0.05290140914916992, 0.05284124755859375, 0.052828289031982424, 0.052647937774658204, 0.05275033569335937, 0.05283225631713867, 0.05274137496948242, 0.05275283050537109, 0.052768768310546874, 0.052923969268798825, 0.053176193237304686, 0.05292736053466797, 0.05280972671508789, 0.0530247688293457, 0.05284864044189453, 0.052838401794433595, 0.05308006286621094, 0.05299126434326172, 0.05283023834228515, 0.05285065460205078, 0.05344905471801758, 0.0529224967956543, 0.052894046783447266, 0.05288236618041992]",tokens/s,19.013440044934402,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.5488,576.585728,0.0,174.063616,172.57984,s,1,7.17860302734375,7.17860302734375,0.0,7.17860302734375,7.17860302734375,7.17860302734375,7.17860302734375,[7.17860302734375],,kWh,3.972858779176628e-06,4.3105511939712364e-07,8.930562700043554e-07,5.296970168578106e-06,,MB,1334.030336,652.0832,0.0,234.881024,215.589888,s,24,0.18621270322799682,0.007758862634499868,0.0004447106004053401,0.007625296115875244,0.007956178998947143,0.008353861117362976,0.009400440015792847,"[0.007635647773742676, 0.007561888217926025, 0.0076044158935546875, 0.008420384407043457, 0.007631072044372559, 0.007976895809173584, 0.007536064147949219, 0.007580031871795654, 0.007602719783782959, 0.007631807804107666, 0.007631392002105713, 0.007632800102233887, 0.007698783874511719, 0.007702527999877929, 0.009693183898925782, 0.0075953278541564945, 0.007567455768585205, 0.00752297592163086, 0.007594528198242187, 0.007724544048309326, 0.007618527889251709, 0.007907839775085449, 0.007619520187377929, 
0.007522367954254151]",tokens/s,32994.526654163616,kWh,2.2645853893198577e-07,2.497427511463177e-08,1.077548056245077e-07,3.5918761967112524e-07,tokens/kWh,712719442.3749778,MB,1373.646848,654.180352,0.0,236.978176,215.592448,s,24,10.008611206054688,0.4170254669189453,0.005938232338745928,0.4168939666748047,0.42363447570800783,0.4255839813232422,0.4306494128417969,"[0.41041812133789063, 0.41581881713867186, 0.415381591796875, 0.4149851989746094, 0.4224813232421875, 0.4148576354980469, 0.42377093505859376, 0.41901617431640625, 0.41732550048828126, 0.4259039306640625, 0.4164624328613281, 0.43206689453125, 0.41019940185546877, 0.4098469543457031, 0.4233160705566406, 0.4055888977050781, 0.41422329711914063, 0.4071998291015625, 0.4183034973144531, 0.41843682861328124, 0.4186194152832031, 0.42165240478515625, 0.415028564453125, 0.4177074890136719]",tokens/s,151.0699105871271,kWh,1.1623008903234026e-05,1.2818120064003312e-06,4.180159735815889e-06,1.7084980645450244e-05,tokens/kWh,3687449.304590052,,s,1512,9.995854311466214,0.006611014756260726,0.00033837475583547934,0.006558608055114747,0.006696943998336792,0.006797974538803101,0.008763735265731811,"[0.006622496128082275, 0.006661920070648193, 0.006714655876159668, 0.006705984115600586, 0.0066053118705749515, 0.0065437121391296385, 0.006490111827850342, 0.006494207859039307, 0.006526815891265869, 0.006628896236419678, 0.0065071358680725095, 0.006534815788269043, 0.006535776138305664, 0.0066044158935546875, 0.006717760086059571, 0.006729536056518555, 0.00655785608291626, 0.00671065616607666, 0.006587135791778565, 0.006526815891265869, 0.006582143783569336, 0.006500351905822754, 0.00643071985244751, 0.006469567775726318, 0.006385727882385254, 0.0064020161628723146, 0.006467103958129883, 0.006392320156097412, 0.006555647850036621, 0.006377471923828125, 0.0064471039772033695, 0.006403647899627686, 0.006338912010192871, 0.006540736198425293, 0.006371359825134277, 0.006362016201019287, 0.006381279945373535, 0.006339744091033935, 0.00639299201965332, 0.006354944229125976, 0.006439807891845703, 0.006474431991577149, 0.006473855972290039, 0.006470687866210937, 0.0065231361389160155, 0.006378111839294434, 0.0063976640701293944, 0.006332799911499023, 0.006494207859039307, 0.006398240089416504, 0.0064232640266418456, 0.006558559894561768, 0.006572192192077637, 0.00663759994506836, 0.006466784000396728, 0.006476223945617676, 0.006616896152496338, 0.006537504196166992, 0.006464735984802246, 0.006474751949310303, 0.006620160102844238, 0.006654367923736572, 0.006546016216278076, 0.006421088218688965, 0.006492032051086426, 0.006439072132110596, 0.0064357762336730955, 0.006516767978668213, 0.007066559791564941, 0.006455584049224854, 0.006544928073883057, 0.006534463882446289, 0.006386559963226318, 0.006410463809967041, 0.006409152030944824, 0.0064596481323242185, 0.006549759864807129, 0.006441311836242676, 0.0064737281799316405, 0.006387104034423828, 0.006494751930236816, 0.006477280139923096, 0.006406752109527588, 0.006437119960784912, 0.0065493760108947755, 0.006481887817382813, 0.006491424083709717, 0.00651852798461914, 0.0065170879364013674, 0.006508351802825928, 0.006485055923461914, 0.006488031864166259, 0.00643449592590332, 0.006455647945404053, 0.006586016178131104, 0.006475776195526123, 0.006529024124145508, 0.006528736114501953, 0.006485504150390625, 0.006517024040222168, 0.006500864028930664, 0.006464704036712647, 0.006490943908691406, 0.006527135848999024, 0.006536831855773926, 0.00652342414855957, 0.006462240219116211, 0.0065188159942626955, 
0.006469823837280273, 0.006565855979919434, 0.006533152103424073, 0.006470335960388184, 0.0065157442092895505, 0.006609888076782227, 0.006463488101959228, 0.006548831939697266, 0.008819040298461913, 0.008895071983337402, 0.006690656185150147, 0.00663267183303833, 0.006525599956512451, 0.00659449577331543, 0.006555712223052979, 0.00670684814453125, 0.00667251205444336, 0.006696671962738037, 0.006492159843444824, 0.006535168170928955, 0.006559743881225586, 0.006489664077758789, 0.0065008001327514645, 0.006479743957519531, 0.006524191856384277, 0.006830944061279297, 0.006524928092956543, 0.006593664169311523, 0.006615968227386475, 0.006668352127075196, 0.006635359764099121, 0.006596223831176758, 0.00649571180343628, 0.006605792045593262, 0.006541600227355957, 0.006448863983154297, 0.00658841609954834, 0.0074670081138610836, 0.006567455768585205, 0.0066358399391174315, 0.006613183975219727, 0.006562911987304688, 0.0066835198402404785, 0.006773856163024903, 0.00660368013381958, 0.006700096130371094, 0.00663647985458374, 0.00660262393951416, 0.006568064212799072, 0.006551551818847656, 0.006580063819885254, 0.006637728214263916, 0.0066501121520996095, 0.006532383918762207, 0.006562272071838379, 0.006580448150634766, 0.006473855972290039, 0.006711167812347412, 0.006502528190612793, 0.0065287680625915525, 0.006555552005767822, 0.006549503803253174, 0.0065443840026855465, 0.006508927822113037, 0.006421120166778565, 0.006518208026885987, 0.006523392200469971, 0.006530943870544433, 0.006543680191040039, 0.00658400011062622, 0.006551743984222412, 0.00654691219329834, 0.006539904117584229, 0.0065251197814941405, 0.006569215774536133, 0.006539807796478271, 0.00657097578048706, 0.006597599983215332, 0.006574399948120117, 0.006528704166412353, 0.006545407772064209, 0.006616479873657226, 0.006576735973358155, 0.0065920958518981936, 0.006477759838104248, 0.006519328117370606, 0.006540480136871338, 0.00656163215637207, 0.006535999774932862, 0.006529471874237061, 0.006590112209320069, 0.006555647850036621, 0.006598656177520752, 0.006610655784606933, 0.006715072154998779, 0.006547071933746338, 0.006622176170349121, 0.006556896209716797, 0.006511648178100586, 0.006510335922241211, 0.006532735824584961, 0.006565567970275879, 0.006601119995117187, 0.006541600227355957, 0.006656000137329102, 0.006625279903411865, 0.0065413122177124024, 0.006612991809844971, 0.006569056034088134, 0.006515327930450439, 0.0065632638931274415, 0.006556672096252441, 0.006547264099121094, 0.006570047855377197, 0.0065924801826477055, 0.006618336200714111, 0.006594336032867432, 0.006547872066497803, 0.006568863868713379, 0.006580063819885254, 0.006587327957153321, 0.006613664150238037, 0.0066003198623657226, 0.006559648036956787, 0.006638304233551025, 0.006618336200714111, 0.0064702401161193844, 0.006666687965393067, 0.006606719970703125, 0.006545279979705811, 0.006705247879028321, 0.006589824199676513, 0.0065213761329650875, 0.006626336097717285, 0.006564832210540771, 0.006582272052764892, 0.006633471965789795, 0.006567615985870362, 0.006581759929656982, 0.006574175834655762, 0.006531680107116699, 0.006568064212799072, 0.006606560230255127, 0.006631328105926514, 0.006529024124145508, 0.0065474557876586915, 0.0065372161865234375, 0.006548768043518067, 0.006679103851318359, 0.006553760051727295, 0.0065814399719238284, 0.006607615947723388, 0.006674655914306641, 0.006633312225341797, 0.0065998082160949705, 0.00666099214553833, 0.0066111359596252445, 0.0066046080589294434, 0.0067696962356567384, 0.006611936092376709, 0.006553088188171387, 
0.008470720291137695, 0.0078009281158447265, 0.008415616035461426, 0.006653056144714355, 0.006623936176300049, 0.0066900157928466795, 0.006699903964996338, 0.006729663848876953, 0.0067952961921691896, 0.006593952178955078, 0.006638144016265869, 0.0066143999099731446, 0.006617728233337402, 0.006667647838592529, 0.006670976161956787, 0.006541408061981201, 0.006555071830749512, 0.006466015815734864, 0.00653107213973999, 0.006688767910003662, 0.006604800224304199, 0.006641407966613769, 0.006572288036346435, 0.006477151870727539, 0.006513504028320313, 0.006528831958770752, 0.006529056072235108, 0.006701024055480957, 0.0067276802062988285, 0.006751359939575196, 0.006638751983642578, 0.0067785921096801755, 0.00650438404083252, 0.006622719764709473, 0.006777503967285156, 0.0065207362174987795, 0.006576223850250244, 0.00658784008026123, 0.006629856109619141, 0.006647808074951172, 0.006595776081085205, 0.006607583999633789, 0.00660694408416748, 0.0067760000228881834, 0.006608863830566406, 0.006656864166259766, 0.006632160186767578, 0.006551328182220459, 0.006653952121734619, 0.006574207782745361, 0.006555327892303467, 0.006585984230041504, 0.006523744106292724, 0.0068113598823547365, 0.006586368083953857, 0.0065574078559875485, 0.006607327938079834, 0.0065634241104125975, 0.006612512111663818, 0.006507199764251709, 0.006516736030578613, 0.006510528087615966, 0.00651910400390625, 0.006627071857452393, 0.006522816181182861, 0.006590176105499268, 0.0065865921974182125, 0.006553728103637696, 0.006567935943603515, 0.006513728141784668, 0.006603712081909179, 0.006639200210571289, 0.006495039939880371, 0.006549088001251221, 0.006633471965789795, 0.006606880187988281, 0.006622208118438721, 0.006552703857421875, 0.0064776639938354495, 0.0065474557876586915, 0.006687903881072998, 0.006906015872955322, 0.006617311954498291, 0.006609568119049072, 0.0065066561698913575, 0.006558752059936524, 0.0064999680519104005, 0.006527008056640625, 0.00658735990524292, 0.0066112642288208, 0.006565408229827881, 0.006617248058319092, 0.00651635217666626, 0.006588799953460693, 0.006546879768371582, 0.006480192184448242, 0.00660214376449585, 0.0065155520439147945, 0.006576064109802246, 0.006565951824188233, 0.006523903846740723, 0.006590943813323974, 0.006562335968017578, 0.006498271942138672, 0.0065326719284057614, 0.006662208080291748, 0.006607071876525879, 0.006549088001251221, 0.00655625581741333, 0.006490367889404297, 0.006504447937011719, 0.00655951976776123, 0.006516575813293457, 0.006525472164154052, 0.0064975681304931645, 0.00655622386932373, 0.00654534387588501, 0.006528160095214844, 0.006576352119445801, 0.006529183864593506, 0.006496416091918945, 0.006512063980102539, 0.006490240097045899, 0.006602880001068115, 0.006683328151702881, 0.006518784046173095, 0.006559296131134033, 0.006513088226318359, 0.006481376171112061, 0.006646304130554199, 0.0068055038452148435, 0.007266304016113281, 0.007497632026672364, 0.007421023845672607, 0.007492032051086426, 0.006990496158599853, 0.006861887931823731, 0.006607647895812988, 0.0066146240234375, 0.006577919960021972, 0.006626016139984131, 0.006642975807189941, 0.006527359962463379, 0.006590176105499268, 0.006535808086395264, 0.0065548157691955565, 0.006609055995941162, 0.00658406400680542, 0.006496895790100097, 0.0065660481452941896, 0.006520927906036377, 0.006834303855895996, 0.006518432140350342, 0.006485663890838623, 0.008970848083496094, 0.008851455688476563, 0.0066744318008422855, 0.006569151878356934, 0.00666707181930542, 0.006573887825012207, 0.006537343978881836, 
0.0069122881889343265, 0.006653279781341553, 0.006551807880401612, 0.006609344005584717, 0.00662659215927124, 0.00662556791305542, 0.006607391834259033, 0.006520800113677979, 0.00657535982131958, 0.006624800205230713, 0.0065157442092895505, 0.006504703998565673, 0.006557439804077149, 0.006553215980529785, 0.006659488201141358, 0.006611743927001953, 0.006501952171325684, 0.006619520187377929, 0.006648064136505127, 0.006561312198638916, 0.006591008186340332, 0.006608992099761963, 0.006678239822387695, 0.006860928058624267, 0.006738976001739502, 0.006581024169921875, 0.006623424053192138, 0.006812704086303711, 0.0066301760673522945, 0.00669920015335083, 0.006964799880981446, 0.006620704174041748, 0.006615968227386475, 0.006688767910003662, 0.006616799831390381, 0.006622591972351074, 0.0066323518753051754, 0.006700287818908692, 0.00662988805770874, 0.006689087867736816, 0.006660031795501709, 0.006651264190673828, 0.006609055995941162, 0.006615776062011719, 0.00659555196762085, 0.006628128051757812, 0.006574079990386963, 0.0066087360382080075, 0.0066921281814575195, 0.00666918420791626, 0.0067420158386230465, 0.00658841609954834, 0.006506175994873047, 0.00661023998260498, 0.00654204797744751, 0.006582816123962402, 0.0066102719306945805, 0.006576543807983399, 0.006590208053588867, 0.006604159832000732, 0.006510848045349121, 0.0065417919158935545, 0.006588768005371093, 0.006593887805938721, 0.00663804817199707, 0.006659808158874511, 0.00663375997543335, 0.006639616012573242, 0.006887072086334228, 0.0071740798950195315, 0.006689311981201172, 0.006635488033294678, 0.006585440158843994, 0.006624383926391602, 0.006506559848785401, 0.006535039901733398, 0.00667084789276123, 0.006623072147369385, 0.006590208053588867, 0.006562047958374023, 0.0065634560585021975, 0.006532800197601318, 0.006556640148162842, 0.006534912109375, 0.006801248073577881, 0.006621439933776855, 0.006529119968414307, 0.006595903873443603, 0.006584799766540527, 0.006564000129699707, 0.006560991764068604, 0.006640063762664795, 0.0065742721557617186, 0.006612991809844971, 0.00658128023147583, 0.006638271808624268, 0.006574368000030518, 0.006670335769653321, 0.006659167766571045, 0.006765376091003418, 0.006654047966003418, 0.006603936195373535, 0.0066631360054016115, 0.006627200126647949, 0.006557024002075195, 0.006619808197021485, 0.0065474557876586915, 0.006506527900695801, 0.006616479873657226, 0.0064694080352783205, 0.006572832107543945, 0.006590464115142822, 0.0064880638122558594, 0.006551136016845703, 0.006515071868896484, 0.006467455863952636, 0.0067422399520874025, 0.006600639820098877, 0.006500480175018311, 0.006543231964111328, 0.0066007041931152345, 0.00651043176651001, 0.006578335762023926, 0.006526976108551025, 0.00656819200515747, 0.006780672073364258, 0.006516736030578613, 0.00653542423248291, 0.00656876802444458, 0.0065487041473388675, 0.007270112037658692, 0.006731552124023437, 0.006755648136138916, 0.006980512142181396, 0.007049215793609619, 0.0066622400283813474, 0.006536704063415527, 0.006931551933288574, 0.006600416183471679, 0.006542719841003418, 0.006559743881225586, 0.006730495929718017, 0.006572095870971679, 0.0067153282165527345, 0.006756351947784424, 0.008638175964355468, 0.008636704444885254, 0.006620351791381836, 0.006746943950653076, 0.006581823825836181, 0.006627264022827148, 0.006632224082946777, 0.006578112125396728, 0.006594207763671875, 0.00652627182006836, 0.006591296195983887, 0.006549503803253174, 0.006530623912811279, 0.006556128025054932, 0.006580192089080811, 0.006574079990386963, 
0.0065862398147583006, 0.006549632072448731, 0.00666431999206543, 0.0065797758102417, 0.006557568073272705, 0.006648255825042725, 0.006574079990386963, 0.006684864044189453, 0.006564735889434814, 0.006581535816192627, 0.006567584037780762, 0.006549791812896728, 0.006544991970062256, 0.006552896022796631, 0.006560768127441406, 0.0065393919944763184, 0.007236544132232666, 0.008462400436401367, 0.007307007789611817, 0.007280767917633057, 0.007117280006408691, 0.006585887908935547, 0.006751039981842041, 0.007096320152282715, 0.0067513279914855956, 0.006556575775146485, 0.006574079990386963, 0.006512639999389648, 0.006602752208709717, 0.006620480060577393, 0.006525631904602051, 0.006619232177734375, 0.0067758398056030275, 0.006500415802001953, 0.0067469758987426754, 0.006616384029388428, 0.006548128128051758, 0.006655136108398438, 0.006642591953277588, 0.006493120193481445, 0.006543360233306885, 0.006723584175109864, 0.006554687976837158, 0.006572991847991943, 0.006569983959197998, 0.00656550407409668, 0.00671782398223877, 0.006819839954376221, 0.006626399993896484, 0.0066119999885559085, 0.006637407779693604, 0.006575935840606689, 0.006627359867095947, 0.006576416015625, 0.006838175773620605, 0.0066455998420715335, 0.006649824142456054, 0.00659065580368042, 0.006721536159515381, 0.006729728221893311, 0.006604127883911133, 0.006845088005065918, 0.006679935932159424, 0.006592351913452149, 0.006630239963531494, 0.006601759910583496, 0.006562719821929932, 0.00656828784942627, 0.006564832210540771, 0.006650559902191162, 0.006617087841033936, 0.006609951972961426, 0.006721727848052979, 0.006587135791778565, 0.006718527793884277, 0.006600831985473633, 0.006658912181854248, 0.00653107213973999, 0.006553823947906494, 0.006573919773101807, 0.006576064109802246, 0.006645247936248779, 0.00658892822265625, 0.0065372161865234375, 0.0066317119598388674, 0.006682112216949463, 0.006512639999389648, 0.006508768081665039, 0.006450623989105225, 0.0064982080459594724, 0.006484896183013916, 0.00646937608718872, 0.006582272052764892, 0.006524960041046143, 0.006541344165802002, 0.006564032077789306, 0.006505856037139893, 0.006494592189788819, 0.006589695930480957, 0.006537375926971435, 0.006572735786437988, 0.006580192089080811, 0.006678400039672852, 0.0065195198059082035, 0.00654860782623291, 0.006508480072021485, 0.006669023990631104, 0.006631616115570068, 0.006526783943176269, 0.00655731201171875, 0.006732160091400146, 0.006508351802825928, 0.0065742721557617186, 0.006539519786834717, 0.006536960124969482, 0.00653926420211792, 0.006473855972290039, 0.006887199878692627, 0.006593664169311523, 0.007564256191253662, 0.009583776473999023, 0.008594079971313477, 0.00906668758392334, 0.008756416320800781, 0.006662975788116455, 0.006694911956787109, 0.006770688056945801, 0.006639616012573242, 0.006696959972381592, 0.006819392204284668, 0.007008512020111084, 0.006684864044189453, 0.006907680034637451, 0.006576352119445801, 0.006649856090545654, 0.009000960350036622, 0.008915167808532715, 0.006633600234985351, 0.006657695770263672, 0.006680575847625733, 0.00659222412109375, 0.006566304206848145, 0.0065327038764953615, 0.0065006399154663086, 0.006696800231933594, 0.006553760051727295, 0.006598688125610351, 0.006564000129699707, 0.006493824005126953, 0.006627871990203858, 0.006583775997161865, 0.0065147199630737306, 0.006617184162139893, 0.006543424129486084, 0.006440959930419922, 0.006463488101959228, 0.006586495876312256, 0.006458816051483155, 0.0065, 0.00650435209274292, 0.006574687957763672, 0.006486303806304931, 
0.006452479839324951, 0.006968063831329346, 0.006549632072448731, 0.006518784046173095, 0.006520800113677979, 0.006577888011932373, 0.006479455947875977, 0.006501247882843017, 0.00645027208328247, 0.006493152141571045, 0.006493984222412109, 0.006434815883636475, 0.00649561595916748, 0.006465216159820556, 0.006447328090667725, 0.006502816200256348, 0.00646943998336792, 0.006564064025878906, 0.006508831977844238, 0.006496352195739746, 0.00644268798828125, 0.00644323205947876, 0.0064471039772033695, 0.00659449577331543, 0.006481696128845215, 0.006510240077972412, 0.006487840175628662, 0.006428639888763427, 0.00647052812576294, 0.006496255874633789, 0.006451200008392334, 0.006604800224304199, 0.006569503784179687, 0.00650812816619873, 0.006499551773071289, 0.006456639766693115, 0.006547808170318603, 0.006548927783966064, 0.006484543800354004, 0.0066375679969787596, 0.006463039875030518, 0.006572735786437988, 0.006565087795257568, 0.006453375816345214, 0.006553919792175293, 0.006574207782745361, 0.006448287963867188, 0.006509119987487793, 0.006510848045349121, 0.006514560222625732, 0.006522367954254151, 0.00646617603302002, 0.006494207859039307, 0.006518879890441895, 0.006513919830322266, 0.006526976108551025, 0.00645801591873169, 0.006481919765472412, 0.0065301117897033694, 0.006450111865997314, 0.006486015796661377, 0.006516736030578613, 0.006526976108551025, 0.006553599834442139, 0.00652288007736206, 0.006440959930419922, 0.0064404158592224125, 0.006466464042663574, 0.006635519981384277, 0.006512639999389648, 0.006604320049285889, 0.006467775821685791, 0.006473279953002929, 0.006503136157989502, 0.00651855993270874, 0.006492288112640381, 0.0065680317878723145, 0.006589888095855713, 0.006445631980895996, 0.006475776195526123, 0.006463488101959228, 0.00643071985244751, 0.006467199802398681, 0.006420639991760254, 0.006486207962036133, 0.006497312068939209, 0.006452223777770996, 0.006571424007415771, 0.00648854398727417, 0.0064759039878845214, 0.006512639999389648, 0.006434815883636475, 0.006489151954650879, 0.006603936195373535, 0.006501311779022217, 0.006529888153076172, 0.006594560146331787, 0.006529248237609863, 0.006571807861328125, 0.006488287925720215, 0.006434783935546875, 0.006419519901275635, 0.006370048046112061, 0.006459136009216309, 0.006574463844299316, 0.0065166401863098146, 0.006505824089050293, 0.006555295944213867, 0.006584320068359375, 0.006490623950958252, 0.006467967987060547, 0.006550687789916992, 0.006509503841400146, 0.006426943778991699, 0.0065032958984375, 0.0064663681983947756, 0.006524735927581787, 0.006487648010253907, 0.006472640037536621, 0.006471551895141601, 0.006553376197814941, 0.006413407802581787, 0.0064358081817626955, 0.006395840167999268, 0.006463232040405273, 0.006553631782531738, 0.006424799919128418, 0.006477248191833496, 0.006410016059875488, 0.008606016159057617, 0.006528607845306397, 0.006544320106506347, 0.006433951854705811, 0.006510784149169922, 0.006463136196136475, 0.006521920204162598, 0.006710752010345459, 0.006425055980682373, 0.0065261759757995605, 0.006416416168212891, 0.006479904174804687, 0.006564576148986817, 0.006387712001800537, 0.0064832639694213864, 0.0064058880805969234, 0.006364031791687012, 0.006471519947052002, 0.006528255939483643, 0.00658351993560791, 0.006520576000213623, 0.006588736057281494, 0.0065493440628051755, 0.006539103984832764, 0.006545407772064209, 0.006530079841613769, 0.00648086404800415, 0.006465695858001709, 0.00653718376159668, 0.0064858880043029785, 0.006474080085754394, 0.006464799880981445, 
0.006390175819396973, 0.006447072029113769, 0.006405344009399414, 0.0064943041801452635, 0.006496960163116455, 0.006574079990386963, 0.006538976192474366, 0.006560031890869141, 0.00642252779006958, 0.0065064959526062015, 0.006368480205535889, 0.006922976016998291, 0.006471295833587646, 0.006410367965698242, 0.0064629120826721195, 0.006452095985412598, 0.006438752174377441, 0.006475232124328613, 0.006490431785583496, 0.006451136112213135, 0.006468031883239746, 0.0063836159706115725, 0.00642416000366211, 0.006472095966339111, 0.006381696224212647, 0.0070032958984375, 0.008524224281311036, 0.008080960273742676, 0.008549087524414063, 0.01063532829284668, 0.007970751762390137, 0.006400000095367431, 0.006512639999389648, 0.006496255874633789, 0.006442016124725342, 0.006488863945007324, 0.006420095920562744, 0.006371039867401123, 0.006427519798278809, 0.006410399913787842, 0.0064692158699035645, 0.006426015853881836, 0.006386655807495117, 0.006430240154266358, 0.006402368068695068, 0.006500351905822754, 0.006445055961608887, 0.0063836159706115725, 0.0064143362045288085, 0.006352447986602784, 0.006366720199584961, 0.006382175922393799, 0.006436992168426514, 0.0064858880043029785, 0.006441311836242676, 0.0064975681304931645, 0.006495232105255127, 0.006532767772674561, 0.006565248012542725, 0.0064559998512268065, 0.006459392070770263, 0.006491648197174072, 0.006464000225067139, 0.006471680164337158, 0.006471680164337158, 0.006481919765472412, 0.006452832221984863, 0.006386079788208008, 0.006387712001800537, 0.006385503768920898, 0.006349279880523682, 0.00647654390335083, 0.0063640961647033695, 0.006410304069519043, 0.0064019842147827145, 0.006403456211090088, 0.006489727973937988, 0.006360064029693604, 0.006432767868041992, 0.0064143362045288085, 0.006405600070953369, 0.006480415821075439, 0.0065064959526062015, 0.006409599781036377, 0.0064065918922424315, 0.006373568058013916, 0.006397759914398194, 0.00637497615814209, 0.00635148811340332, 0.006391808032989502, 0.006342688083648682, 0.006377439975738525, 0.006385663986206055, 0.00645088005065918, 0.0064737281799316405, 0.006387712001800537, 0.006487648010253907, 0.00650435209274292, 0.006455808162689209, 0.006516448020935059, 0.006459584236145019, 0.006491807937622071, 0.006609344005584717, 0.0064654722213745115, 0.006510655879974365, 0.0065146880149841305, 0.006447135925292969, 0.00663865613937378, 0.006459680080413819, 0.006603392124176025, 0.006546720027923584, 0.006456352233886718, 0.006500192165374756, 0.006514527797698975, 0.0064074559211730955, 0.006490943908691406, 0.0064851841926574704, 0.0065359678268432616, 0.006649792194366455, 0.0064505281448364255, 0.006519455909729004, 0.006709248065948486, 0.008631487846374512, 0.00891817569732666, 0.006674079895019531, 0.0065322241783142086, 0.006590464115142822, 0.006653984069824218, 0.006503263950347901, 0.00646943998336792, 0.006456736087799072, 0.006446944236755371, 0.0065682239532470705, 0.006451648235321045, 0.006456799983978271, 0.006431488037109375, 0.006391808032989502, 0.006471680164337158, 0.006400000095367431, 0.006481311798095703, 0.006473343849182129, 0.006425055980682373, 0.006461952209472656, 0.006545152187347412, 0.006494048118591308, 0.006496543884277344, 0.006492288112640381, 0.0064737281799316405, 0.006500256061553955, 0.006396224021911621, 0.006422304153442383, 0.0064563841819763185, 0.006517504215240479, 0.006474976062774658, 0.006429664134979248, 0.006432864189147949, 0.006417664051055908, 0.006371551990509033, 0.0065446081161499026, 0.00642310380935669, 0.00646943998336792, 
0.00649455976486206, 0.006445087909698486, 0.006498400211334228, 0.006515520095825195, 0.006487232208251953, 0.0064852161407470705, 0.006401631832122803, 0.006463935852050781, 0.00658080005645752, 0.006423647880554199, 0.006581056118011475, 0.006461696147918701, 0.00648524808883667, 0.006523392200469971, 0.006436704158782959, 0.006453695774078369, 0.006426400184631348, 0.006548927783966064, 0.006469823837280273, 0.006461184024810791, 0.0064596481323242185, 0.0064412798881530765, 0.006360864162445069, 0.006402431964874267, 0.006395328044891358, 0.006433184146881103, 0.006426112174987793, 0.00643123197555542, 0.006461440086364746, 0.00653926420211792, 0.006436895847320556, 0.00643068790435791, 0.006388895988464356, 0.0064287037849426265, 0.006374207973480225, 0.00637772798538208, 0.006455008029937744, 0.006386784076690674, 0.006409152030944824, 0.006430463790893555, 0.006404064178466797, 0.006435103893280029, 0.006359039783477783, 0.006444831848144531, 0.006452928066253662, 0.006416831970214844, 0.0064672322273254396, 0.006660831928253174, 0.006563072204589844, 0.006522463798522949, 0.006503359794616699, 0.00643833589553833, 0.006399903774261475, 0.00644371223449707, 0.006416192054748535, 0.006373792171478271, 0.006475200176239013, 0.006541247844696045, 0.0064494719505310055, 0.006474016189575195, 0.006488736152648926, 0.006481120109558106, 0.006509344100952149, 0.006506559848785401, 0.006542751789093018, 0.006675072193145752, 0.006502304077148437, 0.00653657579421997, 0.006517375946044922, 0.006477248191833496, 0.006607456207275391, 0.006516191959381103, 0.006531583786010742, 0.006523168087005616, 0.006459167957305908, 0.006518720149993896, 0.006463456153869629, 0.00646457576751709, 0.006443999767303467, 0.006395904064178467, 0.006455264091491699, 0.0065495362281799314, 0.00653926420211792, 0.00655731201171875, 0.006567647933959961, 0.006524608135223388, 0.0066212801933288575, 0.006621600151062012, 0.006610752105712891, 0.006695583820343017, 0.006608895778656006, 0.0065803837776184084, 0.006593472003936768, 0.006525856018066406, 0.006561791896820069, 0.006569983959197998, 0.00654969596862793, 0.006645567893981933, 0.006578432083129883, 0.00648902416229248, 0.0065933442115783695, 0.006481855869293213, 0.006516511917114258, 0.006541600227355957, 0.00652288007736206, 0.006672383785247803, 0.006530816078186035, 0.00644326400756836, 0.00653107213973999, 0.006584256172180176, 0.006544832229614258, 0.0065985918045043945, 0.006598527908325195, 0.0065352959632873535, 0.006749152183532715, 0.006604512214660644, 0.006633471965789795, 0.009072704315185547, 0.008951744079589844, 0.0066471037864685055, 0.00664358377456665, 0.006638400077819824, 0.006602655887603759, 0.006628960132598877, 0.006584832191467285, 0.006567679882049561, 0.006646080017089844, 0.006571839809417725, 0.0065634560585021975, 0.0066179518699646, 0.006749216079711914, 0.006578239917755127, 0.006609568119049072, 0.006604928016662598, 0.006550816059112549, 0.006601215839385987, 0.006633471965789795, 0.0065961918830871585, 0.006673120021820069, 0.006608191967010498, 0.006639264106750488, 0.006665152072906494, 0.006711071968078614, 0.0068626880645751955, 0.006901919841766357, 0.0068178877830505375, 0.006623136043548584, 0.006723584175109864, 0.006757664203643799, 0.006635488033294678, 0.006697375774383545, 0.006669792175292968, 0.006602848052978515, 0.006631616115570068, 0.006607327938079834, 0.006604928016662598, 0.006629280090332031, 0.006585631847381592, 0.006538047790527344, 0.006577888011932373, 0.006578464031219483, 
0.0066007041931152345, 0.006625343799591064, 0.006684127807617187, 0.006551807880401612, 0.006526495933532715, 0.006570687770843506, 0.006694431781768799, 0.006572224140167236, 0.006557983875274658, 0.006576128005981445, 0.006615039825439453, 0.006551551818847656, 0.0066061439514160155, 0.006623487949371338, 0.006508063793182373, 0.006606016159057617, 0.006706240177154541, 0.006595200061798096, 0.006694943904876709, 0.00665993595123291, 0.006578688144683838, 0.0071058239936828615, 0.00653926420211792, 0.006502783775329589, 0.006554048061370849, 0.0065779838562011715, 0.006576767921447754, 0.006703104019165039, 0.006620543956756592, 0.00672166395187378, 0.006644000053405761, 0.006692543983459472, 0.006588223934173584, 0.007222015857696534, 0.006698368072509765, 0.006658656120300293, 0.006753376007080078, 0.006736639976501465, 0.006580160140991211, 0.006633120059967041, 0.00660364818572998, 0.006557727813720703, 0.0066230401992797856, 0.006658944129943847, 0.006590464115142822, 0.006669280052185059, 0.006636960029602051, 0.006574912071228027, 0.006602528095245361, 0.006750207901000976, 0.006639520168304444, 0.00667248010635376, 0.006750207901000976, 0.006612991809844971, 0.006631423950195312, 0.006617087841033936, 0.00653107213973999, 0.006692863941192627, 0.0066286721229553225, 0.006617280006408691, 0.00662988805770874, 0.006606847763061524, 0.0065450558662414554, 0.006637983798980713, 0.006584256172180176, 0.006590623855590821, 0.006590303897857666, 0.0065559039115905765, 0.0065586562156677245, 0.006533023834228516, 0.0065033278465271, 0.006569983959197998, 0.00668287992477417, 0.006702847957611084, 0.00668668794631958, 0.00666428804397583, 0.006585311889648437, 0.0066284799575805665, 0.006632991790771484, 0.0065493760108947755, 0.006648255825042725, 0.006592512130737305, 0.006569119930267334, 0.00658515214920044, 0.0068338561058044435, 0.00659830379486084, 0.0066096000671386716, 0.0065912318229675295, 0.006520448207855224, 0.006560128211975097, 0.006488031864166259, 0.006526144027709961, 0.006602719783782959, 0.006576799869537354, 0.006605375766754151, 0.006567584037780762, 0.0065129919052124025, 0.0065915842056274414, 0.006599423885345459, 0.006647615909576416, 0.006711328029632568, 0.006640927791595459, 0.0065764799118041995, 0.006730207920074463, 0.006725503921508789, 0.006615039825439453, 0.0066722240447998045, 0.006785024166107178, 0.008764639854431152, 0.008882464408874512, 0.006681248188018799, 0.00671449613571167, 0.006703264236450195, 0.0066789121627807615, 0.006621056079864502, 0.006667935848236084, 0.006678463935852051, 0.006545375823974609, 0.0066152639389038086, 0.006593440055847168, 0.006538911819458008, 0.006668416023254395, 0.006756351947784424, 0.006559455871582031, 0.006631328105926514, 0.006647456169128418, 0.00653385591506958, 0.006586656093597412, 0.006690656185150147, 0.006613152027130127, 0.006655712127685547, 0.006699007987976074, 0.0066109437942504885, 0.006603871822357178, 0.006607776165008545, 0.00657203197479248, 0.006598336219787598, 0.00650275182723999, 0.006576096057891846, 0.006679679870605469, 0.006564352035522461, 0.006619679927825928, 0.006606688022613525, 0.0065640959739685055, 0.006573887825012207, 0.006543551921844483, 0.006557439804077149, 0.006692863941192627, 0.006564064025878906, 0.006507904052734375, 0.006617216110229493, 0.00661513614654541, 0.006771103858947754, 0.006647808074951172, 0.006629695892333984, 0.0065450558662414554, 0.00665331220626831, 0.0066704959869384765, 0.0066728959083557126, 0.006624576091766358, 0.006605535984039307, 
0.006574431896209717, 0.006570943832397461, 0.006548160076141357, 0.006641215801239013, 0.0066154561042785645, 0.006525279998779297, 0.00656934404373169, 0.006575808048248291, 0.006565824031829834, 0.006605152130126953, 0.006596960067749023, 0.006615039825439453, 0.006625376224517822, 0.006603807926177979, 0.0065567679405212406, 0.006655263900756836, 0.0066128320693969725, 0.0065747199058532714, 0.006600800037384033, 0.006516799926757812, 0.006523776054382324, 0.006525023937225342, 0.006446144104003906, 0.0065352959632873535, 0.006563136100769043, 0.0064774718284606935, 0.006506944179534912, 0.006441088199615479, 0.006580063819885254, 0.006561888217926025, 0.006528416156768799, 0.006605247974395752, 0.006609312057495117, 0.006509600162506103, 0.0065198721885681155, 0.0064941120147705075, 0.006561791896820069, 0.006559552192687989, 0.006584479808807373, 0.006723648071289063, 0.0067645120620727535, 0.006600031852722168, 0.006547296047210693, 0.006587200164794922, 0.00655075216293335, 0.006541535854339599, 0.0065349440574646, 0.006506591796875, 0.006511295795440674, 0.0065413122177124024, 0.006598656177520752, 0.006579872131347656, 0.0064430079460144046, 0.006454976081848145, 0.00650486421585083, 0.0065207362174987795, 0.006510655879974365, 0.006594592094421386, 0.00649616003036499, 0.006576096057891846, 0.006586688041687012, 0.006575839996337891, 0.00662883186340332, 0.006635200023651123, 0.006529888153076172, 0.006543007850646973, 0.00648140811920166, 0.006535168170928955, 0.006574944019317627, 0.006565023899078369, 0.0066538238525390625, 0.006640384197235108, 0.006572256088256836, 0.0066109437942504885, 0.006596288204193115, 0.006565248012542725, 0.006693439960479736, 0.006602816104888916, 0.0065129599571228025, 0.0065905280113220215, 0.006555583953857422, 0.006608895778656006, 0.006645343780517578, 0.006605216026306152, 0.006585696220397949, 0.006595392227172852, 0.006535007953643799, 0.006581567764282227, 0.006547167778015137, 0.006553952217102051, 0.006607552051544189, 0.006522560119628906, 0.006516096115112305, 0.006523168087005616, 0.006451807975769043, 0.006522463798522949, 0.0065253438949584965, 0.006585824012756348, 0.006714144229888916, 0.006523871898651123, 0.006505248069763184, 0.008885343551635743, 0.008884672164916993, 0.00654099178314209, 0.006621503829956054, 0.00655353593826294, 0.006519328117370606, 0.006471007823944092, 0.0065419840812683105, 0.0064430079460144046, 0.006487103939056396, 0.006415296077728272, 0.0064345598220825195, 0.0064266881942749024, 0.006385183811187744]",tokens/s,151.2627088077493,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1015, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 840, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 467, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 397, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 252, in _flash_attention_forward attn_output_unpad = flash_attn_varlen_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 1124, in flash_attn_varlen_func return FlashAttnVarlenFunc.apply( File 
""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 620, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_varlen_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 90, in _flash_attn_varlen_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.varlen_fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 
989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.18368,6182.273024,0.0,5779.750912,5773.960192,s,1,7.355791015625,7.355791015625,0.0,7.355791015625,7.355791015625,7.355791015625,7.355791015625,[7.355791015625],,kWh,5.8339473500003195e-06,6.362910557352844e-07,1.883612617992192e-06,8.353851023727796e-06,,MB,1208.87296,6498.942976,0.0,6085.935104,6038.345728,s,10,1.9628923187255858,0.1962892318725586,0.005783484219194861,0.1967867660522461,0.20221447906494142,0.202492440032959,0.20271480880737305,"[0.18078274536132813, 0.19466166687011718, 0.1965543975830078, 0.19538697814941405, 0.19701913452148437, 0.19786326599121093, 0.19961558532714843, 0.2021527099609375, 0.19608543395996095, 0.20277040100097657]",tokens/s,1304.1978796178123,kWh,5.6316638179479325e-06,6.208970028854713e-07,3.7353074754229635e-06,9.987868296256367e-06,tokens/kWh,25631094.88497695,MB,1214.296064,6519.914496,0.0,6106.906624,6086.544896,s,10,15.969459716796875,1.5969459716796874,0.0006870539544739388,1.5971036987304688,1.5976605346679686,1.5977116760253904,1.597752589111328,"[1.595568603515625, 1.5976129150390626, 1.597649169921875, 1.5961444091796875, 1.596425537109375, 1.5967086181640624, 1.5973802490234374, 1.5977628173828125, 1.597169677734375, 1.5970377197265626]",tokens/s,39.45030146119209,kWh,4.678063920496624e-05,5.1593917243430544e-06,3.099160919497794e-05,8.293164012428723e-05,tokens/kWh,759661.8118921046,,s,630,15.967007671356189,0.025344456621200318,0.0003141162814657257,0.025300335884094236,0.025513158607482912,0.025596540832519533,0.027272398796081544,"[0.02710598373413086, 0.026234880447387695, 0.02554265594482422, 0.025235456466674806, 0.025118719100952147, 0.02509619140625, 0.0251014404296875, 0.025078655242919922, 0.0251463680267334, 0.02511769676208496, 0.025065311431884764, 0.02510857582092285, 0.02510771179199219, 0.02509292793273926, 0.025108480453491212, 0.025118688583374023, 0.025157663345336916, 0.025148479461669922, 0.02512326431274414, 0.025113088607788086, 0.025149343490600586, 0.02512086486816406, 0.025124671936035157, 0.025153472900390626, 0.025163040161132813, 0.02516886329650879, 0.025198591232299804, 0.025221120834350585, 0.02527846336364746, 0.025284608840942382, 0.02531328010559082, 0.025235456466674806, 0.02528998374938965, 0.025280927658081053, 0.02528291130065918, 0.02525916862487793, 0.02540127944946289, 0.025436288833618165, 0.025514240264892577, 0.025547487258911133, 0.025519935607910157, 0.025472896575927734, 0.02551308822631836, 0.025445024490356447, 0.025429567337036132, 0.025404191970825194, 0.025360191345214844, 0.025349695205688475, 0.025344640731811523, 0.025397247314453125, 0.025478687286376953, 0.025315807342529296, 0.02533171272277832, 0.025363487243652345, 0.02532406425476074, 0.02530963134765625, 0.025377824783325197, 0.025299936294555663, 
0.025366527557373047, 0.025605215072631835, 0.025320352554321288, 0.02531123161315918, 0.02534809684753418, 0.027326879501342775, 0.02624847984313965, 0.025598848342895508, 0.025282560348510744, 0.025143072128295897, 0.025090272903442384, 0.025133056640625, 0.02512895965576172, 0.025118719100952147, 0.02508595275878906, 0.025122304916381837, 0.025058847427368164, 0.025107423782348634, 0.02512281608581543, 0.025163040161132813, 0.025088735580444336, 0.0251329288482666, 0.025188480377197266, 0.02518016052246094, 0.026449792861938475, 0.02523478317260742, 0.02517625617980957, 0.02517452812194824, 0.02519049644470215, 0.025175264358520508, 0.025166624069213866, 0.025162975311279298, 0.025215200424194336, 0.02522480010986328, 0.025233823776245116, 0.02524627113342285, 0.02519590377807617, 0.025233760833740234, 0.025249183654785155, 0.025246591567993165, 0.025268224716186522, 0.025407615661621093, 0.025441247940063475, 0.0255631046295166, 0.025594816207885743, 0.025507776260375977, 0.02547030448913574, 0.025479808807373047, 0.02541168022155762, 0.025415679931640626, 0.025362432479858397, 0.025782047271728517, 0.025501087188720704, 0.025338560104370116, 0.025321407318115233, 0.02535238456726074, 0.02532147216796875, 0.025317375183105468, 0.02532147216796875, 0.02535424041748047, 0.02536857604980469, 0.025395200729370116, 0.02536038398742676, 0.02532921600341797, 0.025412031173706055, 0.025399295806884766, 0.025341951370239257, 0.025337440490722656, 0.027508800506591796, 0.026219648361206056, 0.025625120162963866, 0.025307455062866212, 0.02522710418701172, 0.025187711715698242, 0.025192224502563476, 0.025136127471923828, 0.025166847229003905, 0.025144319534301757, 0.02510374450683594, 0.02511311912536621, 0.02511267280578613, 0.025141248703002928, 0.025157632827758788, 0.02517196846008301, 0.025155328750610353, 0.025151744842529297, 0.025168928146362304, 0.02517910385131836, 0.025243648529052733, 0.025239551544189453, 0.02529689598083496, 0.02521049690246582, 0.025250175476074218, 0.025249792098999024, 0.025200639724731445, 0.025205888748168946, 0.02527702331542969, 0.025286527633666993, 0.02525430488586426, 0.02523971176147461, 0.025257568359375, 0.025230943679809572, 0.02522585678100586, 0.025246976852416992, 0.02536467170715332, 0.025426528930664063, 0.025492511749267577, 0.025480031967163086, 0.025510015487670897, 0.02554444885253906, 0.02550601577758789, 0.025530399322509764, 0.02550160026550293, 0.025460832595825194, 0.025446399688720703, 0.025397247314453125, 0.025380863189697265, 0.025343135833740236, 0.025340768814086916, 0.025409536361694338, 0.02535219192504883, 0.025341951370239257, 0.025395200729370116, 0.025374719619750977, 0.025374624252319337, 0.025423967361450195, 0.025444351196289062, 0.025358335494995117, 0.02535628890991211, 0.025366527557373047, 0.025380607604980468, 0.027598304748535155, 0.026225183486938478, 0.025597951889038087, 0.02532467269897461, 0.02515648078918457, 0.025121824264526367, 0.025098751068115235, 0.025149471282958986, 0.025169439315795898, 0.02511964797973633, 0.02509116744995117, 0.025121503829956055, 0.025110719680786132, 0.025107616424560546, 0.02513702392578125, 0.025082847595214845, 0.025112512588500977, 0.025174079895019533, 0.025197952270507813, 0.025171615600585937, 0.02516067123413086, 0.025208831787109375, 0.025178112030029298, 0.025158912658691406, 0.025197311401367186, 0.025202560424804687, 0.025217119216918944, 0.02522870445251465, 0.02525606346130371, 0.02525391960144043, 0.02524799919128418, 0.025237728118896484, 0.025241439819335937, 
0.02522528076171875, 0.02524736022949219, 0.02526051139831543, 0.025390176773071288, 0.02549033546447754, 0.02550783920288086, 0.025479167938232423, 0.02553241539001465, 0.025495552062988282, 0.025433984756469727, 0.025405567169189455, 0.025415679931640626, 0.025384960174560548, 0.02537264060974121, 0.025350175857543945, 0.025333759307861328, 0.02533977508544922, 0.025344255447387696, 0.02535001564025879, 0.025332832336425783, 0.025332639694213867, 0.02534809684753418, 0.025333248138427734, 0.025311744689941407, 0.025308351516723632, 0.025398080825805663, 0.025390239715576173, 0.025384832382202148, 0.02537366485595703, 0.025374719619750977, 0.027237728118896486, 0.02618003273010254, 0.025600191116333007, 0.025335840225219727, 0.025171648025512694, 0.02511417579650879, 0.02514588737487793, 0.025107807159423828, 0.025140064239501953, 0.02510374450683594, 0.025160320281982423, 0.02520627212524414, 0.02521343994140625, 0.025167871475219726, 0.025149471282958986, 0.025114591598510743, 0.025138336181640623, 0.025142112731933595, 0.025126943588256834, 0.025225183486938477, 0.025265151977539063, 0.025230335235595702, 0.02518822479248047, 0.025170047760009764, 0.025223167419433593, 0.025194496154785157, 0.025194496154785157, 0.025250911712646484, 0.025314207077026366, 0.02524515151977539, 0.02532406425476074, 0.025239551544189453, 0.02527743911743164, 0.025264511108398436, 0.025238016128540038, 0.025255327224731446, 0.02539388847351074, 0.025441471099853515, 0.02547999954223633, 0.025572864532470704, 0.02553228759765625, 0.025477760314941405, 0.025483264923095703, 0.02542521667480469, 0.025399200439453123, 0.02531407928466797, 0.02532713508605957, 0.02530073547363281, 0.025327840805053712, 0.025328128814697266, 0.025298944473266603, 0.025362432479858397, 0.025411231994628906, 0.025385311126708984, 0.025368511199951174, 0.025343488693237305, 0.025349727630615236, 0.025352256774902344, 0.02535424041748047, 0.025383840560913085, 0.025345792770385744, 0.025398944854736327, 0.025369184494018555, 0.02728656005859375, 0.026225631713867187, 0.025617536544799806, 0.02532441520690918, 0.02525935935974121, 0.025142976760864258, 0.025111520767211914, 0.025085216522216798, 0.02511907196044922, 0.025096416473388672, 0.025102495193481445, 0.025116672515869142, 0.02511052894592285, 0.025095455169677733, 0.025103071212768554, 0.025153535842895508, 0.02516377639770508, 0.02518124771118164, 0.02521388816833496, 0.025188352584838865, 0.025165824890136718, 0.02515692710876465, 0.025196704864501953, 0.025163616180419922, 0.02518806457519531, 0.025282751083374022, 0.025245792388916017, 0.025208864212036133, 0.025229984283447266, 0.025227264404296876, 0.025235456466674806, 0.025448287963867187, 0.02522332763671875, 0.025267520904541017, 0.02525868797302246, 0.025286272048950197, 0.025313568115234376, 0.025385055541992187, 0.02551126480102539, 0.025542720794677735, 0.025582176208496094, 0.025550111770629883, 0.025510623931884767, 0.025475072860717773, 0.025427679061889648, 0.02543027114868164, 0.025425952911376955, 0.025359647750854492, 0.025371360778808593, 0.025374719619750977, 0.02533580780029297, 0.02533990478515625, 0.025391103744506836, 0.025364479064941405, 0.025339839935302734, 0.025384063720703124, 0.02540230369567871, 0.025391103744506836, 0.025361984252929688, 0.025331615447998047, 0.025341791152954103, 0.02532761573791504, 0.025385663986206054, 0.027403263092041014, 0.026210111618041994, 0.02568544006347656, 0.025346271514892577, 0.02522368049621582, 0.025169919967651368, 0.025126272201538086, 
0.025127552032470704, 0.025108480453491212, 0.02512076759338379, 0.025128831863403322, 0.0251060791015625, 0.025156095504760743, 0.025221023559570312, 0.02523347282409668, 0.025161535263061523, 0.02517625617980957, 0.025192447662353516, 0.025163488388061525, 0.025166112899780272, 0.025190399169921874, 0.025231359481811523, 0.025157312393188476, 0.02512928009033203, 0.025255519866943358, 0.025261823654174804, 0.02524211120605469, 0.02519868850708008, 0.025204511642456056, 0.025237791061401366, 0.025243648529052733, 0.025214975357055663, 0.02525388717651367, 0.025295007705688478, 0.02533113670349121, 0.025276832580566407, 0.025341951370239257, 0.02549964714050293, 0.02551308822631836, 0.025485759735107423, 0.02552262306213379, 0.025486719131469725, 0.025455232620239257, 0.02548284721374512, 0.02546646308898926, 0.025438207626342774, 0.025400127410888672, 0.025405439376831054, 0.025421823501586914, 0.025384000778198242, 0.025344959259033205, 0.02539638328552246, 0.025420032501220702, 0.025410112380981446, 0.02538287925720215, 0.025359647750854492, 0.02537142372131348, 0.025364479064941405, 0.02532966423034668, 0.025425920486450194, 0.02539910316467285, 0.025344192504882814, 0.025333120346069337, 0.027189247131347655, 0.02613043212890625, 0.025632032394409178, 0.025352928161621095, 0.025214975357055663, 0.025120607376098635, 0.025104543685913087, 0.025097728729248047, 0.02518272018432617, 0.025100288391113282, 0.025108480453491212, 0.025165279388427733, 0.025202335357666014, 0.025170528411865234, 0.02514352035522461, 0.02513839912414551, 0.025503744125366212, 0.025275136947631834, 0.025204832077026368, 0.025186304092407227, 0.025214975357055663, 0.025247743606567383, 0.025189983367919923, 0.02518671989440918, 0.02520579147338867, 0.02519343948364258, 0.025192447662353516, 0.025181440353393553, 0.025266944885253908, 0.025276416778564452, 0.025233407974243165, 0.025241439819335937, 0.025245023727416993, 0.025237632751464845, 0.025305791854858397, 0.02546892738342285, 0.025556991577148438, 0.025565183639526368, 0.025554943084716796, 0.025556991577148438, 0.02557353591918945, 0.025517343521118164, 0.025502143859863283, 0.025542783737182616, 0.02546073532104492, 0.025411584854125976, 0.025345279693603517, 0.02531609535217285, 0.02536240005493164, 0.025339935302734377, 0.025393152236938478, 0.025398975372314454, 0.02539347267150879, 0.02536857604980469, 0.025382623672485352, 0.025356512069702148, 0.025405120849609376, 0.02535443115234375, 0.025338048934936522, 0.02532352066040039, 0.02530659294128418, 0.02536697578430176, 0.025413728713989257, 0.027490304946899413, 0.026365055084228515, 0.025688959121704102, 0.025372671127319335, 0.025192352294921876, 0.02517616081237793, 0.025100288391113282, 0.02507980728149414, 0.02507529640197754, 0.02511852836608887, 0.02507632064819336, 0.02509823989868164, 0.025174016952514647, 0.025279552459716796, 0.02518931198120117, 0.025124671936035157, 0.02515760040283203, 0.025190624237060547, 0.025169919967651368, 0.02515558433532715, 0.025177183151245116, 0.025146112442016602, 0.025151168823242188, 0.02518400001525879, 0.0252044792175293, 0.025228063583374025, 0.025235488891601564, 0.025202207565307617, 0.025239456176757814, 0.02520751953125, 0.025231359481811523, 0.02529280090332031, 0.025249792098999024, 0.025289823532104492, 0.025297727584838867, 0.025237600326538087, 0.025397247314453125, 0.025434112548828124, 0.02551398468017578, 0.02553558349609375, 0.02553968048095703, 0.02551379203796387, 0.02555904006958008, 0.02546073532104492, 0.025486976623535156, 
0.025368959426879882, 0.025366527557373047, 0.025362432479858397, 0.025386240005493162, 0.02532419204711914, 0.02537001609802246, 0.02531193542480469, 0.02538425636291504, 0.025365184783935547, 0.02539904022216797, 0.025377023696899415, 0.025378528594970702, 0.025334047317504882, 0.025333759307861328, 0.025419776916503906, 0.025391103744506836, 0.02538652801513672, 0.025381343841552734, 0.0273255672454834, 0.026249439239501952, 0.025653919219970702, 0.025362432479858397, 0.025200639724731445, 0.02513475227355957, 0.025146751403808593, 0.025066207885742188, 0.025085407257080077, 0.02508060836791992, 0.02510233688354492, 0.025124864578247072, 0.025143295288085937, 0.025169919967651368, 0.025148799896240234, 0.025117311477661133, 0.02511395263671875, 0.025148063659667968, 0.025124864578247072, 0.025181951522827147, 0.025219327926635744, 0.025198591232299804, 0.025187936782836914, 0.025161407470703126, 0.02522700881958008, 0.02520982360839844, 0.025192384719848634, 0.02520275115966797, 0.025255104064941407, 0.025244480133056642, 0.02528665542602539, 0.025210880279541017, 0.0252476806640625, 0.025264192581176757, 0.025229312896728515, 0.025259263992309572, 0.02541632080078125, 0.025512159347534178, 0.025542560577392577, 0.02556460762023926, 0.02553708839416504, 0.025517120361328124, 0.025489824295043945, 0.025461280822753906, 0.025441791534423826, 0.025413503646850587, 0.025420383453369142, 0.02538902473449707, 0.025366432189941408, 0.02534979248046875, 0.025426431655883788, 0.025394336700439453, 0.025362560272216797, 0.02537750434875488, 0.025433120727539064, 0.025357215881347657, 0.02540140724182129, 0.025339807510375977, 0.025411008834838867, 0.025427839279174805, 0.025398048400878906, 0.025378080368041993, 0.025379552841186523]",tokens/s,39.45635982440091,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.089472,3361.603584,0.0,2959.081472,2942.567424,s,1,7.48332470703125,7.48332470703125,0.0,7.48332470703125,7.48332470703125,7.48332470703125,7.48332470703125,[7.48332470703125],,kWh,5.939693812471584e-06,6.202933417767656e-07,2.4655575280030373e-06,9.025544682251387e-06,,MB,1306.169344,3556.63872,0.0,3141.533696,3105.830912,s,10,0.2907832317352295,0.029078323173522953,0.0014734068505134553,0.029094096183776855,0.02984312343597412,0.03139293813705444,0.03263278989791871,"[0.03294275283813477, 0.029394079208374023, 0.029498720169067384, 0.02763212776184082, 0.028015167236328124, 0.028924192428588867, 0.027534496307373046, 0.029366783142089844, 0.028210912704467773, 0.029263999938964842]",tokens/s,8803.80888789003,kWh,1.0363963198728796e-06,1.1423835506864908e-07,6.855442836595857e-07,1.836178958601114e-06,tokens/kWh,139419961.65506256,MB,1338.63424,3598.58176,0.0,3183.476736,3163.048448,s,10,10.929469970703124,1.0929469970703125,0.0029309554058756172,1.093070068359375,1.0972176757812502,1.097291943359375,1.097351357421875,"[1.0913719482421875, 1.094571044921875, 
1.092044921875, 1.097201171875, 1.0887528076171875, 1.09409521484375, 1.09108056640625, 1.0886890869140624, 1.0942969970703125, 1.0973662109375]",tokens/s,57.64231949845142,kWh,3.186588406720802e-05,3.514398218279867e-06,2.1061750891939394e-05,5.644203317742727e-05,tokens/kWh,1116189.415111209,,s,630,10.927079431533816,0.017344570526244146,0.0002578413138874363,0.017286144256591796,0.017511583900451662,0.017704074478149413,0.018600428123474122,"[0.017333696365356446, 0.017281759262084962, 0.017302848815917968, 0.017285823822021484, 0.017228799819946287, 0.01725289535522461, 0.017549152374267577, 0.017164928436279297, 0.017205024719238283, 0.01732441520690918, 0.01715184020996094, 0.017144927978515623, 0.017195775985717775, 0.017211519241333007, 0.01739369583129883, 0.017276927947998046, 0.017311071395874022, 0.017199775695800782, 0.017337503433227538, 0.017177440643310546, 0.01722572708129883, 0.017164159774780273, 0.01719487953186035, 0.017271039962768554, 0.01721343994140625, 0.01717865562438965, 0.017231840133666992, 0.017227680206298827, 0.017180383682250975, 0.017264768600463866, 0.01743596839904785, 0.0172589111328125, 0.01725289535522461, 0.017224767684936523, 0.017216447830200196, 0.017283071517944337, 0.017243360519409178, 0.01721180725097656, 0.01721945571899414, 0.017227935791015624, 0.01721993637084961, 0.017274560928344725, 0.01727519989013672, 0.017224895477294923, 0.01735353660583496, 0.017780736923217775, 0.01903817558288574, 0.017422111511230468, 0.017830080032348632, 0.017326143264770506, 0.017339712142944337, 0.017308351516723632, 0.017346559524536134, 0.017354751586914064, 0.017367040634155274, 0.01735638427734375, 0.017197471618652344, 0.017250303268432618, 0.017344160079956053, 0.017375616073608397, 0.017405920028686524, 0.017391424179077148, 0.017296960830688476, 0.018595680236816407, 0.017528831481933595, 0.017424448013305664, 0.017610143661499024, 0.01725289535522461, 0.017293312072753905, 0.01729520034790039, 0.017643680572509767, 0.017371231079101563, 0.017325983047485352, 0.017360895156860352, 0.017299455642700197, 0.017254400253295898, 0.017265727996826172, 0.017198015213012695, 0.01715100860595703, 0.017476320266723633, 0.01724435234069824, 0.017243200302124024, 0.01724723243713379, 0.017198528289794922, 0.017261119842529298, 0.0171888313293457, 0.017217567443847656, 0.01737286376953125, 0.017309568405151368, 0.017190784454345704, 0.017374975204467773, 0.017199071884155273, 0.01725116729736328, 0.017188863754272463, 0.01723103904724121, 0.017166912078857424, 0.01718230438232422, 0.01719161605834961, 0.017204704284667968, 0.01712384033203125, 0.01726025581359863, 0.01735628890991211, 0.017949472427368163, 0.017321823120117187, 0.018255231857299804, 0.01742304039001465, 0.017363040924072266, 0.017514495849609374, 0.017858367919921875, 0.017555648803710938, 0.017354751586914064, 0.017204351425170898, 0.017250175476074218, 0.017324159622192383, 0.017333120346069337, 0.017329408645629884, 0.017389631271362303, 0.017283775329589843, 0.017297407150268555, 0.017365087509155275, 0.017661855697631835, 0.017479391098022462, 0.017400096893310547, 0.017326080322265625, 0.017352703094482422, 0.0171909122467041, 0.017736799240112306, 0.017441503524780272, 0.01736307144165039, 0.01758211135864258, 0.017539072036743163, 0.017572864532470703, 0.01740083122253418, 0.01751148796081543, 0.017425344467163085, 0.017442815780639647, 0.017266687393188478, 0.017375232696533204, 0.01723391914367676, 0.017514495849609374, 0.01743824005126953, 0.017256256103515624, 0.01723459243774414, 
0.017262304306030273, 0.017170816421508788, 0.01735875129699707, 0.017139711380004884, 0.017237535476684572, 0.017297887802124025, 0.01728486442565918, 0.017251840591430666, 0.017285120010375975, 0.017236991882324217, 0.017233055114746095, 0.017276832580566406, 0.01716703987121582, 0.017254400253295898, 0.01719055938720703, 0.01738979148864746, 0.01718284797668457, 0.01721958351135254, 0.017270784378051757, 0.01751433563232422, 0.017408159255981444, 0.017369087219238282, 0.01723187255859375, 0.01732316780090332, 0.017249120712280273, 0.017252351760864256, 0.0172476806640625, 0.01761542320251465, 0.01726464080810547, 0.017223520278930662, 0.017211551666259765, 0.017391456604003906, 0.017361055374145506, 0.0172359676361084, 0.017184959411621094, 0.017206432342529297, 0.017193632125854494, 0.0174653434753418, 0.01731376075744629, 0.0172108154296875, 0.017392223358154296, 0.017421472549438478, 0.017211679458618165, 0.017271360397338866, 0.01728044891357422, 0.017709632873535157, 0.01743257522583008, 0.017649663925170898, 0.01749305534362793, 0.01756460762023926, 0.017364992141723632, 0.017362943649291994, 0.017329471588134766, 0.017245920181274414, 0.017369216918945312, 0.017455039978027345, 0.017424928665161134, 0.01725062370300293, 0.017360767364501952, 0.017313983917236327, 0.017214624404907227, 0.017438623428344728, 0.017240032196044922, 0.017140768051147462, 0.017198368072509764, 0.01720992088317871, 0.017280160903930666, 0.01717753601074219, 0.017183935165405274, 0.01724028778076172, 0.017362943649291994, 0.017177183151245116, 0.017262592315673828, 0.017200319290161133, 0.01728998374938965, 0.017578048706054686, 0.017211135864257814, 0.017725696563720705, 0.017502208709716797, 0.017341888427734375, 0.017146432876586914, 0.017108991622924806, 0.01724006462097168, 0.017143808364868163, 0.01749331283569336, 0.017281728744506834, 0.017242111206054688, 0.017346559524536134, 0.017295360565185547, 0.017663904190063476, 0.01747337532043457, 0.01715020751953125, 0.017221216201782227, 0.01718272018432617, 0.017205663681030273, 0.017270336151123045, 0.01722617530822754, 0.017625087738037108, 0.017215360641479494, 0.018291839599609373, 0.020232448577880858, 0.018571840286254884, 0.01750239944458008, 0.017554431915283202, 0.017320959091186524, 0.01729046440124512, 0.017263391494750976, 0.017346559524536134, 0.017462783813476563, 0.017479455947875977, 0.01743337631225586, 0.01742255973815918, 0.017441856384277345, 0.017417152404785155, 0.017246208190917968, 0.017219135284423828, 0.01731391906738281, 0.01727110481262207, 0.017338367462158204, 0.017235584259033203, 0.017367424011230467, 0.017158016204833985, 0.017210559844970705, 0.017220544815063476, 0.017221120834350585, 0.017227455139160155, 0.017220672607421876, 0.017216863632202147, 0.01725686454772949, 0.01729254341125488, 0.01723459243774414, 0.01756153678894043, 0.017352256774902344, 0.017275487899780274, 0.017250303268432618, 0.017278976440429687, 0.017294464111328126, 0.017206144332885743, 0.017309696197509765, 0.017180671691894533, 0.01717043113708496, 0.01725347137451172, 0.01717136001586914, 0.017188863754272463, 0.017168512344360353, 0.01729318428039551, 0.01716633605957031, 0.017264575958251954, 0.01718835258483887, 0.01718646430969238, 0.017234848022460936, 0.017389055252075195, 0.0171648006439209, 0.017332223892211913, 0.017301088333129884, 0.01731216049194336, 0.01723391914367676, 0.0172728328704834, 0.01737548828125, 0.017214496612548827, 0.017195743560791017, 0.017259679794311523, 0.017271007537841797, 0.017191551208496095, 
0.017251392364501954, 0.017275840759277343, 0.01729555130004883, 0.017135423660278322, 0.017290367126464843, 0.01721228790283203, 0.01718272018432617, 0.017920000076293945, 0.017326112747192382, 0.01747760009765625, 0.017345760345458986, 0.017558271408081055, 0.017301023483276366, 0.017350496292114256, 0.017318431854248046, 0.017309823989868165, 0.017143775939941406, 0.017285152435302733, 0.017250303268432618, 0.017250303268432618, 0.01713968086242676, 0.01721062469482422, 0.017255199432373046, 0.01713283157348633, 0.017420255661010742, 0.017586944580078125, 0.017305599212646485, 0.017374496459960937, 0.01728950309753418, 0.01720364761352539, 0.01721343994140625, 0.01720729637145996, 0.017209344863891602, 0.017274879455566407, 0.017260543823242186, 0.017106847763061525, 0.017320032119750976, 0.017489919662475584, 0.01722777557373047, 0.017254400253295898, 0.01722969627380371, 0.017240192413330076, 0.017298784255981445, 0.017340127944946288, 0.01733024024963379, 0.01742323112487793, 0.017313791275024415, 0.01722777557373047, 0.01726464080810547, 0.01738243293762207, 0.01748681640625, 0.01730316734313965, 0.017246240615844725, 0.01723734474182129, 0.017290048599243164, 0.017356512069702148, 0.017304128646850585, 0.0172010555267334, 0.018876352310180665, 0.017639455795288087, 0.017903648376464843, 0.017494016647338868, 0.017604608535766602, 0.017362943649291994, 0.017804704666137695, 0.017309951782226562, 0.017340768814086915, 0.017713151931762695, 0.017276927947998046, 0.017313791275024415, 0.01734819221496582, 0.017390239715576173, 0.01743235206604004, 0.01732592010498047, 0.017367424011230467, 0.017299455642700197, 0.01760233688354492, 0.017391679763793945, 0.01733238410949707, 0.01725222396850586, 0.017291231155395506, 0.017295520782470705, 0.017224767684936523, 0.017296319961547853, 0.017481056213378907, 0.01729193687438965, 0.01740595245361328, 0.017319936752319336, 0.017356800079345702, 0.01735481643676758, 0.017239967346191407, 0.017174016952514647, 0.01719113540649414, 0.01724006462097168, 0.017256256103515624, 0.017160415649414062, 0.017287200927734375, 0.01716044807434082, 0.01716633605957031, 0.017210592269897462, 0.01721583938598633, 0.017250751495361327, 0.017229536056518554, 0.017295263290405274, 0.017267072677612304, 0.017200639724731445, 0.017177024841308595, 0.017094207763671876, 0.017219839096069337, 0.017164640426635742, 0.017178688049316406, 0.017198879241943358, 0.017250368118286133, 0.018061567306518554, 0.017817344665527344, 0.01733577537536621, 0.01732252883911133, 0.017192960739135742, 0.017329568862915038, 0.017433183670043945, 0.017260543823242186, 0.01714316749572754, 0.018129535675048828, 0.01733737564086914, 0.017202144622802736, 0.017280704498291017, 0.01736729621887207, 0.01719856071472168, 0.017326047897338867, 0.017359167098999023, 0.017273151397705078, 0.01725644874572754, 0.01745715141296387, 0.01718454360961914, 0.017516319274902343, 0.01748249626159668, 0.017234079360961913, 0.017745759963989256, 0.01740611267089844, 0.017464767456054686, 0.017463424682617187, 0.01744326400756836, 0.017297407150268555, 0.017286527633666993, 0.017191200256347655, 0.017190528869628907, 0.017236703872680663, 0.01731977653503418, 0.0171746883392334, 0.017375232696533204, 0.01720524787902832, 0.017182271957397462, 0.017152191162109375, 0.01711087989807129, 0.017226144790649413, 0.01716223907470703, 0.017163743972778322, 0.017187007904052733, 0.017189216613769532, 0.01726464080810547, 0.01716633605957031, 0.017184511184692382, 0.017192991256713867, 0.017188959121704102, 
0.01748908805847168, 0.017187776565551757, 0.017180479049682618, 0.017291072845458985, 0.017234272003173828, 0.017207328796386718, 0.01728102493286133, 0.017262592315673828, 0.017246143341064453, 0.017206335067749025, 0.017191232681274413, 0.0172708797454834, 0.017158880233764648, 0.017356672286987306, 0.01718681526184082, 0.01719705581665039, 0.01723936080932617, 0.017229984283447266, 0.017504480361938475, 0.017322303771972657, 0.017212543487548828, 0.017169151306152344, 0.017219711303710937, 0.017320159912109376, 0.01752435111999512, 0.017428640365600587, 0.017298912048339842, 0.01721603202819824, 0.017306751251220703, 0.01726963233947754, 0.017258495330810548, 0.01728883171081543, 0.017286815643310548, 0.017459232330322264, 0.017380704879760744, 0.017316320419311523, 0.01732316780090332, 0.017376256942749024, 0.017229215621948242, 0.017226335525512695, 0.017307647705078123, 0.0173702392578125, 0.01746214485168457, 0.01735807991027832, 0.017373247146606444, 0.01724896049499512, 0.017290367126464843, 0.017339263916015625, 0.017372703552246092, 0.017349088668823242, 0.017427679061889648, 0.01721615982055664, 0.017254528045654298, 0.017285120010375975, 0.01724006462097168, 0.017208831787109375, 0.0173222713470459, 0.017221855163574218, 0.017233024597167967, 0.017200000762939455, 0.01721683120727539, 0.017392160415649414, 0.017241376876831055, 0.017457279205322265, 0.01726335906982422, 0.017330080032348632, 0.017286464691162108, 0.017412895202636718, 0.0172359676361084, 0.017317888259887695, 0.017273984909057617, 0.0171856632232666, 0.017214847564697267, 0.01726268768310547, 0.017478208541870117, 0.019017696380615234, 0.01776006317138672, 0.017354944229125976, 0.01772732734680176, 0.017424543380737303, 0.01726464080810547, 0.017268735885620116, 0.017331775665283204, 0.01732428741455078, 0.017338560104370116, 0.017481056213378907, 0.017187488555908202, 0.017512447357177736, 0.01740185546875, 0.01743788719177246, 0.017357631683349608, 0.01741414451599121, 0.017330175399780275, 0.01735875129699707, 0.017366207122802735, 0.017433631896972657, 0.017697280883789062, 0.018602367401123046, 0.017397727966308594, 0.01735385513305664, 0.017337215423583983, 0.017430368423461913, 0.017333728790283203, 0.017310400009155274, 0.01720524787902832, 0.0172541446685791, 0.017463552474975587, 0.017260543823242186, 0.017165536880493163, 0.01729596710205078, 0.017201343536376954, 0.01841971206665039, 0.017426431655883787, 0.017217536926269532, 0.01723369598388672, 0.017342687606811524, 0.017266687393188478, 0.017293312072753905, 0.01733331108093262, 0.01729206466674805, 0.01714396858215332, 0.017229824066162108, 0.017186336517333985, 0.017297056198120116, 0.017240896224975585, 0.01721855926513672, 0.017861183166503907, 0.01737772750854492, 0.017120384216308595, 0.017193632125854494, 0.017342687606811524, 0.017172479629516603, 0.017536800384521486, 0.017289440155029298, 0.017125375747680666, 0.01764352035522461, 0.01889036750793457, 0.018086271286010744, 0.017302591323852538, 0.017378240585327148, 0.01737932777404785, 0.017149375915527343, 0.017248224258422852, 0.017199712753295897, 0.017139232635498047, 0.01733635139465332, 0.01724575996398926, 0.017296255111694334, 0.017272703170776366, 0.017315967559814453, 0.01727027130126953, 0.017228288650512694, 0.017328128814697266, 0.01732147216796875, 0.017858591079711914, 0.01870604705810547, 0.017437536239624022, 0.017382911682128906, 0.017248191833496095]",tokens/s,57.65493002475303,, 
bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, 
in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 216552 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.143296,1136.525312,0.0,734.0032,709.336064,s,1,7.524234375,7.524234375,0.0,7.524234375,7.524234375,7.524234375,7.524234375,[7.524234375],,kWh,4.582706158309217e-06,4.982187847651207e-07,9.0694516999279e-07,5.9878701130671276e-06,,MB,1242.427392,1283.325952,0.0,870.31808,809.960448,s,18,0.2658582725524902,0.014769904030693901,0.00045475824673768604,0.014645263671874999,0.014809529590606689,0.01518952040672302,0.016325610074996943,"[0.01660963249206543, 0.014620160102844238, 0.014938912391662597, 0.014542336463928223, 0.01455564785003662, 0.014656831741333008, 0.014579456329345703, 0.014633024215698242, 0.014754079818725585, 0.014612799644470214, 0.014633055686950683, 0.014734432220458985, 0.014633695602416992, 0.014696576118469239, 0.014663104057312012, 0.01459552001953125, 0.01472003173828125, 0.014678976058959962]",tokens/s,17332.54322221706,kWh,5.868924519995836e-07,6.471882335564632e-08,3.858983453092603e-07,1.0375096206644901e-06,tokens/kWh,246744699.90556866,MB,1253.9904,1316.880384,0.0,905.969664,809.963008,s,18,9.790983459472654,0.5439435255262586,0.007869681854468749,0.5458271484374999,0.5507809570312501,0.5541415954589843,0.5580780749511718,"[0.5497128295898438, 0.5488601684570312, 0.5332852783203125, 0.5261934204101563, 0.5308587036132812, 0.544200439453125, 0.5451922607421875, 0.5590621948242187, 0.5446378173828125, 0.5401543579101562, 0.5352294921875, 0.5453418579101562, 0.5475609130859375, 0.5468157348632813, 0.5470938110351562, 0.5463124389648437, 0.5532732543945312, 0.547198486328125]",tokens/s,115.82084728198261,kWh,1.529923519405285e-05,1.6869534665076098e-06,7.103347412164465e-06,2.4089536072724916e-05,tokens/kWh,2615243.3907322516,,s,1134,9.782486338615426,0.008626531162800192,0.0002151725687559929,0.00863212823867798,0.008787362861633301,0.00887495527267456,0.009488454685211183,"[0.00839305591583252, 0.008549951553344726, 0.008573375701904296, 0.008495103836059571, 0.008584256172180177, 0.008524736404418946, 0.008488960266113281, 0.008674495697021485, 0.00855123233795166, 0.008581215858459473, 0.008525376319885253, 
0.00865724754333496, 0.008642560005187988, 0.009073727607727051, 0.008704959869384766, 0.008694879531860352, 0.008666015625, 0.008785823822021484, 0.008703968048095704, 0.008718463897705078, 0.008642720222473145, 0.008675168037414551, 0.008622079849243165, 0.008665087699890137, 0.009041600227355957, 0.0088722562789917, 0.008736767768859864, 0.008676447868347169, 0.008628383636474609, 0.008570943832397461, 0.008536383628845215, 0.008607168197631836, 0.00873363208770752, 0.00868518352508545, 0.008707743644714355, 0.008714976310729981, 0.008830431938171387, 0.008752863883972168, 0.008717120170593262, 0.00871401596069336, 0.008792096138000489, 0.008798399925231933, 0.00880844783782959, 0.008859487533569336, 0.008759455680847168, 0.008725791931152345, 0.008671968460083009, 0.008683520317077637, 0.00872822380065918, 0.008716640472412109, 0.008746560096740724, 0.00903212833404541, 0.008706048011779785, 0.008625887870788574, 0.008835424423217773, 0.008683584213256835, 0.008765312194824218, 0.008708127975463867, 0.008676544189453125, 0.008700703620910645, 0.008792063713073731, 0.00991436767578125, 0.00870809555053711, 0.008609503746032714, 0.008887519836425781, 0.00871072006225586, 0.008696031570434571, 0.00870809555053711, 0.008701248168945313, 0.008655296325683594, 0.008788224220275878, 0.008734623908996582, 0.008757344245910645, 0.008758496284484864, 0.00877455997467041, 0.008767359733581543, 0.008681471824645997, 0.008673279762268067, 0.008640512466430664, 0.008667136192321777, 0.008650752067565918, 0.008759296417236329, 0.008672320365905761, 0.009146783828735352, 0.008798624038696289, 0.008746496200561523, 0.008675168037414551, 0.008673215866088867, 0.008661855697631836, 0.00866431999206543, 0.008567392349243164, 0.008562848091125489, 0.00871014404296875, 0.008728575706481934, 0.008622079849243165, 0.008679424285888672, 0.008749055862426757, 0.008845312118530273, 0.008988672256469727, 0.009076224327087403, 0.008692064285278321, 0.008722751617431641, 0.00879190444946289, 0.008681280136108398, 0.008637696266174317, 0.008628479957580567, 0.008657567977905274, 0.008749088287353516, 0.008642560005187988, 0.00862822437286377, 0.008626175880432128, 0.008583168029785156, 0.008652895927429198, 0.008558496475219727, 0.008648736000061035, 0.008595423698425293, 0.008570591926574707, 0.00862236785888672, 0.008671232223510742, 0.00869375991821289, 0.00862236785888672, 0.008658592224121093, 0.008843263626098634, 0.008593471527099609, 0.008658111572265625, 0.008653632164001465, 0.008439423561096192, 0.008624223709106446, 0.008755583763122559, 0.008917119979858399, 0.008589280128479005, 0.008597536087036133, 0.00865056037902832, 0.008626367568969727, 0.008646656036376953, 0.008589311599731446, 0.008591232299804687, 0.00853551959991455, 0.008567456245422364, 0.008861696243286133, 0.008597503662109375, 0.00853929615020752, 0.008511391639709473, 0.008567520141601562, 0.008610015869140624, 0.008574239730834961, 0.008565471649169923, 0.008705535888671874, 0.008560735702514649, 0.008537568092346192, 0.008474687576293945, 0.008473471641540527, 0.008458304405212402, 0.00857596778869629, 0.008530912399291992, 0.008538111686706543, 0.008509247779846192, 0.008511679649353027, 0.00849126434326172, 0.008490752220153809, 0.008414496421813964, 0.008399583816528321, 0.008325119972229005, 0.008437760353088379, 0.008357888221740722, 0.008304736137390138, 0.008307807922363282, 0.008290623664855957, 0.008337920188903808, 0.008384511947631837, 0.008283231735229492, 0.008237983703613281, 0.008281824111938476, 0.008286175727844238, 
0.008319328308105469, 0.008275936126708985, 0.008811871528625488, 0.008359871864318847, 0.008233152389526367, 0.008260128021240234, 0.00823087978363037, 0.008216927528381348, 0.008314175605773926, 0.008221119880676269, 0.008302528381347657, 0.008226816177368163, 0.008220128059387207, 0.00824169635772705, 0.008239104270935058, 0.008194047927856446, 0.008263680458068847, 0.008265503883361816, 0.008253215789794922, 0.008243647575378418, 0.008255071640014648, 0.008231328010559083, 0.008210559844970704, 0.008193920135498047, 0.008194144248962403, 0.008142751693725586, 0.008198111534118653, 0.008183839797973633, 0.008207615852355956, 0.008184576034545898, 0.008197407722473144, 0.008223072052001953, 0.008439552307128907, 0.008259807586669922, 0.008346015930175782, 0.008377504348754884, 0.008450367927551269, 0.008269696235656739, 0.008210944175720216, 0.008269439697265625, 0.008264224052429199, 0.008532095909118652, 0.008296319961547852, 0.008308095932006836, 0.008276608467102051, 0.008273920059204102, 0.00823904037475586, 0.008458304405212402, 0.008476672172546386, 0.008415231704711914, 0.008562687873840333, 0.008385984420776368, 0.008321599960327148, 0.008645952224731445, 0.008399552345275878, 0.008365856170654298, 0.008421055793762207, 0.00828707218170166, 0.00833299160003662, 0.008278016090393067, 0.008290335655212403, 0.0083372163772583, 0.008288415908813476, 0.00850716781616211, 0.00834563159942627, 0.008392704010009766, 0.008396032333374024, 0.00900806427001953, 0.0088156156539917, 0.008545280456542969, 0.008422816276550293, 0.008333248138427734, 0.008389311790466309, 0.008330719947814941, 0.008440320014953612, 0.008335359573364258, 0.008480768203735351, 0.008407039642333984, 0.008229087829589844, 0.00845241641998291, 0.008361984252929687, 0.008347647666931152, 0.008248767852783202, 0.008368191719055175, 0.008466943740844727, 0.008470527648925781, 0.008511072158813476, 0.008500767707824706, 0.008543423652648926, 0.008614720344543457, 0.008573216438293457, 0.008653408050537109, 0.008480544090270997, 0.008398207664489746, 0.008332127571105958, 0.008335359573364258, 0.008441823959350586, 0.008381823539733886, 0.008426143646240234, 0.008447999954223634, 0.008514816284179688, 0.008428288459777831, 0.008443903923034669, 0.008580800056457519, 0.008458016395568847, 0.008575519561767579, 0.00844159984588623, 0.008382719993591308, 0.008345600128173827, 0.00829967975616455, 0.008317440032958985, 0.008327520370483399, 0.008374591827392579, 0.00834937572479248, 0.008286496162414551, 0.0084169921875, 0.008377440452575683, 0.008408063888549805, 0.008816384315490722, 0.008349023818969726, 0.008344575881958008, 0.008329024314880371, 0.008471551895141602, 0.00837020778656006, 0.008360159873962403, 0.008348416328430176, 0.008411007881164551, 0.008403072357177735, 0.008310784339904785, 0.008369600296020508, 0.008528703689575196, 0.008404735565185548, 0.008333120346069336, 0.008347071647644044, 0.00832588768005371, 0.008318495750427247, 0.008375904083251954, 0.008393600463867187, 0.008361727714538573, 0.008324992179870606, 0.008882335662841797, 0.008501728057861329, 0.008586655616760254, 0.00860547161102295, 0.008625120162963866, 0.008601119995117187, 0.008618304252624512, 0.008689023971557618, 0.008651328086853028, 0.008728575706481934, 0.008753215789794922, 0.008561856269836425, 0.008594240188598633, 0.008542207717895508, 0.008531968116760253, 0.008578559875488282, 0.008606111526489258, 0.008595552444458008, 0.008581119537353516, 0.0085830717086792, 0.00861564826965332, 0.008544639587402343, 
0.008860671997070312, 0.008635392189025879, 0.008664704322814942, 0.008778016090393066, 0.008771679878234863, 0.008665056228637695, 0.008657088279724121, 0.008595295906066894, 0.00862822437286377, 0.008808544158935547, 0.008619039535522462, 0.008670080184936523, 0.008677184104919433, 0.008820927619934082, 0.00890060806274414, 0.008671039581298829, 0.008648896217346191, 0.00858521556854248, 0.008589311599731446, 0.008696831703186036, 0.008771583557128907, 0.008641599655151367, 0.008590656280517578, 0.008572735786437989, 0.008536543846130371, 0.00848521614074707, 0.008528191566467285, 0.008730303764343262, 0.008570143699645996, 0.008615776062011719, 0.008589695930480958, 0.00859596824645996, 0.008539327621459961, 0.008661503791809083, 0.008606016159057617, 0.008675328254699707, 0.00858518409729004, 0.008547712326049804, 0.008548064231872559, 0.008501983642578126, 0.008632543563842774, 0.00858521556854248, 0.008395584106445312, 0.008614720344543457, 0.008574239730834961, 0.008553376197814941, 0.00871014404296875, 0.00860979175567627, 0.008553631782531738, 0.008643199920654297, 0.008646880149841309, 0.008691583633422852, 0.008560383796691894, 0.008562687873840333, 0.008618559837341309, 0.008539615631103515, 0.008647007942199707, 0.00862003231048584, 0.00868489646911621, 0.008658783912658691, 0.008512319564819336, 0.008575103759765624, 0.008586239814758301, 0.008468928337097168, 0.008730912208557129, 0.008460448265075684, 0.008497183799743652, 0.008433631896972656, 0.008474687576293945, 0.008974271774291992, 0.008470656394958496, 0.008490880012512208, 0.008474623680114746, 0.00841932773590088, 0.008482943534851075, 0.008507264137268067, 0.00905014419555664, 0.00856879997253418, 0.009584832191467284, 0.009099167823791504, 0.008666048049926758, 0.008593664169311524, 0.008587391853332519, 0.008714624404907226, 0.008673503875732421, 0.008654848098754882, 0.008676351547241211, 0.00867750358581543, 0.00860972785949707, 0.008663999557495116, 0.008554495811462403, 0.008619872093200684, 0.008624287605285644, 0.008630271911621093, 0.008564096450805664, 0.008528512001037597, 0.008581119537353516, 0.00859340763092041, 0.008746815681457519, 0.00887622356414795, 0.008976384162902832, 0.008759296417236329, 0.008763392448425293, 0.008833024024963379, 0.008792032241821289, 0.008505791664123534, 0.008787967681884766, 0.008769536018371582, 0.008701663970947265, 0.00866870403289795, 0.008790111541748047, 0.008882847785949707, 0.009469951629638672, 0.01010051155090332, 0.009614751815795899, 0.009032511711120605, 0.00877670383453369, 0.00880947208404541, 0.008840703964233398, 0.00885977554321289, 0.008751487731933594, 0.008720352172851562, 0.008863648414611817, 0.008737919807434082, 0.00876204776763916, 0.008963680267333985, 0.008769375801086426, 0.008816864013671875, 0.00873459243774414, 0.00869660758972168, 0.008644288063049316, 0.008620448112487793, 0.008890111923217774, 0.008814751625061035, 0.008771583557128907, 0.008811712265014648, 0.009994848251342774, 0.008759008407592774, 0.00870860767364502, 0.008755200386047364, 0.009250816345214843, 0.00903987216949463, 0.010473471641540527, 0.00909721565246582, 0.008712191581726075, 0.008742912292480469, 0.008736703872680664, 0.008726719856262208, 0.008819616317749024, 0.008689984321594239, 0.008690336227416991, 0.008747008323669434, 0.008780832290649415, 0.008776415824890136, 0.008734016418457032, 0.008745920181274415, 0.008702239990234374, 0.008686847686767578, 0.008804287910461426, 0.008845088005065917, 0.008759231567382812, 0.008769984245300294, 
0.008673664093017577, 0.008684736251831054, 0.008708928108215332, 0.008663040161132812, 0.00863203239440918, 0.008691231727600097, 0.00843388843536377, 0.008566720008850098, 0.008606111526489258, 0.008631391525268555, 0.008760224342346192, 0.008607999801635742, 0.00864844799041748, 0.008601311683654786, 0.008769344329833985, 0.008629823684692383, 0.008715359687805176, 0.008642368316650391, 0.009059712409973145, 0.008622719764709472, 0.00855235195159912, 0.00856272029876709, 0.008627903938293458, 0.008610176086425782, 0.008586976051330566, 0.008536352157592773, 0.008935423851013183, 0.009055328369140626, 0.009497568130493165, 0.008705599784851074, 0.009554304122924805, 0.00890675163269043, 0.008572928428649903, 0.008684736251831054, 0.008598336219787598, 0.008529151916503906, 0.008461055755615234, 0.008691712379455567, 0.008887680053710937, 0.008690303802490234, 0.008740863800048827, 0.009320768356323242, 0.008492351531982422, 0.008522111892700196, 0.008471648216247558, 0.008453023910522462, 0.008544256210327148, 0.008522080421447755, 0.008468128204345704, 0.00851353645324707, 0.008523776054382324, 0.00845193576812744, 0.008496512413024902, 0.008467231750488281, 0.008488960266113281, 0.008486144065856934, 0.008417823791503905, 0.008464608192443848, 0.008491007804870606, 0.008382080078125, 0.008490528106689453, 0.0085283203125, 0.00849516773223877, 0.00856220817565918, 0.008695808410644532, 0.008554783821105957, 0.008552672386169434, 0.008520095825195313, 0.008565983772277831, 0.008495327949523925, 0.00864508819580078, 0.008525823593139649, 0.008577024459838867, 0.00859340763092041, 0.009076736450195312, 0.008670975685119629, 0.008569055557250977, 0.008728608131408692, 0.008544511795043945, 0.008543999671936035, 0.008609631538391112, 0.008821151733398437, 0.008496319770812989, 0.008568639755249024, 0.008567520141601562, 0.00859273624420166, 0.008721247673034668, 0.00852121639251709, 0.00850550365447998, 0.008512960433959962, 0.008495776176452637, 0.008515680313110351, 0.00849846363067627, 0.008613632202148438, 0.008496095657348633, 0.008382464408874512, 0.008456192016601562, 0.008488320350646972, 0.008475263595581055, 0.008406911849975586, 0.008424639701843262, 0.008501888275146485, 0.008495424270629883, 0.008619839668273926, 0.008587455749511719, 0.008566911697387696, 0.008575936317443848, 0.008623040199279786, 0.008715744018554688, 0.009085087776184083, 0.008517760276794434, 0.008516063690185547, 0.008572704315185547, 0.008582943916320801, 0.008683391571044922, 0.008570879936218261, 0.008568639755249024, 0.008591391563415528, 0.008569343566894531, 0.00849715232849121, 0.008483936309814453, 0.008490143775939941, 0.008543999671936035, 0.008572928428649903, 0.008511487960815429, 0.008454143524169922, 0.008425567626953125, 0.008558496475219727, 0.008500896453857421, 0.008622431755065918, 0.00852400016784668, 0.008476320266723633, 0.008305919647216798, 0.00857487964630127, 0.008464703559875488, 0.00848249626159668, 0.008465279579162597, 0.008409343719482421, 0.008412896156311035, 0.008445504188537598, 0.008356287956237793, 0.008396800041198731, 0.008400896072387695, 0.008417280197143554, 0.008396256446838378, 0.008343135833740235, 0.008373184204101563, 0.008323231697082519, 0.008319904327392578, 0.008461312294006347, 0.008325056076049804, 0.008267775535583496, 0.008283295631408692, 0.008268351554870606, 0.008304800033569336, 0.00830678367614746, 0.008312864303588867, 0.008351743698120117, 0.008351807594299316, 0.008349632263183594, 0.008357088088989257, 0.008358495712280273, 
0.008405023574829102, 0.008587231636047363, 0.008530464172363281, 0.008392191886901856, 0.008501407623291015, 0.008435711860656739, 0.008520863533020019, 0.008559071540832519, 0.008503583908081055, 0.008538047790527343, 0.00862019157409668, 0.008632575988769531, 0.008666848182678223, 0.00867740821838379, 0.008568479537963867, 0.008507488250732421, 0.008440064430236817, 0.008482848167419434, 0.008455167770385743, 0.008450655937194825, 0.008499199867248536, 0.008527775764465333, 0.008582655906677245, 0.008855615615844727, 0.00871491241455078, 0.00852403163909912, 0.008596832275390626, 0.008585472106933594, 0.008632736206054687, 0.008775679588317872, 0.009004351615905761, 0.008911359786987304, 0.008845503807067872, 0.00858521556854248, 0.008730624198913574, 0.008680576324462891, 0.008657792091369628, 0.00860540771484375, 0.00860745620727539, 0.008659520149230958, 0.008630271911621093, 0.008646656036376953, 0.008709216117858886, 0.008665023803710937, 0.00868233585357666, 0.008734848022460938, 0.008665087699890137, 0.008599007606506347, 0.008657440185546875, 0.008689151763916016, 0.00867788791656494, 0.008676575660705567, 0.008674079895019532, 0.00869696044921875, 0.008778079986572266, 0.00867302417755127, 0.008649503707885742, 0.008652128219604493, 0.008648799896240235, 0.00866755199432373, 0.008646816253662109, 0.008601471900939942, 0.008666784286499023, 0.008640640258789063, 0.008724831581115722, 0.008890368461608887, 0.008802207946777343, 0.008656191825866699, 0.00869593620300293, 0.00864633560180664, 0.008674271583557129, 0.008572928428649903, 0.008679424285888672, 0.008656703948974609, 0.008656895637512207, 0.00866528034210205, 0.008677375793457032, 0.008681119918823242, 0.00870025634765625, 0.00852566432952881, 0.008549856185913087, 0.008680031776428223, 0.00864470386505127, 0.008630271911621093, 0.009064000129699707, 0.008524224281311036, 0.00854748821258545, 0.008492064476013184, 0.00850710391998291, 0.008539392471313476, 0.008517855644226075, 0.008615615844726563, 0.008497119903564453, 0.008647263526916504, 0.008524191856384278, 0.008464351654052734, 0.00843558406829834, 0.00858521556854248, 0.008570879936218261, 0.008627360343933105, 0.00860860824584961, 0.008544032096862792, 0.008556768417358399, 0.009021183967590333, 0.00867142391204834, 0.008630335807800293, 0.00869536018371582, 0.008577471733093261, 0.008623935699462891, 0.008630399703979491, 0.008689727783203125, 0.008649888038635254, 0.008711008071899414, 0.008691712379455567, 0.008666303634643555, 0.008636575698852539, 0.008735360145568848, 0.008687616348266602, 0.008697983741760253, 0.008632224082946777, 0.008768927574157715, 0.009025247573852539, 0.00871718406677246, 0.008749055862426757, 0.008636351585388183, 0.008590527534484863, 0.00862502384185791, 0.008728287696838378, 0.00873686408996582, 0.008687040328979493, 0.008685471534729004, 0.008631135940551758, 0.00857907199859619, 0.008607744216918945, 0.008671199798583985, 0.008723551750183106, 0.00870905590057373, 0.00862435245513916, 0.008574975967407226, 0.00858191967010498, 0.008565759658813477, 0.008839167594909669, 0.008656607627868653, 0.009003007888793945, 0.008714688301086427, 0.008648544311523438, 0.00884067153930664, 0.008725024223327637, 0.008730400085449218, 0.008724703788757324, 0.0086560640335083, 0.008734975814819336, 0.008622591972351074, 0.008632384300231933, 0.008874272346496582, 0.008707807540893555, 0.008693568229675293, 0.008677568435668945, 0.008744992256164551, 0.00847599983215332, 0.00870252799987793, 0.008671327590942383, 0.008740575790405273, 
0.008710432052612304, 0.008711199760437011, 0.008694751739501953, 0.008699456214904784, 0.008747103691101075, 0.008722528457641602, 0.00874931240081787, 0.008714240074157715, 0.0086725435256958, 0.00871116828918457, 0.008711999893188476, 0.00875273609161377, 0.008785951614379882, 0.008857888221740723, 0.008816639900207519, 0.008740032196044922, 0.008731295585632324, 0.008732831954956055, 0.008769536018371582, 0.008697855949401855, 0.008663040161132812, 0.008681376457214356, 0.008669280052185058, 0.008645855903625488, 0.008720735549926757, 0.008653247833251953, 0.008675552368164063, 0.008681247711181641, 0.00861695957183838, 0.00882960033416748, 0.00869820785522461, 0.008642560005187988, 0.008664128303527831, 0.009044927597045898, 0.008591360092163085, 0.008662752151489258, 0.008663328170776367, 0.008736127853393555, 0.008663007736206055, 0.008567456245422364, 0.008546527862548827, 0.008615712165832519, 0.008639776229858398, 0.008577759742736817, 0.00859939193725586, 0.00854751968383789, 0.00860262393951416, 0.008722304344177246, 0.008656831741333007, 0.008671232223510742, 0.008607359886169433, 0.00854911994934082, 0.008603584289550782, 0.008554335594177245, 0.00859340763092041, 0.008577024459838867, 0.008533408164978027, 0.008521920204162597, 0.008581536293029785, 0.008445216178894042, 0.008637151718139648, 0.008597503662109375, 0.008644607543945313, 0.008738816261291504, 0.008603455543518067, 0.008614080429077148, 0.008615872383117677, 0.008638527870178223, 0.008610912322998047, 0.008745599746704101, 0.008673312187194825, 0.008599807739257812, 0.008644607543945313, 0.00866425609588623, 0.008571968078613282, 0.008663007736206055, 0.00865839958190918, 0.008695839881896972, 0.008655263900756835, 0.008609663963317872, 0.00864252758026123, 0.008564607620239258, 0.008621952056884766, 0.008562975883483887, 0.008668479919433593, 0.008692416191101074, 0.008665087699890137, 0.00860364818572998, 0.008850624084472656, 0.008565823554992675, 0.008605152130126954, 0.008694047927856446, 0.008666848182678223, 0.008581119537353516, 0.008620160102844238, 0.008605855941772461, 0.009000864028930664, 0.008738911628723145, 0.008580160140991212, 0.008760064125061036, 0.008781472206115722, 0.008723072052001953, 0.008810400009155273, 0.008767264366149902, 0.008683135986328125, 0.008680031776428223, 0.008666367530822753, 0.008729344367980956, 0.00868883228302002, 0.008706879615783692, 0.008665247917175293, 0.008804191589355468, 0.00862934398651123, 0.008666015625, 0.008800479888916015, 0.008654591560363769, 0.008755264282226562, 0.008835040092468261, 0.008701120376586914, 0.008741696357727051, 0.008734880447387695, 0.00869155216217041, 0.008515583992004394, 0.008775679588317872, 0.008695808410644532, 0.008644384384155273, 0.008644831657409669, 0.008663328170776367, 0.008652064323425293, 0.00871673583984375, 0.008613887786865235, 0.00860979175567627, 0.008674431800842285, 0.008657631874084472, 0.008680959701538087, 0.008665760040283204, 0.008572192192077636, 0.008550911903381348, 0.008664959907531739, 0.008636768341064454, 0.008644607543945313, 0.008761247634887696, 0.00862012767791748, 0.008548352241516113, 0.0086080961227417, 0.00911251163482666, 0.008650752067565918, 0.008640992164611816, 0.008710623741149903, 0.008627103805541993, 0.008582015991210938, 0.008581119537353516, 0.00857907199859619, 0.008653823852539062, 0.008685824394226074, 0.008618816375732422, 0.00859126377105713, 0.008525152206420899, 0.00857910442352295, 0.008569503784179687, 0.008654848098754882, 0.008583040237426757, 0.008578207969665528, 
0.008586175918579101, 0.008556575775146484, 0.008865792274475098, 0.008672831535339355, 0.008624320030212402, 0.008681728363037109, 0.008597503662109375, 0.00961740779876709, 0.008871616363525391, 0.008683039665222167, 0.008739295959472657, 0.008709792137145997, 0.008651103973388672, 0.008593728065490722, 0.0086179838180542, 0.00862822437286377, 0.00866329574584961, 0.008581119537353516, 0.00855628776550293, 0.008554495811462403, 0.008652288436889649, 0.008748928070068359, 0.01022969627380371, 0.010399744033813477, 0.010291135787963868, 0.008769472122192383, 0.008791520118713378, 0.008641471862792969, 0.0086364164352417, 0.00876524829864502, 0.008714431762695313, 0.008677375793457032, 0.008748736381530762, 0.00864902400970459, 0.0086527681350708, 0.008622112274169922, 0.008614975929260254, 0.008731231689453126, 0.008735039710998534, 0.008775232315063476, 0.00865328025817871, 0.008792415618896484, 0.008724127769470215, 0.008712415695190429, 0.008628000259399414, 0.008635968208312987, 0.008607904434204101, 0.008601856231689454, 0.008618207931518554, 0.008629823684692383, 0.008607808113098145, 0.008622271537780762, 0.008757247924804687, 0.008714176177978516, 0.008680800437927245, 0.0086943359375, 0.008597503662109375, 0.008616095542907714, 0.00886291217803955, 0.008735136032104492, 0.008702207565307617, 0.008703488349914551, 0.008576704025268554, 0.00866547203063965, 0.008591168403625489, 0.008654751777648927, 0.008648927688598634, 0.008673184394836426, 0.008718463897705078, 0.008684160232543946, 0.00880031967163086, 0.008738752365112304, 0.008750176429748536, 0.008861599922180175, 0.008685919761657715, 0.008686240196228028, 0.008648736000061035, 0.008650431632995606, 0.008898847579956055, 0.00890880012512207, 0.008787967681884766, 0.008719488143920899, 0.008665792465209961, 0.008728416442871093, 0.008675680160522461, 0.008489472389221191, 0.008702336311340332, 0.008668479919433593, 0.008643199920654297, 0.008572064399719238, 0.008601759910583496, 0.008704095840454102, 0.008673248291015625, 0.008676032066345214, 0.008628095626831055, 0.008595583915710449, 0.00870195198059082, 0.008914943695068359, 0.008673279762268067, 0.008691712379455567, 0.008701248168945313, 0.008730719566345215, 0.008748831748962403, 0.008663871765136718, 0.008681471824645997, 0.008697855949401855, 0.00869974422454834, 0.008759455680847168, 0.00871628761291504, 0.008622271537780762, 0.008666943550109864, 0.008695967674255371, 0.008654144287109375, 0.008636096000671386, 0.0086659517288208, 0.008658944129943847, 0.00868556785583496, 0.00871235179901123, 0.00864019203186035, 0.008665247917175293, 0.008679424285888672, 0.008669440269470214, 0.008670111656188965, 0.008690784454345703, 0.008705792427062988, 0.00868671989440918, 0.00875609588623047, 0.008711615562438964, 0.008720383644104004, 0.008620608329772949, 0.0086364164352417, 0.008748031616210938, 0.008674304008483886, 0.00865385627746582, 0.008622431755065918, 0.008679936408996582, 0.008721792221069335, 0.008769856452941895, 0.00868943977355957, 0.008614560127258301, 0.00864793586730957, 0.00868182373046875, 0.008648223876953125, 0.008643263816833497, 0.008699808120727539, 0.008703424453735351, 0.008686431884765625, 0.008622079849243165]",tokens/s,115.92144990007753,, 
bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.073088,3361.603584,0.0,2959.081472,2942.567424,s,1,7.2142451171875,7.2142451171875,0.0,7.2142451171875,7.2142451171875,7.2142451171875,7.2142451171875,[7.2142451171875],,kWh,5.693647866686054e-06,6.198256191733766e-07,1.8186125659985786e-06,8.13208605185801e-06,,MB,1234.681856,3554.541568,0.0,3141.533696,3105.830912,s,10,2.4492884979248046,0.24492884979248047,0.0015972124399009026,0.2455691375732422,0.24606031646728516,0.24638543624877932,0.24664553207397463,"[0.2408026580810547, 0.2439381103515625, 0.24407405090332032, 0.2457489013671875, 0.24497152709960937, 0.2456297607421875, 0.24591635131835937, 0.24550851440429688, 0.24671055603027345, 0.24598806762695313]",tokens/s,1045.2014951154172,kWh,7.170753921646547e-06,7.905734606851327e-07,4.759062072829105e-06,1.2720389455160785e-05,tokens/kWh,20125169.980242886,MB,1259.999232,3596.484608,0.0,3183.476736,3163.057152,s,10,11.556245117187498,1.15562451171875,0.0026583802286832977,1.1560046997070312,1.158799609375,1.1588767333984373,1.1589384326171874,"[1.1529503173828124, 1.152674072265625, 1.1573560791015625, 1.158953857421875, 1.1546533203125, 1.15409912109375, 1.151314453125, 1.1575909423828126, 1.158782470703125, 1.1578704833984375]",tokens/s,54.51597760443889,kWh,3.40113942971062e-05,3.749308295390254e-06,2.2425709674970824e-05,6.0186412267467285e-05,tokens/kWh,1046747.8892084344,,s,630,11.553842758178707,0.018339432949490017,0.00022066744925829945,0.018288432121276857,0.01850968360900879,0.018639933204650877,0.019212787113189703,"[0.018479103088378905, 0.018522111892700196, 0.018247583389282226, 0.01822105598449707, 0.018153600692749024, 0.018276351928710938, 0.018547712326049806, 0.018418655395507813, 0.018281984329223632, 0.01830121612548828, 0.018198463439941408, 0.018207008361816407, 0.018182144165039063, 0.018435583114624024, 0.018315391540527345, 0.018265695571899415, 0.018616960525512694, 0.018380352020263672, 0.0182479362487793, 0.01864249610900879, 0.018281248092651366, 0.018397184371948243, 0.018331647872924805, 0.018212608337402344, 0.018358463287353514, 0.018217023849487306, 0.018301023483276366, 0.01823753547668457, 0.01828022384643555, 0.018274623870849608, 0.018384096145629882, 0.01828505516052246, 0.018199775695800783, 0.018244384765625, 0.018289920806884765, 0.018252288818359375, 0.018264320373535155, 0.018378335952758788, 0.018385311126708985, 0.018286624908447267, 0.018261983871459962, 0.018274303436279296, 0.01827840042114258, 0.01820899200439453, 0.018163103103637696, 0.01818867111206055, 0.018298240661621095, 0.01833024024963379, 0.01819647979736328, 0.018241792678833007, 0.018245344161987306, 0.01819241523742676, 0.018234655380249022, 0.018185184478759765, 0.01865513610839844, 0.018337215423583984, 0.018325023651123047, 0.018170751571655274, 0.018182239532470702, 0.018175872802734375, 0.018165504455566406, 0.01832374382019043, 
0.01827840042114258, 0.01907711982727051, 0.01862860870361328, 0.01860940742492676, 0.018343967437744142, 0.018217695236206054, 0.018218015670776366, 0.018671615600585938, 0.01820483207702637, 0.018330432891845702, 0.018135040283203126, 0.01818009567260742, 0.01817807960510254, 0.018654848098754884, 0.018265600204467772, 0.018379615783691405, 0.01823539161682129, 0.018183231353759766, 0.018494400024414062, 0.01840332794189453, 0.018405376434326173, 0.018286880493164064, 0.018244415283203124, 0.018172832489013673, 0.01820262336730957, 0.018280448913574218, 0.01812895965576172, 0.01812268829345703, 0.018192319869995116, 0.01818844795227051, 0.01825155258178711, 0.018319488525390625, 0.018241535186767577, 0.018550111770629884, 0.018194368362426758, 0.01828681564331055, 0.018341535568237304, 0.018207584381103516, 0.018228448867797852, 0.01816160011291504, 0.01832192039489746, 0.0181824951171875, 0.01829478454589844, 0.018146623611450197, 0.018227903366088868, 0.018124191284179688, 0.018258527755737306, 0.018173152923583985, 0.01817884826660156, 0.018122112274169922, 0.018350719451904297, 0.018269216537475586, 0.01822015953063965, 0.018509664535522462, 0.018423807144165038, 0.018192575454711913, 0.01824348831176758, 0.018173791885375976, 0.01815135955810547, 0.018263872146606446, 0.01838483238220215, 0.01818867111206055, 0.01840332794189453, 0.018214879989624025, 0.01965225601196289, 0.018534784317016603, 0.018490623474121094, 0.01848784065246582, 0.01826665687561035, 0.018228992462158203, 0.01815750312805176, 0.01822105598449707, 0.018392255783081055, 0.018320192337036134, 0.018176000595092775, 0.01866864013671875, 0.018434656143188476, 0.018350400924682618, 0.018485151290893554, 0.01830512046813965, 0.018370559692382812, 0.018257919311523436, 0.01827987289428711, 0.0185533447265625, 0.018251840591430663, 0.018305023193359374, 0.01827020835876465, 0.018313152313232422, 0.018343231201171876, 0.018467584609985353, 0.01830860710144043, 0.019081727981567383, 0.018505727767944336, 0.018425247192382813, 0.018358047485351563, 0.018248512268066407, 0.018313215255737304, 0.018224159240722657, 0.018409696578979493, 0.018406143188476564, 0.01856697654724121, 0.018346176147460938, 0.018559200286865234, 0.018749280929565428, 0.018374591827392577, 0.018435232162475584, 0.018392127990722658, 0.01826793670654297, 0.018398784637451173, 0.018309568405151366, 0.01832569694519043, 0.018196384429931642, 0.018189823150634766, 0.0181760311126709, 0.018452735900878908, 0.018323616027832033, 0.01829270362854004, 0.01813827133178711, 0.018179967880249025, 0.018213632583618165, 0.01821023941040039, 0.01815376091003418, 0.01816422462463379, 0.01821900749206543, 0.01821696090698242, 0.018151424407958985, 0.018259967803955078, 0.01873302459716797, 0.018313056945800783, 0.01824166488647461, 0.018292448043823243, 0.018114336013793947, 0.018298847198486328, 0.018344383239746093, 0.018216320037841797, 0.01822591972351074, 0.018272607803344727, 0.01859452819824219, 0.019100608825683593, 0.01877952003479004, 0.018291328430175783, 0.018290687561035156, 0.018362367630004883, 0.01817580795288086, 0.01827449607849121, 0.018382272720336913, 0.018436031341552736, 0.018355871200561525, 0.018125568389892578, 0.0183055362701416, 0.01827974319458008, 0.01814569664001465, 0.018095775604248045, 0.018137439727783203, 0.018216672897338866, 0.01875119972229004, 0.02043961524963379, 0.01836390495300293, 0.018332160949707032, 0.018247264862060547, 0.018454944610595703, 0.01817388725280762, 0.018200639724731446, 0.0181362247467041, 
0.018150239944458007, 0.01813199996948242, 0.018259967803955078, 0.018197471618652344, 0.018218751907348632, 0.018277631759643555, 0.0182608642578125, 0.020865407943725586, 0.018343679428100584, 0.018342016220092773, 0.018392959594726564, 0.018294048309326173, 0.018289472579956053, 0.01831235122680664, 0.018203392028808593, 0.01827561569213867, 0.01824959945678711, 0.01824444770812988, 0.018315263748168945, 0.018282815933227538, 0.018334527969360352, 0.018229183197021485, 0.018219968795776368, 0.018675712585449217, 0.018478496551513672, 0.01837321662902832, 0.019398656845092774, 0.018589696884155273, 0.018284543991088868, 0.018291872024536134, 0.018223968505859375, 0.018272512435913085, 0.018530271530151368, 0.01843356704711914, 0.018346399307250977, 0.018990463256835937, 0.018213151931762695, 0.018323295593261717, 0.01824627113342285, 0.018230207443237306, 0.01817888069152832, 0.01828041648864746, 0.018367935180664062, 0.018180639266967773, 0.018310335159301756, 0.018271072387695313, 0.01836412811279297, 0.018227487564086913, 0.01863680076599121, 0.018272127151489258, 0.018175136566162108, 0.01826710319519043, 0.018165567398071288, 0.01823347282409668, 0.018210880279541014, 0.018282495498657226, 0.018272256851196288, 0.01846633529663086, 0.018235872268676758, 0.01820163154602051, 0.01820911979675293, 0.01825436782836914, 0.018281824111938478, 0.01814374351501465, 0.018337215423583984, 0.018350912094116212, 0.018364416122436524, 0.018333215713500977, 0.0183034553527832, 0.018165760040283203, 0.01838275146484375, 0.018393184661865233, 0.01820467185974121, 0.01822105598449707, 0.018251039505004882, 0.01821897506713867, 0.01820572853088379, 0.01815727996826172, 0.018146335601806642, 0.01814790344238281, 0.01823910331726074, 0.01842252731323242, 0.01852630424499512, 0.018484352111816406, 0.018508832931518556, 0.018167583465576172, 0.018313215255737304, 0.018460479736328125, 0.018249919891357422, 0.01923904037475586, 0.01852524757385254, 0.018446783065795898, 0.018442623138427733, 0.018274303436279296, 0.01823481559753418, 0.01830131149291992, 0.018167999267578124, 0.01823468780517578, 0.018417951583862304, 0.01824732780456543, 0.01817795181274414, 0.018379615783691405, 0.018194271087646485, 0.018270368576049804, 0.018208255767822267, 0.018357887268066406, 0.01827315139770508, 0.018296415328979493, 0.018204063415527345, 0.01844326400756836, 0.018223104476928712, 0.018481151580810547, 0.018330783843994142, 0.018254783630371092, 0.018279647827148436, 0.018341760635375976, 0.018205184936523438, 0.01848966407775879, 0.0183023681640625, 0.018280479431152345, 0.018354240417480468, 0.018347648620605467, 0.01826700782775879, 0.01831907272338867, 0.018339616775512695, 0.018509855270385744, 0.018452447891235353, 0.018321184158325194, 0.018445024490356444, 0.018394784927368166, 0.01823369598388672, 0.01830828857421875, 0.0182640323638916, 0.018320575714111328, 0.018124448776245118, 0.018274303436279296, 0.01814681625366211, 0.018387327194213866, 0.01819251251220703, 0.018253311157226563, 0.018131135940551758, 0.018274944305419923, 0.018215744018554688, 0.018242431640625, 0.018431999206542968, 0.018268159866333008, 0.018156831741333007, 0.018133535385131835, 0.018169727325439453, 0.018323776245117187, 0.018122112274169922, 0.018614912033081056, 0.018833087921142577, 0.018403648376464844, 0.018292736053466797, 0.01835126495361328, 0.018322111129760742, 0.018221120834350586, 0.018206815719604492, 0.018163711547851562, 0.018191648483276368, 0.018155391693115235, 0.018195423126220703, 0.01820044708251953, 
0.01824687957763672, 0.018273056030273436, 0.018198528289794923, 0.018294048309326173, 0.01839516830444336, 0.01836016082763672, 0.018282943725585938, 0.01833625602722168, 0.018207807540893555, 0.018215776443481446, 0.01822719955444336, 0.018321407318115233, 0.018266111373901366, 0.018224767684936524, 0.018303359985351562, 0.018198463439941408, 0.01827337646484375, 0.018234079360961913, 0.018229503631591797, 0.01831513595581055, 0.01821072006225586, 0.018278112411499025, 0.018210655212402345, 0.01824835205078125, 0.01811984062194824, 0.01824444770812988, 0.01818828773498535, 0.018243200302124025, 0.01821468734741211, 0.01815216064453125, 0.01824345588684082, 0.01832691192626953, 0.018288415908813478, 0.01828950309753418, 0.01829814338684082, 0.018236127853393555, 0.018242816925048828, 0.018336511611938475, 0.018268159866333008, 0.018277759552001952, 0.01828233528137207, 0.01824620819091797, 0.018273792266845702, 0.01841961669921875, 0.018349184036254882, 0.018154176712036132, 0.018312192916870116, 0.01820467185974121, 0.018233343124389647, 0.01848320007324219, 0.018259967803955078, 0.019148511886596678, 0.01851215934753418, 0.018399200439453124, 0.0182807674407959, 0.01837788772583008, 0.018249664306640625, 0.01833843231201172, 0.018190336227416993, 0.01831292724609375, 0.01835651206970215, 0.018233440399169923, 0.018494720458984374, 0.018257696151733397, 0.01823833656311035, 0.018261600494384765, 0.01833193588256836, 0.018472543716430666, 0.019034656524658203, 0.01836031913757324, 0.018484832763671875, 0.01842198371887207, 0.01850592041015625, 0.018343936920166014, 0.018296831130981444, 0.018315231323242188, 0.018251808166503906, 0.018372480392456054, 0.018443424224853514, 0.018328544616699218, 0.01824563217163086, 0.01827849578857422, 0.01838275146484375, 0.01842812728881836, 0.01842767906188965, 0.018259424209594727, 0.018215456008911134, 0.01827168083190918, 0.01835420799255371, 0.018354719161987304, 0.018372608184814454, 0.018249727249145507, 0.018782207489013672, 0.018321311950683594, 0.018505823135375975, 0.018294240951538084, 0.018387487411499023, 0.018341312408447264, 0.018268287658691405, 0.018674112319946288, 0.018257919311523436, 0.018266368865966796, 0.018409215927124023, 0.018339839935302735, 0.018335744857788085, 0.018397087097167968, 0.018323551177978514, 0.018224704742431642, 0.01823583984375, 0.018221376419067382, 0.018267839431762696, 0.018458528518676756, 0.018251871109008787, 0.01833580780029297, 0.018531744003295898, 0.018340192794799804, 0.019412351608276368, 0.01825027275085449, 0.018311519622802735, 0.018406976699829103, 0.01825017547607422, 0.018204992294311523, 0.01824287986755371, 0.018376768112182616, 0.01842857551574707, 0.018408256530761717, 0.01830588722229004, 0.018355871200561525, 0.018305631637573243, 0.01834163284301758, 0.018350080490112306, 0.018814111709594728, 0.018399648666381836, 0.018319711685180665, 0.018275583267211914, 0.018279264450073242, 0.018288000106811524, 0.01842854309082031, 0.018337791442871093, 0.018343711853027345, 0.01821833610534668, 0.018143104553222655, 0.018389856338500977, 0.018242752075195313, 0.01832035255432129, 0.01861561584472656, 0.018424287796020507, 0.01825404739379883, 0.018278656005859376, 0.018195423126220703, 0.01832963180541992, 0.01825868797302246, 0.018282495498657226, 0.018229248046875, 0.0183723201751709, 0.01822870445251465, 0.018286783218383788, 0.01824246406555176, 0.018453535079956056, 0.019060415267944338, 0.018260576248168944, 0.018280256271362306, 0.0183404483795166, 0.018413055419921876, 
0.01832931137084961, 0.018209568023681642, 0.018724000930786133, 0.018527072906494142, 0.018525856018066406, 0.018551136016845705, 0.01837286376953125, 0.018897760391235353, 0.018422880172729493, 0.01827724838256836, 0.018620576858520508, 0.018369312286376952, 0.018266111373901366, 0.01925334358215332, 0.01856716728210449, 0.018513919830322266, 0.01843814468383789, 0.01837004852294922, 0.018438655853271483, 0.018356224060058594, 0.01879654312133789, 0.018558879852294922, 0.018603872299194336, 0.018312576293945313, 0.018350976943969727, 0.018404863357543946, 0.018287103652954103, 0.01823744010925293, 0.018315263748168945, 0.018257919311523436, 0.01820876884460449, 0.01830233573913574, 0.018307584762573242, 0.018276384353637695, 0.01824336051940918, 0.018630271911621095, 0.01826652717590332, 0.018215295791625976, 0.018304927825927735, 0.018333696365356447, 0.018276351928710938, 0.01820419120788574, 0.018299360275268555, 0.01822332763671875, 0.01832668876647949, 0.01824371147155762, 0.01825654411315918, 0.018214752197265625, 0.01817977523803711, 0.01829327964782715, 0.0183621768951416, 0.01835977554321289, 0.018352640151977538, 0.018425600051879883, 0.018323711395263672, 0.0183110408782959, 0.01844646453857422, 0.018323360443115236, 0.01828188705444336, 0.01847488021850586, 0.018288448333740236, 0.018275327682495117, 0.018792448043823243, 0.01845039939880371, 0.018423839569091795, 0.01842995262145996, 0.018351232528686524, 0.018321823120117188, 0.018444095611572266, 0.0182893123626709, 0.018280384063720703, 0.018343839645385742, 0.018253664016723632, 0.018302879333496093, 0.01847542381286621, 0.018529760360717774]",tokens/s,54.52731296295658,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) 
File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.495552,576.585728,0.0,174.063616,172.57984,s,1,7.156181640625,7.156181640625,0.0,7.156181640625,7.156181640625,7.156181640625,7.156181640625,[7.156181640625],,kWh,3.916740741594064e-06,4.2461246951947413e-07,8.936118259944559e-07,5.234965037107994e-06,,MB,1208.475648,647.888896,0.0,234.881024,215.589888,s,29,0.29287353324890136,0.010099087353410394,9.584351436484095e-05,0.01006924819946289,0.01017704963684082,0.010356608009338379,0.010383823204040528,"[0.010386143684387207, 0.010119423866271973, 0.0100382080078125, 0.010377856254577636, 0.010011775970458985, 0.010013567924499511, 0.010018943786621094, 0.010044256210327148, 0.010046272277832032, 0.010084768295288087, 0.010040767669677734, 0.01006553554534912, 0.010102175712585449, 0.010118207931518554, 0.010068991661071777, 0.01002079963684082, 0.010115967750549317, 0.010047391891479492, 0.01005561637878418, 0.010080351829528808, 0.01009347152709961, 0.010324735641479493, 0.01006924819946289, 0.010053088188171386, 0.010105728149414062, 0.010072735786437989, 0.010093759536743165, 0.010063615798950196, 0.010140128135681152]",tokens/s,25348.825199888037,kWh,3.195796379604222e-07,3.5232683455437867e-08,2.1110489779212171e-07,5.659172192079817e-07,tokens/kWh,452362980.50495756,MB,1233.125376,652.0832,0.0,239.075328,215.592448,s,29,9.937518737792969,0.34267305992389546,0.002220787107794558,0.34222491455078125,0.3455348388671875,0.3477741027832031,0.34885826293945316,"[0.3489383544921875, 0.344791748046875, 0.3397941589355469, 0.34003768920898436, 0.3407797546386719, 0.34202984619140625, 0.3409314880371094, 0.3414396667480469, 0.3419881591796875, 0.3410867004394531, 0.34223199462890624, 0.34151693725585935, 0.3426039733886719, 0.34164321899414063, 0.3402996520996094, 0.3416507873535156, 0.34114602661132815, 0.3426809387207031, 0.3429288940429687, 0.34196585083007813, 0.346456787109375, 0.34865231323242185, 0.34439285278320314, 0.34530435180664065, 0.34251788330078126, 0.3424970092773437, 0.3426786804199219, 0.34222491455078125, 0.34230810546875]",tokens/s,183.8487099452514,kWh,9.770422533307052e-06,1.0775193864089368e-06,3.693246884690788e-06,1.4541188804406778e-05,tokens/kWh,4332520.597002877,,s,1827,9.924065372467053,0.0054318912821385,0.0001407195749520907,0.005409215927124024,0.005494783878326416,0.00556069450378418,0.006046374921798707,"[0.005272607803344726, 0.0056432318687438966, 0.005591040134429932, 0.005555424213409424, 0.005577248096466064, 0.005490047931671143, 0.005440544128417968, 0.005453504085540771, 0.00543503999710083, 0.005488895893096924, 0.005447679996490478, 0.005439455986022949, 0.0055155520439147945, 0.005422783851623535, 0.00545414400100708, 0.0054906878471374515, 0.005750688076019287, 0.007095615863800048, 0.0071421761512756345, 0.0072000322341918945, 
0.006369344234466553, 0.0054561920166015625, 0.005451327800750733, 0.005426176071166992, 0.005442527770996094, 0.005399360179901123, 0.00536518383026123, 0.0054152002334594725, 0.005556320190429688, 0.005386335849761963, 0.005433440208435059, 0.00540169620513916, 0.005378015995025635, 0.005467072010040284, 0.005373280048370361, 0.005410783767700196, 0.005393087863922119, 0.005371903896331787, 0.005416512012481689, 0.005384511947631836, 0.005400703907012939, 0.0054373440742492675, 0.005380191802978516, 0.005371007919311523, 0.0054301438331604, 0.005355616092681884, 0.005357471942901612, 0.005421216011047363, 0.0053574080467224125, 0.005356768131256104, 0.0054230718612670896, 0.0053686718940734865, 0.0053812160491943355, 0.005497727870941162, 0.005392384052276611, 0.005390175819396973, 0.0053927040100097655, 0.005343071937561035, 0.005377056121826172, 0.005567455768585205, 0.005451680183410645, 0.0054438719749450686, 0.005387616157531738, 0.005120607852935791, 0.005389632225036621, 0.006074880123138428, 0.005375775814056396, 0.005365983963012695, 0.005416255950927734, 0.0053705921173095705, 0.005406688213348389, 0.005387680053710937, 0.005354080200195312, 0.0054061121940612794, 0.00539247989654541, 0.00538812780380249, 0.005434336185455323, 0.005375679969787598, 0.0053821439743042, 0.005420544147491455, 0.0053621759414672855, 0.005378047943115235, 0.005420832157135009, 0.005401855945587159, 0.005364223957061768, 0.005414527893066406, 0.005351967811584472, 0.00539680004119873, 0.00555017614364624, 0.006592415809631348, 0.006451200008392334, 0.006731776237487793, 0.0054098558425903324, 0.0054011201858520505, 0.0054560317993164065, 0.005433599948883056, 0.005440512180328369, 0.005399104118347168, 0.005378079891204834, 0.005440127849578857, 0.005383967876434326, 0.00538812780380249, 0.0054204797744750975, 0.005419104099273681, 0.00538483190536499, 0.005433119773864746, 0.005410272121429444, 0.005393152236938477, 0.005416959762573242, 0.0053637118339538575, 0.005361152172088623, 0.005511680126190186, 0.005375520229339599, 0.005396959781646729, 0.005478559970855713, 0.005377888202667236, 0.005406720161437988, 0.005368127822875977, 0.005377439975738525, 0.005496543884277344, 0.005391039848327637, 0.0053851518630981445, 0.005415840148925781, 0.005354688167572021, 0.0053787841796875, 0.0054141440391540525, 0.005142240047454834, 0.005371903896331787, 0.005360896110534668, 0.0054423041343688965, 0.005381504058837891, 0.005350304126739502, 0.005424863815307617, 0.005373888015747071, 0.00536787223815918, 0.005414271831512451, 0.005339776039123535, 0.00536575984954834, 0.005400703907012939, 0.005370975971221924, 0.00535427188873291, 0.005409887790679932, 0.005361728191375733, 0.005409503936767578, 0.005367167949676514, 0.005341856002807617, 0.005412960052490234, 0.005447679996490478, 0.005352543830871582, 0.005387167930603027, 0.005340479850769043, 0.005361824035644531, 0.005406847953796387, 0.00539689588546753, 0.0053712959289550785, 0.005384384155273437, 0.005360032081604004, 0.005375616073608399, 0.005409152030944824, 0.00536950397491455, 0.00534768009185791, 0.005404799938201904, 0.005357664108276367, 0.005357344150543213, 0.005418591976165772, 0.005368480205535889, 0.005437183856964112, 0.00541103982925415, 0.005351359844207764, 0.005381984233856201, 0.005541056156158447, 0.005368639945983886, 0.005420959949493408, 0.005361375808715821, 0.005398880004882813, 0.005429279804229736, 0.005359327793121338, 0.005372191905975342, 0.005550303936004639, 0.005379936218261719, 0.005357503890991211, 
0.00542310380935669, 0.005384191989898681, 0.005363552093505859, 0.005430431842803955, 0.005385216236114502, 0.005384511947631836, 0.005476319789886474, 0.0053955202102661136, 0.005162720203399658, 0.0054403839111328125, 0.0054081277847290036, 0.0054197759628295894, 0.005453695774078369, 0.005381855964660644, 0.005466015815734864, 0.005443967819213867, 0.005398208141326904, 0.005357888221740723, 0.005433343887329102, 0.005383456230163574, 0.005365471839904785, 0.0054448962211608885, 0.005387968063354493, 0.005396575927734375, 0.005402560234069824, 0.005375679969787598, 0.005496640205383301, 0.005380288124084472, 0.0053556480407714845, 0.00539408016204834, 0.0053738560676574705, 0.005364640235900879, 0.005424352169036866, 0.005375967979431152, 0.0053777599334716795, 0.005415328025817871, 0.005353536128997803, 0.005421440124511719, 0.005400383949279785, 0.005351903915405273, 0.005368639945983886, 0.00539737606048584, 0.0053637118339538575, 0.005381631851196289, 0.0054297599792480465, 0.0053637118339538575, 0.005375232219696045, 0.005415584087371826, 0.005345344066619873, 0.005417151927947998, 0.005400415897369385, 0.005404672145843506, 0.0054081277847290036, 0.005397088050842285, 0.0053528637886047364, 0.005410880088806152, 0.0054421119689941405, 0.00536678409576416, 0.005377024173736572, 0.005359615802764893, 0.005398528099060058, 0.00539792013168335, 0.005372831821441651, 0.005348480224609375, 0.005415487766265869, 0.005375391960144043, 0.005382912158966064, 0.005378015995025635, 0.005359039783477783, 0.005366208076477051, 0.005410816192626953, 0.005123968124389648, 0.005355199813842773, 0.005353919982910156, 0.005416895866394043, 0.005353536128997803, 0.005376224040985107, 0.005414527893066406, 0.005350944042205811, 0.005356160163879395, 0.0054514241218566895, 0.005388768196105957, 0.005616608142852783, 0.005423903942108154, 0.00565225601196289, 0.005465568065643311, 0.0056984319686889644, 0.00536352014541626, 0.005389599800109863, 0.005378880023956299, 0.005399968147277832, 0.005396927833557129, 0.005366015911102295, 0.0054414401054382324, 0.00536518383026123, 0.0053678078651428224, 0.0053951039314270016, 0.00537824010848999, 0.00537500810623169, 0.005413663864135742, 0.00535475206375122, 0.005385216236114502, 0.005385568141937256, 0.005400608062744141, 0.005375616073608399, 0.005413631916046143, 0.005378047943115235, 0.005371359825134278, 0.005414495944976807, 0.005446464061737061, 0.0053695359230041505, 0.005425600051879883, 0.0053678078651428224, 0.00541107177734375, 0.005404448032379151, 0.005384160041809082, 0.005419007778167725, 0.005402304172515869, 0.005394271850585938, 0.005412992000579834, 0.005366112232208252, 0.0053821439743042, 0.005405951976776123, 0.005374559879302979, 0.005398687839508057, 0.00551910400390625, 0.005394591808319092, 0.005381760120391846, 0.00539302396774292, 0.005373792171478271, 0.005364895820617676, 0.005416895866394043, 0.0053769278526306155, 0.005415135860443115, 0.00511030387878418, 0.005386303901672363, 0.005390336036682129, 0.005384191989898681, 0.005426976203918457, 0.005392416000366211, 0.005396448135375977, 0.005383456230163574, 0.0053853440284729005, 0.005419072151184082, 0.005385983943939209, 0.00547430419921875, 0.005593183994293213, 0.0053820481300354, 0.005388288021087646, 0.005431007862091064, 0.005375391960144043, 0.005383039951324463, 0.005576032161712647, 0.005611648082733154, 0.005437888145446777, 0.005410912036895752, 0.005396480083465576, 0.005453824043273926, 0.005404928207397461, 0.005398272037506103, 0.00545084810256958, 
0.005395359992980957, 0.005399712085723877, 0.005435872077941895, 0.005390719890594482, 0.005346816062927246, 0.005427711963653564, 0.005406720161437988, 0.0055368318557739256, 0.00540342378616333, 0.005376160144805908, 0.005429247856140137, 0.005388063907623291, 0.005392096042633057, 0.0054440641403198245, 0.0053985600471496585, 0.0054906878471374515, 0.005509119987487793, 0.005386240005493164, 0.005395711898803711, 0.005411456108093262, 0.005375872135162354, 0.0053656320571899415, 0.005433440208435059, 0.005538176059722901, 0.005461919784545899, 0.005405888080596923, 0.005378880023956299, 0.005459424018859863, 0.005405216217041015, 0.005398496150970459, 0.005431583881378174, 0.005399295806884766, 0.005397183895111084, 0.005443679809570313, 0.0053846721649169925, 0.005691135883331299, 0.0051019201278686525, 0.005408383846282959, 0.005368031978607178, 0.005359615802764893, 0.005404672145843506, 0.005365056037902832, 0.005413695812225342, 0.00539961576461792, 0.005731135845184326, 0.005449728012084961, 0.005445631980895996, 0.005396480083465576, 0.005439360141754151, 0.0053712639808654785, 0.00539631986618042, 0.0054148478507995606, 0.0053916478157043455, 0.005362592220306397, 0.0054198079109191895, 0.0053821439743042, 0.005393663883209229, 0.005385087966918945, 0.005357439994812012, 0.0053955202102661136, 0.005383103847503662, 0.005375648021697998, 0.005423520088195801, 0.005380127906799316, 0.005375904083251953, 0.0054065918922424315, 0.005383456230163574, 0.005423967838287354, 0.0054345598220825195, 0.005444416046142578, 0.00536575984954834, 0.005402624130249023, 0.0053637118339538575, 0.005386240005493164, 0.005493824005126953, 0.00539353609085083, 0.005591072082519531, 0.005397791862487793, 0.005390848159790039, 0.005420256137847901, 0.005374752044677735, 0.005355519771575928, 0.005416319847106933, 0.00559987211227417, 0.005378047943115235, 0.0054414401054382324, 0.005372255802154541, 0.005377632141113281, 0.005404831886291504, 0.005358975887298584, 0.00536025619506836, 0.0054243202209472655, 0.005440512180328369, 0.005459775924682617, 0.005402527809143067, 0.005376416206359863, 0.005387968063354493, 0.005375360012054443, 0.00538483190536499, 0.005091872215270996, 0.005423200130462646, 0.005395679950714111, 0.005408864021301269, 0.00562275218963623, 0.005498591899871826, 0.005420256137847901, 0.005537983894348145, 0.005421664237976074, 0.005386240005493164, 0.00542310380935669, 0.005379551887512207, 0.005409311771392822, 0.00542310380935669, 0.005367712020874023, 0.00542464017868042, 0.005392992019653321, 0.0053842878341674805, 0.005447519779205322, 0.005388351917266845, 0.005398272037506103, 0.005437600135803223, 0.0053981437683105465, 0.005363327980041504, 0.005417823791503906, 0.005388288021087646, 0.005383679866790772, 0.005442048072814941, 0.005459968090057373, 0.00537395191192627, 0.005510816097259522, 0.005402207851409912, 0.005430016040802002, 0.005402207851409912, 0.00540451192855835, 0.005417183876037597, 0.005411168098449707, 0.005406720161437988, 0.005430560111999511, 0.005511616230010986, 0.005392127990722656, 0.005435935974121094, 0.0053731842041015625, 0.005395008087158203, 0.005447872161865234, 0.005375999927520752, 0.0054271998405456545, 0.0054020161628723145, 0.005378975868225098, 0.005406303882598877, 0.005399744033813477, 0.005403552055358887, 0.005422239780426026, 0.00538259220123291, 0.0053825597763061525, 0.005453824043273926, 0.005385663986206055, 0.005513792037963867, 0.005406303882598877, 0.005388000011444092, 0.005382175922393799, 0.005429728031158447, 
0.005369791984558106, 0.005151199817657471, 0.0054917440414428715, 0.005446656227111817, 0.005416255950927734, 0.0055378880500793455, 0.005519040107727051, 0.005380767822265625, 0.005409215927124024, 0.0053860158920288085, 0.005381728172302246, 0.005440063953399658, 0.005377344131469727, 0.005344128131866455, 0.005414720058441162, 0.0053574080467224125, 0.005362880229949951, 0.005413760185241699, 0.005367584228515625, 0.005358975887298584, 0.005399136066436767, 0.005368288040161133, 0.005389632225036621, 0.005408927917480468, 0.005380191802978516, 0.0055606718063354495, 0.005415840148925781, 0.005390528202056885, 0.005451839923858642, 0.005394368171691894, 0.005371903896331787, 0.005417664051055909, 0.005410848140716552, 0.005496831893920898, 0.005433343887329102, 0.00536575984954834, 0.005389984130859375, 0.005417312145233154, 0.005390048027038574, 0.005372255802154541, 0.0054289278984069825, 0.00537395191192627, 0.005433599948883056, 0.005402656078338623, 0.005394207954406738, 0.005455808162689209, 0.005511168003082275, 0.0054366722106933595, 0.0054501757621765136, 0.005396768093109131, 0.00589625597000122, 0.005417183876037597, 0.005422976016998291, 0.005611648082733154, 0.0054069762229919435, 0.005417952060699463, 0.00542790412902832, 0.005378335952758789, 0.005392159938812256, 0.005414944171905517, 0.005380032062530518, 0.005360991954803467, 0.005557087898254395, 0.005387392044067383, 0.005111807823181152, 0.005457280158996582, 0.005413663864135742, 0.005385568141937256, 0.005401088237762451, 0.005405824184417724, 0.005388576030731201, 0.005460576057434082, 0.005390336036682129, 0.005383935928344727, 0.005430975914001465, 0.005392960071563721, 0.005398208141326904, 0.005429887771606445, 0.00538592004776001, 0.005369855880737305, 0.005419104099273681, 0.005391359806060791, 0.005400800228118897, 0.005409471988677979, 0.00537395191192627, 0.0053814401626586916, 0.005509439945220947, 0.005384575843811035, 0.005405951976776123, 0.005389120101928711, 0.0053935999870300295, 0.005413631916046143, 0.005505023956298828, 0.005373119831085205, 0.005438271999359131, 0.00539631986618042, 0.005382527828216552, 0.0054191360473632814, 0.005461120128631592, 0.005419551849365234, 0.005447648048400879, 0.005396512031555176, 0.005425151824951172, 0.005425151824951172, 0.005388288021087646, 0.005422272205352783, 0.00538431978225708, 0.005362368106842041, 0.005412831783294678, 0.005363743782043457, 0.005365888118743897, 0.005401567935943603, 0.005453055858612061, 0.005371583938598633, 0.0054167361259460445, 0.005377439975738525, 0.005344319820404053, 0.005415999889373779, 0.0053582401275634765, 0.005378047943115235, 0.005437727928161621, 0.005588736057281494, 0.005437407970428467, 0.005496831893920898, 0.005371583938598633, 0.005494463920593262, 0.005425792217254638, 0.005112095832824707, 0.00538976001739502, 0.0054380159378051756, 0.0054089918136596676, 0.0060249919891357424, 0.005637728214263916, 0.005392799854278565, 0.005604512214660644, 0.00540553617477417, 0.005395808219909668, 0.005444255828857422, 0.0054019842147827145, 0.005378687858581543, 0.0054776639938354495, 0.005382880210876465, 0.00537395191192627, 0.005431615829467774, 0.005373727798461914, 0.005490464210510254, 0.005408895969390869, 0.005390336036682129, 0.005422560214996338, 0.00545801591873169, 0.00539628791809082, 0.00541270399093628, 0.0053435201644897465, 0.005345791816711426, 0.0054271998405456545, 0.005373248100280762, 0.005391263961791992, 0.005429247856140137, 0.005377823829650879, 0.0053918719291687015, 0.005421472072601318, 
0.005600639820098877, 0.005417695999145508, 0.005419072151184082, 0.005388351917266845, 0.005416800022125244, 0.00543120002746582, 0.005390463829040527, 0.005429471969604492, 0.005379871845245361, 0.005365888118743897, 0.005425024032592774, 0.005404416084289551, 0.00537824010848999, 0.005410880088806152, 0.0053637118339538575, 0.005388288021087646, 0.005421055793762207, 0.0054126400947570805, 0.005406655788421631, 0.005452064037322998, 0.005461440086364746, 0.005471936225891113, 0.005501823902130127, 0.005380095958709717, 0.00543891191482544, 0.005382719993591308, 0.005477695941925049, 0.005417119979858398, 0.005389056205749512, 0.005116352081298828, 0.005378399848937988, 0.005424511909484863, 0.005384384155273437, 0.005386528015136719, 0.005419167995452881, 0.005370048046112061, 0.005384191989898681, 0.005417952060699463, 0.005372896194458008, 0.005383743762969971, 0.005464128017425537, 0.0053678078651428224, 0.00541926383972168, 0.005486591815948487, 0.005380095958709717, 0.0054336638450622554, 0.005430496215820312, 0.0053928961753845215, 0.00546127986907959, 0.0053974719047546384, 0.005361375808715821, 0.0055584959983825686, 0.005367424011230469, 0.005372064113616943, 0.005399807929992676, 0.005413631916046143, 0.005368896007537842, 0.005413824081420898, 0.005383327960968017, 0.005407584190368652, 0.005392384052276611, 0.005377888202667236, 0.005405983924865723, 0.005394720077514649, 0.005411424160003662, 0.005453824043273926, 0.005414432048797608, 0.00539081621170044, 0.005473919868469238, 0.005398528099060058, 0.005386623859405517, 0.005723455905914307, 0.005401279926300049, 0.0054570560455322265, 0.005407584190368652, 0.005400576114654541, 0.005447679996490478, 0.005400479793548584, 0.00539247989654541, 0.005456064224243164, 0.005434656143188476, 0.005459839820861816, 0.0054925761222839355, 0.005389120101928711, 0.005380032062530518, 0.005440959930419922, 0.005487232208251953, 0.005441408157348632, 0.005445663928985596, 0.005375807762145996, 0.005435232162475586, 0.0053796801567077635, 0.005109824180603027, 0.005372608184814453, 0.005426623821258545, 0.005378719806671143, 0.005351424217224121, 0.005434432029724121, 0.005366528034210205, 0.005381984233856201, 0.006535520076751709, 0.005421055793762207, 0.005431136131286621, 0.005398687839508057, 0.005456160068511963, 0.005441504001617432, 0.005379839897155762, 0.00537772798538208, 0.005427743911743164, 0.005363071918487549, 0.005462431907653808, 0.005422560214996338, 0.005376543998718262, 0.005431007862091064, 0.005388576030731201, 0.005373280048370361, 0.005415584087371826, 0.005373727798461914, 0.005498528003692627, 0.005534048080444336, 0.0053946561813354495, 0.005380224227905273, 0.005452000141143799, 0.00540227222442627, 0.005396480083465576, 0.005429247856140137, 0.0053673281669616695, 0.005372384071350098, 0.005437439918518067, 0.005432479858398438, 0.005442399978637696, 0.005394495964050293, 0.005348480224609375, 0.005409599781036377, 0.005371103763580323, 0.005368607997894287, 0.005435391902923584, 0.005389696121215821, 0.0056797761917114255, 0.005422272205352783, 0.00539247989654541, 0.005486303806304931, 0.0054585919380187985, 0.005585216045379639, 0.005463136196136475, 0.005384799957275391, 0.005467584133148193, 0.005419904232025146, 0.005425055980682373, 0.005384096145629883, 0.005425504207611084, 0.00538812780380249, 0.0053637118339538575, 0.0054436478614807125, 0.005399519920349121, 0.00514035177230835, 0.005410975933074951, 0.00541539192199707, 0.005394336223602295, 0.005359231948852539, 0.005437983989715576, 
0.005391488075256348, 0.0054048957824707035, 0.00549129581451416, 0.0054579200744628905, 0.005457664012908935, 0.0054167361259460445, 0.005411295890808105, 0.00543552017211914, 0.005375616073608399, 0.005383840084075928, 0.005435999870300293, 0.0053821439743042, 0.005395840167999268, 0.005447391986846924, 0.005401055812835693, 0.005401088237762451, 0.005416063785552978, 0.005372735977172851, 0.005402624130249023, 0.005455872058868408, 0.005404448032379151, 0.005426752090454102, 0.005382336139678955, 0.005370528221130371, 0.005417888164520264, 0.005387392044067383, 0.005377823829650879, 0.005408768177032471, 0.005398655891418457, 0.005368735790252686, 0.005421599864959717, 0.0053818879127502444, 0.0055730237960815426, 0.00541315221786499, 0.005399680137634278, 0.005460351943969727, 0.00542300796508789, 0.005420991897583008, 0.005496607780456543, 0.005972479820251465, 0.005413248062133789, 0.005412864208221436, 0.0053944320678710935, 0.005417151927947998, 0.005400415897369385, 0.005482463836669922, 0.005418655872344971, 0.005386144161224365, 0.0053569917678833005, 0.005406911849975586, 0.005358399868011475, 0.005379903793334961, 0.005429247856140137, 0.005385727882385254, 0.005362368106842041, 0.005419072151184082, 0.00536956787109375, 0.005101344108581543, 0.0053654398918151856, 0.005407264232635498, 0.00548035192489624, 0.005351583957672119, 0.00538431978225708, 0.005365568161010742, 0.005359744071960449, 0.005408639907836914, 0.005359712123870849, 0.005347104072570801, 0.005480607986450195, 0.005428415775299072, 0.005395071983337402, 0.0053864002227783205, 0.00537500810623169, 0.0054159040451049805, 0.005378335952758789, 0.005354207992553711, 0.005417247772216797, 0.005353951930999756, 0.005374207973480225, 0.005409023761749268, 0.005373151779174805, 0.005392864227294922, 0.0054293122291564945, 0.005380191802978516, 0.005379360198974609, 0.0054230718612670896, 0.005364511966705322, 0.005357439994812012, 0.005476352214813233, 0.00552345609664917, 0.005429535865783691, 0.005400320053100586, 0.005388256072998047, 0.00542310380935669, 0.005380095958709717, 0.0053731842041015625, 0.005409567832946777, 0.005383456230163574, 0.00536246395111084, 0.005404607772827148, 0.005371327877044678, 0.0053929281234741215, 0.005409952163696289, 0.005366623878479004, 0.005390336036682129, 0.005493824005126953, 0.005453023910522461, 0.005512832164764404, 0.005427296161651611, 0.005355008125305176, 0.005405183792114258, 0.005395455837249756, 0.005371103763580323, 0.005403615951538086, 0.005366112232208252, 0.00536950397491455, 0.0054280319213867185, 0.0053861761093139645, 0.005382175922393799, 0.0054169921875, 0.005119999885559082, 0.005377376079559326, 0.005452447891235351, 0.005418144226074219, 0.0053686718940734865, 0.005387392044067383, 0.005411712169647217, 0.00536575984954834, 0.005451519966125488, 0.005409023761749268, 0.00536956787109375, 0.005363999843597412, 0.0054206719398498535, 0.005379807949066162, 0.005368127822875977, 0.005407072067260742, 0.005357759952545166, 0.005393919944763184, 0.005371615886688232, 0.005629759788513183, 0.005425087928771973, 0.005419871807098389, 0.005392384052276611, 0.005423136234283447, 0.005370175838470459, 0.005387712001800537, 0.005425087928771973, 0.005364223957061768, 0.005373727798461914, 0.005679103851318359, 0.005387616157531738, 0.005419328212738037, 0.005398880004882813, 0.00538431978225708, 0.005424287796020508, 0.005395167827606201, 0.005642240047454834, 0.005470208168029785, 0.005408031940460205, 0.005573376178741455, 0.005607391834259033, 
0.005439360141754151, 0.005428639888763428, 0.005380608081817627, 0.005375552177429199, 0.005420991897583008, 0.005460063934326172, 0.005367455959320068, 0.00542310380935669, 0.005402719974517823, 0.005450496196746826, 0.005442848205566406, 0.005352287769317627, 0.0054018239974975586, 0.005411295890808105, 0.0053738560676574705, 0.00540121603012085, 0.005402400016784668, 0.005420032024383545, 0.00542521619796753, 0.005401535987854004, 0.005388288021087646, 0.005436831951141357, 0.00513369607925415, 0.005390207767486572, 0.005364384174346924, 0.0054150080680847165, 0.005437248229980469, 0.005365952014923096, 0.005406144142150879, 0.0053604478836059575, 0.005360479831695557, 0.005397247791290283, 0.005371520042419434, 0.005365952014923096, 0.005415264129638672, 0.005369279861450195, 0.00536633586883545, 0.0053976960182189945, 0.005385024070739746, 0.0053779840469360355, 0.00542521619796753, 0.0053731842041015625, 0.005365920066833496, 0.005444287776947021, 0.0053957757949829105, 0.005518239974975586, 0.005473184108734131, 0.0053864002227783205, 0.005438079833984375, 0.00543942403793335, 0.005410880088806152, 0.005450751781463623, 0.005470719814300537, 0.005425663948059082, 0.005445631980895996, 0.005367839813232422, 0.0054068160057067875, 0.005406239986419678, 0.005474112033843994, 0.005419551849365234, 0.005414048194885254, 0.0053952960968017575, 0.005437439918518067, 0.005472095966339111, 0.005380512237548828, 0.0054980158805847164, 0.005384384155273437, 0.005381951808929444, 0.005410880088806152, 0.005369408130645752, 0.005350368022918701, 0.005453023910522461, 0.005396480083465576, 0.005395232200622558, 0.005361343860626221, 0.005366079807281494, 0.005429247856140137, 0.00539628791809082, 0.005406047821044922, 0.005473055839538574, 0.005594912052154541, 0.0054002561569213865, 0.005507679939270019, 0.005382304191589356, 0.005392223834991455, 0.00511683177947998, 0.005481696128845215, 0.005427968025207519, 0.005399680137634278, 0.005433407783508301, 0.005356031894683838, 0.005365280151367188, 0.005422111988067627, 0.005359392166137695, 0.005386271953582764, 0.005396448135375977, 0.005390336036682129, 0.005398431777954102, 0.005488736152648926, 0.005407936096191406, 0.005423935890197754, 0.005470016002655029, 0.005411007881164551, 0.005451807975769043, 0.005399648189544678, 0.005390880107879639, 0.005429440021514893, 0.005410975933074951, 0.0057190718650817875, 0.005429632186889648, 0.005388895988464356, 0.005451776027679443, 0.0053812160491943355, 0.005417888164520264, 0.005451776027679443, 0.005392127990722656, 0.005372032165527344, 0.005703968048095703, 0.005418848037719726, 0.005588287830352783, 0.005495071887969971, 0.005752480030059815, 0.00555244779586792, 0.005378143787384033, 0.005431647777557373, 0.005511168003082275, 0.005379104137420655, 0.0054200000762939455, 0.005402112007141113, 0.0053888001441955566, 0.005425151824951172, 0.005383999824523926, 0.005381375789642334, 0.005421792030334473, 0.005402847766876221, 0.005408768177032471, 0.005439487934112549, 0.005394400119781494, 0.005419040203094482, 0.005432384014129639, 0.005383103847503662, 0.005371647834777832, 0.005521088123321533, 0.005470240116119384, 0.005434144020080567, 0.005400224208831787, 0.00540067195892334, 0.0054447040557861325, 0.0051512961387634275, 0.005392767906188965, 0.005391776084899902, 0.005438047885894776, 0.005498688220977783, 0.00538643217086792, 0.006053887844085694, 0.0054152321815490725, 0.005440735816955566, 0.005417439937591553, 0.005421055793762207, 0.005437439918518067, 0.005382400035858154, 
0.005398079872131347, 0.005408959865570068, 0.00536575984954834, 0.0053821439743042, 0.005478367805480957, 0.005358784198760986, 0.0053825597763061525, 0.005419456005096436, 0.0053812160491943355, 0.005563360214233398, 0.005415135860443115, 0.0053862080574035645, 0.005461887836456299, 0.005399775981903076, 0.005382815837860108, 0.005618815898895264, 0.005482463836669922, 0.0053853440284729005, 0.005441376209259033, 0.0054002561569213865, 0.005386752128601074, 0.005434400081634522, 0.005382527828216552, 0.005416416168212891, 0.005482975959777832, 0.005390751838684082, 0.005412864208221436, 0.005358975887298584, 0.0056564478874206545, 0.005560256004333496, 0.005412928104400635, 0.005406943798065186, 0.005448224067687989, 0.005407072067260742, 0.005412511825561524, 0.0055642881393432615, 0.0054126400947570805, 0.0054579520225524905, 0.005390463829040527, 0.0055001602172851565, 0.005441792011260986, 0.005417791843414307, 0.005406208038330078, 0.0054637761116027835, 0.005407040119171143, 0.005388639926910401, 0.005517024040222168, 0.005368224143981934, 0.005433536052703857, 0.005399456024169922, 0.005111551761627197, 0.00538316822052002, 0.005384352207183838, 0.005402368068695068, 0.005352575778961182, 0.005417407989501953, 0.005396927833557129, 0.005363808155059814, 0.0054223999977111815, 0.005362368106842041, 0.005392384052276611, 0.005431263923645019, 0.005386271953582764, 0.005488639831542969, 0.005408576011657715, 0.005370048046112061, 0.005401919841766358, 0.0054074559211730955, 0.005392576217651367, 0.005381631851196289, 0.005422688007354736, 0.0054002881050109865, 0.005442527770996094, 0.00542851209640503, 0.0053844480514526364, 0.0055400638580322265, 0.005394688129425049, 0.0054085121154785155, 0.005424672126770019, 0.005386975765228272, 0.005488383769989014, 0.005554431915283203, 0.005414912223815918, 0.005404575824737549, 0.005526656150817871, 0.005391104221343994, 0.00543884801864624, 0.0053993921279907224, 0.005408768177032471, 0.00545308780670166, 0.0054299840927124025, 0.005402751922607422, 0.005431168079376221, 0.005414432048797608, 0.005405151844024658, 0.005431263923645019, 0.005390048027038574, 0.0053946561813354495, 0.005746784210205078, 0.005429247856140137, 0.0055316481590271, 0.005443583965301513, 0.0054271998405456545, 0.005439487934112549, 0.005388480186462402, 0.005467807769775391, 0.005496255874633789, 0.005425888061523438, 0.005386240005493164, 0.005419007778167725, 0.005394527912139893, 0.005440991878509521, 0.005404831886291504, 0.005195775985717774, 0.005433343887329102, 0.006186399936676025, 0.006274975776672363, 0.005647039890289306, 0.005398528099060058, 0.005431136131286621, 0.005474112033843994, 0.005413216114044189, 0.005429247856140137, 0.005417056083679199, 0.0054107198715209965, 0.005779679775238037, 0.005412064075469971, 0.005384768009185791, 0.005418591976165772, 0.005402080059051514, 0.005393343925476074, 0.005433343887329102, 0.005387648105621338, 0.005436031818389892, 0.005420608043670654, 0.005396927833557129, 0.005420447826385498, 0.005437920093536377, 0.005403872013092041, 0.005446335792541504, 0.005390560150146485, 0.005397823810577393, 0.00544755220413208, 0.005467199802398682, 0.005402368068695068, 0.00542310380935669, 0.0053944320678710935, 0.005477952003479004, 0.005453311920166016, 0.005391295909881592, 0.005430655956268311, 0.0054048957824707035, 0.005390751838684082, 0.005441120147705078, 0.00537391996383667, 0.005388735771179199, 0.005429408073425293, 0.005401535987854004, 0.005385119915008545, 0.00549289608001709, 0.00539631986618042, 
0.005375391960144043, 0.005417888164520264, 0.00538592004776001, 0.0053944640159606936, 0.005496799945831299, 0.005389887809753418, 0.005429503917694092, 0.005385600090026856, 0.005390336036682129, 0.005560704231262207, 0.005429920196533203, 0.005851263999938965, 0.005525152206420899, 0.005642176151275635, 0.0070692157745361325, 0.005205023765563965, 0.00543782377243042, 0.0054767680168151854, 0.005451744079589844, 0.0054759359359741215, 0.005437280178070068, 0.0054198079109191895, 0.005494783878326416, 0.005426655769348145, 0.005423200130462646, 0.005453504085540771, 0.005446400165557861, 0.005416959762573242, 0.005486015796661377, 0.005401055812835693, 0.005635615825653076, 0.0054380159378051756, 0.005490431785583496, 0.0054926080703735355, 0.005426559925079346, 0.005475232124328613, 0.00552569580078125, 0.005453728199005127, 0.005462016105651855, 0.005467391967773437, 0.005421823978424072, 0.005468512058258056, 0.005506112098693848, 0.0054728641510009765, 0.005497951984405517, 0.005474976062774658, 0.005474559783935547, 0.005480480194091797, 0.005447648048400879, 0.005455488204956054, 0.005466495990753174, 0.00543779182434082, 0.005449088096618653, 0.00543775987625122, 0.005406303882598877, 0.005447711944580078, 0.005521152019500732, 0.00539244794845581, 0.005480991840362549, 0.005408768177032471, 0.005590784072875977, 0.0054438400268554685, 0.005423232078552246, 0.005509088039398194, 0.0054330239295959475, 0.005445631980895996, 0.005615551948547363, 0.006658656120300293, 0.006477503776550293, 0.006254591941833496, 0.006598656177520752, 0.005546175956726075, 0.005447487831115722, 0.005431295871734619, 0.005484543800354004, 0.005428544044494629, 0.005446335792541504, 0.005490143775939941, 0.005162752151489257, 0.005406015872955322, 0.0053993921279907224, 0.005437535762786865, 0.005420383930206299, 0.00541542387008667, 0.005410975933074951, 0.005390336036682129, 0.005404448032379151, 0.005429024219512939, 0.005390399932861328, 0.00544755220413208, 0.005634463787078857, 0.005391488075256348, 0.0054551358222961425, 0.005379936218261719, 0.00540451192855835, 0.005443136215209961, 0.0054172158241271974, 0.005447296142578125, 0.005415487766265869, 0.005382400035858154, 0.005443327903747559, 0.005489888191223144, 0.005405471801757812, 0.005412864208221436, 0.005410624027252197, 0.0053731842041015625, 0.005448319911956787, 0.005419328212738037, 0.005402304172515869, 0.005437024116516113, 0.005381120204925537, 0.0053736639022827146, 0.005506624221801757, 0.0055075201988220215, 0.005461696147918701, 0.005426655769348145, 0.005409632205963135, 0.005451776027679443, 0.0053987522125244145, 0.005408448219299316, 0.005460063934326172, 0.005490176200866699, 0.0054215679168701176, 0.005494783878326416, 0.005419007778167725, 0.005425151824951172, 0.005431519985198975, 0.005418560028076172, 0.005441664218902588, 0.005421152114868164, 0.005384191989898681, 0.005568511962890625, 0.005858335971832275, 0.005469120025634765, 0.005496863842010498, 0.005953728199005127, 0.00547215986251831, 0.00542416000366211, 0.005929088115692139, 0.005735167980194092, 0.005530848026275635, 0.005312928199768066, 0.005472288131713867, 0.0054150080680847165, 0.005426911830902099, 0.005476863861083985, 0.005476352214813233, 0.005542175769805908, 0.005635807991027832, 0.005496128082275391, 0.005681568145751953, 0.005527840137481689, 0.005615200042724609, 0.005380512237548828, 0.005421055793762207, 0.005440832138061524, 0.0053647680282592775, 0.006215295791625977, 0.00592460823059082, 0.005375423908233643, 0.005528351783752441, 
0.005380159854888916, 0.005411903858184814, 0.005452672004699707, 0.005365151882171631, 0.00551584005355835, 0.005399712085723877, 0.005413824081420898, 0.005433343887329102, 0.005457632064819336, 0.005410624027252197, 0.005429728031158447, 0.005396096229553222, 0.0053760638236999515, 0.0055196800231933595, 0.005583968162536621, 0.005434463977813721, 0.005410655975341797, 0.005500576019287109, 0.005426559925079346, 0.005499839782714844, 0.0053961601257324215, 0.005439040184020996, 0.005372255802154541, 0.005410399913787842, 0.005462751865386963, 0.005373407840728759, 0.005408480167388916, 0.00608684778213501, 0.005417759895324707, 0.0055459198951721194, 0.00546611213684082, 0.005402656078338623, 0.005490176200866699, 0.0053949117660522464, 0.00540825605392456, 0.005419199943542481, 0.005409120082855224, 0.005433311939239502, 0.0053944320678710935, 0.0054020161628723145, 0.005399136066436767, 0.005402112007141113, 0.005372416019439697, 0.005194047927856446, 0.005447360038757324, 0.005368959903717041, 0.00538431978225708, 0.005423871994018554, 0.0054590082168579105, 0.005399551868438721, 0.00558892822265625, 0.005398015975952148, 0.005417471885681153, 0.0054330239295959475, 0.00544595193862915, 0.005433152198791504, 0.005433536052703857, 0.005396255970001221, 0.005443424224853516, 0.005497312068939209, 0.005381696224212647, 0.005425504207611084, 0.005443583965301513, 0.005392384052276611, 0.005443327903747559, 0.005400832176208496, 0.005393983840942383, 0.005421504020690918, 0.005423232078552246, 0.005535615921020508, 0.005412735939025879, 0.005488927841186523, 0.005428800106048584, 0.005390624046325684, 0.005404607772827148, 0.005425312042236328, 0.005417952060699463, 0.005368159770965576, 0.005441535949707031, 0.005407519817352295, 0.0054345917701721196, 0.00545251178741455, 0.005381663799285888, 0.005400544166564942, 0.00541868782043457, 0.00540499210357666, 0.0056917757987976075, 0.005408768177032471, 0.005412864208221436, 0.005436927795410156, 0.005519231796264648, 0.0053727998733520506, 0.0054412798881530765, 0.005393695831298828, 0.005533664226531982, 0.005407360076904297, 0.0053935680389404295, 0.005509664058685302, 0.0053805441856384275, 0.00546611213684082, 0.005488992214202881, 0.005386112213134765, 0.0053901119232177734, 0.005411007881164551, 0.0054362878799438475, 0.005454751968383789, 0.005115744113922119, 0.00553715181350708, 0.005359712123870849, 0.0053870720863342285, 0.005420767784118653, 0.005376287937164307, 0.005368927955627442, 0.005397119998931885, 0.005387968063354493, 0.005515679836273193, 0.005392191886901855, 0.005404640197753907, 0.005439199924468994, 0.005467936038970948, 0.005378015995025635, 0.005397151947021484, 0.005400576114654541, 0.005385791778564453, 0.00544217586517334, 0.005399775981903076, 0.005382495880126953, 0.005413055896759033, 0.005385824203491211, 0.005360095977783203, 0.0055682239532470704, 0.005398303985595703, 0.005450240135192871, 0.005408639907836914, 0.005437568187713623, 0.005442560195922852, 0.005399263858795166, 0.005388576030731201, 0.0055008320808410645, 0.0053842878341674805, 0.005451839923858642, 0.0054207038879394535, 0.0053909759521484375, 0.005391232013702393, 0.005522560119628906, 0.005402112007141113, 0.005398591995239258, 0.005640096187591552, 0.005537248134613037, 0.00546070384979248, 0.005369184017181397, 0.005391263961791992, 0.005460896015167236, 0.005391200065612793, 0.005420959949493408, 0.005521503925323487, 0.005461503982543945, 0.005449344158172607, 0.005415808200836181, 0.005427552223205566, 0.005701280117034912, 
0.00545798397064209, 0.0054085440635681156, 0.00548095989227295, 0.00540451192855835, 0.0054531521797180176, 0.005437920093536377, 0.005418879985809326, 0.005458047866821289, 0.005201280117034912, 0.005423999786376953, 0.005436192035675049, 0.0054522562026977535, 0.005382431983947754, 0.005415487766265869, 0.005442368030548096, 0.005390367984771729, 0.005376832008361816, 0.005429247856140137, 0.005447679996490478, 0.0053710718154907225, 0.005403488159179687, 0.005395711898803711, 0.005391071796417237, 0.00543881607055664, 0.0054002881050109865, 0.005446591854095459, 0.0054143362045288084, 0.005489215850830078, 0.005486591815948487, 0.005414912223815918, 0.005504479885101318, 0.005532032012939453, 0.005421216011047363, 0.005445024013519287, 0.0054462399482727054, 0.005490911960601807, 0.0055354881286621095, 0.005417056083679199, 0.0054020161628723145, 0.005446400165557861, 0.005404128074645996, 0.005394144058227539, 0.005437568187713623, 0.005383872032165528, 0.005411007881164551, 0.005425280094146729, 0.005419487953186035, 0.005443488121032715, 0.005428607940673828, 0.005381951808929444, 0.005417888164520264, 0.005398528099060058, 0.005421055793762207, 0.00546611213684082, 0.005398015975952148, 0.005392864227294922, 0.005408927917480468, 0.005711520195007324, 0.005387839794158936, 0.005460447788238525, 0.005388160228729248, 0.005414720058441162, 0.005605535984039307, 0.005408736228942871, 0.005461919784545899, 0.005384511947631836, 0.005382304191589356, 0.005691391944885254, 0.005390336036682129, 0.005387872219085693, 0.0054297599792480465, 0.005163519859313965, 0.005413216114044189, 0.0054009280204772945, 0.00544323205947876, 0.005392384052276611, 0.005404640197753907, 0.005488351821899414, 0.005398943901062012, 0.0054141759872436525, 0.005429215908050537, 0.005397151947021484, 0.0054579200744628905, 0.005388288021087646, 0.0054172158241271974, 0.0054494719505310055, 0.005396512031555176, 0.005408736228942871, 0.0055214080810546875, 0.00545366382598877, 0.005403840065002442, 0.005396992206573486, 0.005388095855712891, 0.005431968212127686, 0.005410367965698242, 0.005409215927124024, 0.005463935852050781, 0.005416351795196533, 0.005391232013702393, 0.005461952209472656, 0.005419968128204345, 0.005411808013916016, 0.005431263923645019, 0.005421152114868164, 0.005425087928771973, 0.005453824043273926, 0.005418272018432617, 0.005444384098052978, 0.005425343990325928, 0.005426559925079346, 0.005472640037536621, 0.005420095920562744, 0.005421984195709229, 0.005467391967773437, 0.005367968082427978, 0.005406464099884033, 0.005438079833984375, 0.005447936058044433, 0.005378047943115235, 0.005452032089233398, 0.00539785623550415, 0.0054216961860656734, 0.005887775897979736, 0.005369855880737305, 0.005433343887329102, 0.0053678078651428224, 0.005458271980285644, 0.005418655872344971, 0.005384096145629883, 0.005378143787384033, 0.005422272205352783, 0.005397312164306641, 0.005427264213562012, 0.005443520069122314, 0.00518064022064209, 0.005438240051269532, 0.005437439918518067, 0.005515391826629639, 0.005391712188720703, 0.005444128036499024, 0.005394783973693848, 0.005389984130859375, 0.00541923189163208, 0.00537337589263916, 0.0054009280204772945, 0.005453824043273926, 0.0053792958259582515, 0.00551196813583374, 0.0054579200744628905, 0.005396192073822022, 0.005433279991149903, 0.00541107177734375, 0.005437535762786865, 0.005469791889190674, 0.0054460477828979495, 0.005478047847747803, 0.005427264213562012, 0.005418367862701416, 0.005374815940856934, 0.005415103912353516, 0.005375423908233643, 
0.005376736164093018, 0.005426239967346191, 0.00539244794845581, 0.0054317121505737306, 0.005390528202056885, 0.005375999927520752, 0.005404704093933106, 0.00537497615814209, 0.005410016059875488, 0.005449759960174561, 0.005408703804016113, 0.00543449592590332, 0.005426080226898194, 0.00538102388381958, 0.0053846721649169925, 0.005417344093322754, 0.005623807907104492, 0.0054540162086486816, 0.005451583862304687, 0.005440512180328369, 0.005444608211517334, 0.005375648021697998, 0.005409120082855224, 0.005449728012084961, 0.005412543773651123, 0.005613887786865234, 0.005473951816558838, 0.005431647777557373, 0.00541596794128418, 0.005503007888793945, 0.005417920112609863, 0.0054496641159057614, 0.00544159984588623, 0.005388288021087646, 0.005484543800354004, 0.005414912223815918]",tokens/s,184.09794085685502,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in 
__init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 239752 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.281984,1136.525312,0.0,734.0032,709.336064,s,1,7.2261533203125,7.2261533203125,0.0,7.2261533203125,7.2261533203125,7.2261533203125,7.2261533203125,[7.2261533203125],,kWh,4.504761450001145e-06,4.824369393119647e-07,8.894451559993133e-07,5.8766435453124235e-06,,MB,1213.935616,1283.325952,0.0,870.31808,809.960448,s,15,0.2554636821746826,0.01703091214497884,0.0002962720431186955,0.016925088882446288,0.01728787841796875,0.017639094161987303,0.017923467025756836,"[0.01799456024169922, 0.016887392044067383, 0.016973472595214843, 0.01691846466064453, 0.01695952033996582, 0.01694358444213867, 0.016924415588378906, 0.016884607315063478, 0.016920991897583008, 0.016925088882446288, 0.016829824447631835, 0.016863359451293945, 0.017486751556396483, 0.016962080001831054, 0.016989568710327148]",tokens/s,15031.490845631279,kWh,6.067122581081148e-07,6.69081947342115e-08,4.018587183451442e-07,1.0754791711874705e-06,tokens/kWh,238033433.70875543,MB,1225.498624,1318.977536,0.0,905.969664,809.963008,s,15,9.872449645996094,0.6581633097330729,0.003109677138231171,0.6566630859375,0.6625375244140626,0.6630997253417968,0.6640510485839844,"[0.662590087890625, 0.661490478515625, 0.6624586791992187, 0.6642888793945313, 0.6557688598632813, 0.6566630859375, 0.6560877075195313, 0.6551048583984375, 0.660556396484375, 0.6548046264648437, 0.6561687622070312, 0.655614501953125, 0.6571704711914063, 0.6583905639648437, 0.6552916870117188]",tokens/s,95.72092377126052,kWh,1.9249967650227525e-05,2.122954441122724e-06,8.278092670054725e-06,2.9651014761404973e-05,tokens/kWh,2124716.4897035323,,s,945,9.86798069095611,0.010442307609477373,0.00023103927408061475,0.010399231910705567,0.010521235275268555,0.010628146934509277,0.011512325019836425,"[0.010408767700195313, 0.010391551971435547, 0.01043660831451416, 0.010330047607421875, 0.010362943649291993, 0.01041817569732666, 0.010440704345703124, 0.010358592033386231, 0.010352831840515136, 0.01040998363494873, 0.010405887603759765, 0.01062502384185791, 0.01045683193206787, 
0.010833855628967284, 0.010469696044921876, 0.010438655853271485, 0.010364831924438477, 0.01044489574432373, 0.010448896408081054, 0.01045631980895996, 0.010549887657165528, 0.010424448013305663, 0.0104486083984375, 0.013164608001708985, 0.010523200035095215, 0.010474240303039551, 0.0105317440032959, 0.010430463790893555, 0.010411871910095214, 0.010649760246276856, 0.010647551536560058, 0.010375167846679688, 0.01047059154510498, 0.01036780834197998, 0.01041766357421875, 0.010480128288269042, 0.010337535858154296, 0.010506912231445313, 0.010426464080810547, 0.010567680358886718, 0.010389504432678222, 0.010344351768493652, 0.010430047988891602, 0.010389280319213868, 0.010551487922668457, 0.010482303619384766, 0.010394816398620605, 0.010586432456970215, 0.010473152160644532, 0.010415007591247558, 0.010798751831054687, 0.011135135650634766, 0.010369119644165039, 0.010476448059082032, 0.010562463760375977, 0.010403936386108398, 0.010583647727966309, 0.010424415588378906, 0.010516799926757813, 0.010411711692810058, 0.0103788480758667, 0.010406208038330079, 0.010307040214538574, 0.010322079658508301, 0.010426752090454102, 0.010391872406005859, 0.010375167846679688, 0.010461183547973632, 0.010360671997070312, 0.010367008209228515, 0.010371040344238282, 0.010499551773071289, 0.01038963222503662, 0.010332287788391114, 0.01044320011138916, 0.010400768280029296, 0.010406144142150879, 0.010385631561279297, 0.010328160285949708, 0.01039964771270752, 0.010314080238342285, 0.010399295806884765, 0.010400383949279786, 0.010366016387939454, 0.010390463829040528, 0.010362079620361328, 0.010435359954833984, 0.01043455982208252, 0.010417216300964356, 0.010464192390441895, 0.01042636775970459, 0.010454303741455078, 0.010510144233703613, 0.010494655609130859, 0.01052284812927246, 0.010549247741699219, 0.010407936096191407, 0.010784768104553222, 0.010410016059875488, 0.01038044834136963, 0.010433024406433105, 0.01034886360168457, 0.010474752426147461, 0.010750720024108887, 0.010629119873046875, 0.010555392265319824, 0.010396991729736328, 0.01038815975189209, 0.010413984298706054, 0.010364416122436524, 0.010356831550598144, 0.010444288253784179, 0.010714112281799316, 0.010585311889648438, 0.013033663749694824, 0.010737471580505371, 0.010508831977844238, 0.010522879600524903, 0.010496224403381348, 0.010460576057434083, 0.010471808433532714, 0.010450143814086914, 0.010400896072387695, 0.010772031784057617, 0.010481760025024415, 0.010458175659179687, 0.010378911972045898, 0.010463552474975586, 0.01038044834136963, 0.010535264015197754, 0.010459199905395507, 0.010405664443969726, 0.010461183547973632, 0.010342944145202636, 0.010391712188720703, 0.010309632301330567, 0.010387455940246582, 0.010352224349975585, 0.010352479934692382, 0.010357312202453613, 0.010448863983154297, 0.01046121597290039, 0.011619423866271973, 0.013702048301696777, 0.010612735748291016, 0.010474783897399902, 0.010418304443359375, 0.010433119773864746, 0.010393407821655274, 0.01049619197845459, 0.010497759819030762, 0.01042585563659668, 0.01039408016204834, 0.010416768074035644, 0.010415648460388184, 0.01041324806213379, 0.010384351730346679, 0.01043660831451416, 0.010457088470458984, 0.010440544128417968, 0.010391712188720703, 0.010347871780395508, 0.010418335914611816, 0.010357248306274413, 0.01041158390045166, 0.010526464462280274, 0.010365504264831543, 0.010393343925476074, 0.010336640357971192, 0.010369312286376954, 0.01035148811340332, 0.010352831840515136, 0.011698271751403809, 0.010398271560668946, 0.01039311981201172, 
0.010418047904968262, 0.01036143970489502, 0.010363295555114746, 0.010428000450134277, 0.01044048023223877, 0.010471743583679199, 0.010497952461242676, 0.010436479568481444, 0.010560768127441406, 0.010377375602722168, 0.010416192054748535, 0.010627455711364746, 0.010332544326782227, 0.010442655563354492, 0.010382399559020997, 0.010378463745117188, 0.010381024360656738, 0.010368767738342286, 0.01037484836578369, 0.010462976455688477, 0.010421055793762207, 0.010711039543151855, 0.010471424102783204, 0.010382464408874512, 0.010468223571777343, 0.01039311981201172, 0.010392031669616698, 0.010452159881591797, 0.010386240005493165, 0.0104399995803833, 0.010641920089721679, 0.011224320411682129, 0.012034272193908692, 0.010491776466369628, 0.010918335914611816, 0.010424608230590821, 0.01050432014465332, 0.010482687950134278, 0.010417152404785156, 0.010596351623535156, 0.012001279830932618, 0.010534751892089843, 0.010633376121520996, 0.011267295837402344, 0.010486559867858887, 0.010471551895141602, 0.010553215980529785, 0.010405887603759765, 0.01048310375213623, 0.010541664123535157, 0.010419615745544434, 0.01046998405456543, 0.01040176010131836, 0.010401727676391602, 0.010467424392700195, 0.01042144012451172, 0.010412863731384277, 0.010394656181335449, 0.01052566432952881, 0.01043081569671631, 0.011523743629455567, 0.01042255973815918, 0.010444512367248535, 0.010368767738342286, 0.01046553611755371, 0.010430463790893555, 0.010372511863708496, 0.010375167846679688, 0.010447808265686035, 0.010386591911315917, 0.010399231910705567, 0.01033955192565918, 0.010383135795593261, 0.010326016426086425, 0.0103603515625, 0.01032044792175293, 0.010338208198547364, 0.010267616271972656, 0.010810400009155273, 0.010377311706542968, 0.010447615623474122, 0.010438015937805176, 0.010440799713134765, 0.010391839981079102, 0.010318047523498535, 0.010381312370300292, 0.010378591537475586, 0.01034716796875, 0.010356736183166505, 0.01032806396484375, 0.010369024276733398, 0.01037299156188965, 0.010401920318603515, 0.010403840065002442, 0.010337920188903808, 0.010377599716186523, 0.010344127655029296, 0.010359104156494141, 0.011119711875915527, 0.010363807678222656, 0.010384575843811035, 0.010386048316955566, 0.010317631721496582, 0.010389887809753418, 0.010335904121398926, 0.010367327690124512, 0.010379263877868652, 0.010352095603942872, 0.010371392250061034, 0.010318047523498535, 0.010373120307922363, 0.010397695541381835, 0.010362943649291993, 0.010372960090637207, 0.01046947193145752, 0.010356736183166505, 0.010374752044677735, 0.010352959632873535, 0.010489952087402344, 0.01033011245727539, 0.010395648002624512, 0.010299263954162597, 0.010364319801330566, 0.01036361598968506, 0.01033625602722168, 0.01041372776031494, 0.010350079536437988, 0.010367615699768067, 0.010386783599853516, 0.010312288284301759, 0.010482144355773926, 0.010301247596740722, 0.010376192092895508, 0.010386431694030761, 0.010753824234008788, 0.01044707202911377, 0.010442015647888183, 0.010457823753356934, 0.010559455871582031, 0.010426400184631347, 0.01038310432434082, 0.010439519882202148, 0.010500096321105956, 0.010388863563537598, 0.010729696273803711, 0.010359295845031738, 0.010442272186279297, 0.010415743827819825, 0.010367487907409668, 0.010369279861450195, 0.01029856014251709, 0.010344575881958008, 0.010442943572998046, 0.010949440002441406, 0.010376895904541016, 0.010371264457702636, 0.010354496002197266, 0.010370816230773926, 0.010289664268493653, 0.010393343925476074, 0.01031987190246582, 0.010485759735107422, 0.01035264015197754, 
0.010331935882568359, 0.010361056327819824, 0.010309632301330567, 0.010365983963012696, 0.010427359580993653, 0.010397536277770996, 0.010438400268554688, 0.010440223693847657, 0.0104334077835083, 0.010415648460388184, 0.010366975784301758, 0.010367456436157227, 0.010371199607849121, 0.010499967575073241, 0.01042636775970459, 0.010387455940246582, 0.010401247978210449, 0.010372832298278808, 0.010449952125549316, 0.010411808013916016, 0.010373408317565919, 0.010403552055358888, 0.010377216339111327, 0.010440704345703124, 0.010430463790893555, 0.010432512283325195, 0.010430463790893555, 0.010405311584472656, 0.010451359748840332, 0.010431679725646972, 0.010429408073425293, 0.010469375610351562, 0.010484800338745118, 0.01044979190826416, 0.010477631568908692, 0.01036083221435547, 0.010417823791503905, 0.010432864189147949, 0.010515968322753906, 0.010461152076721192, 0.010392191886901856, 0.010335519790649414, 0.010383968353271484, 0.010401663780212402, 0.010555392265319824, 0.010467455863952637, 0.010424256324768066, 0.010857855796813965, 0.010428863525390625, 0.010368288040161133, 0.010419039726257325, 0.010332159996032715, 0.010397695541381835, 0.010401311874389649, 0.010336576461791992, 0.010387616157531738, 0.010342399597167968, 0.010391551971435547, 0.01043660831451416, 0.010369024276733398, 0.010348544120788575, 0.010344703674316407, 0.010370816230773926, 0.010373120307922363, 0.01064140796661377, 0.010428128242492675, 0.010391743659973144, 0.010441023826599121, 0.010452287673950195, 0.010424192428588868, 0.01049449634552002, 0.010451007843017578, 0.01046668815612793, 0.010502655982971192, 0.010342528343200683, 0.010407072067260742, 0.010748767852783203, 0.010348735809326172, 0.010362112045288085, 0.010322367668151856, 0.010381312370300292, 0.010379615783691407, 0.010297120094299317, 0.010390560150146484, 0.010341343879699707, 0.010405887603759765, 0.010341631889343261, 0.01037161636352539, 0.010422016143798828, 0.01033619213104248, 0.010398240089416503, 0.010385408401489257, 0.010383359909057617, 0.010422592163085937, 0.010321599960327148, 0.010423808097839356, 0.010354656219482422, 0.010482208251953125, 0.010383135795593261, 0.010381535530090332, 0.010416128158569337, 0.01034227180480957, 0.010375295639038086, 0.01036736011505127, 0.010380512237548828, 0.010404064178466796, 0.010465248107910156, 0.010370719909667968, 0.010392512321472168, 0.010419424057006837, 0.010377951622009277, 0.010454272270202637, 0.01038419246673584, 0.010395648002624512, 0.01040988826751709, 0.010344544410705566, 0.010461183547973632, 0.010362336158752442, 0.010410592079162598, 0.010366080284118652, 0.010445631980895997, 0.010390751838684082, 0.010355487823486328, 0.010456768035888672, 0.010438976287841796, 0.010379103660583497, 0.010426527976989746, 0.010353919982910156, 0.010384127616882325, 0.010386431694030761, 0.010314751625061035, 0.010389504432678222, 0.010360447883605958, 0.010393983840942382, 0.01036083221435547, 0.010381312370300292, 0.010419424057006837, 0.01032192039489746, 0.01051318359375, 0.010463232040405274, 0.010401375770568847, 0.010428832054138184, 0.010387455940246582, 0.010403167724609376, 0.010465951919555664, 0.010381312370300292, 0.010375167846679688, 0.010364224433898925, 0.010399552345275878, 0.010383872032165528, 0.01035916805267334, 0.010416128158569337, 0.010331775665283203, 0.010393983840942382, 0.010405887603759765, 0.010360639572143554, 0.010387647628784179, 0.010308639526367187, 0.010352928161621094, 0.010547904014587403, 0.010366975784301758, 0.010370304107666015, 
0.010326784133911132, 0.010372608184814454, 0.010422783851623535, 0.010403840065002442, 0.010391839981079102, 0.01044598388671875, 0.010422847747802734, 0.010500384330749511, 0.010387167930603028, 0.010419967651367188, 0.010414336204528809, 0.010366975784301758, 0.010444704055786133, 0.010357855796813965, 0.010394623756408691, 0.01041817569732666, 0.0103853759765625, 0.010399423599243163, 0.010381792068481445, 0.010413951873779297, 0.010395648002624512, 0.01031167984008789, 0.010403327941894532, 0.010374943733215332, 0.010533535957336426, 0.010502528190612793, 0.010526399612426757, 0.010416383743286134, 0.010362272262573241, 0.010387935638427735, 0.010437664031982421, 0.010355551719665528, 0.01041817569732666, 0.010357824325561524, 0.010400704383850098, 0.01038918399810791, 0.010322239875793457, 0.010478879928588867, 0.010318367958068848, 0.010388863563537598, 0.010381919860839844, 0.01039356803894043, 0.010422528266906738, 0.011867136001586913, 0.012333984375, 0.011497792243957519, 0.010381088256835938, 0.010440704345703124, 0.010405119895935058, 0.010396703720092774, 0.010421183586120606, 0.010429216384887694, 0.010424320220947265, 0.010522432327270508, 0.010384575843811035, 0.010404447555541992, 0.010479455947875976, 0.01048236846923828, 0.010409855842590332, 0.010377344131469726, 0.010407391548156739, 0.01043289566040039, 0.01043996810913086, 0.010476287841796874, 0.010319135665893554, 0.010364800453186036, 0.01033299160003662, 0.010289567947387696, 0.0103373441696167, 0.010375712394714355, 0.010391551971435547, 0.01043660831451416, 0.010440447807312011, 0.010405920028686523, 0.010391039848327637, 0.010367712020874024, 0.010369119644165039, 0.01034556770324707, 0.010382240295410156, 0.01034768009185791, 0.010502911567687989, 0.010371071815490723, 0.010356639862060547, 0.010332384109497071, 0.010417951583862304, 0.010425536155700684, 0.010369952201843263, 0.010349920272827148, 0.010446880340576172, 0.010873472213745117, 0.010358816146850585, 0.010403807640075684, 0.01032192039489746, 0.010378944396972655, 0.010380607604980469, 0.010374143600463867, 0.010428288459777831, 0.010367103576660156, 0.010408224105834961, 0.010403552055358888, 0.01043455982208252, 0.010340352058410645, 0.010327072143554687, 0.010417119979858398, 0.010446144104003905, 0.010400447845458984, 0.010381312370300292, 0.010333984375, 0.010420767784118653, 0.010472448348999023, 0.010372927665710449, 0.010410752296447754, 0.010350079536437988, 0.010409728050231933, 0.010365311622619628, 0.010405632019042968, 0.010367744445800782, 0.01034819221496582, 0.010365280151367187, 0.010438976287841796, 0.010355808258056641, 0.010361536026000977, 0.010350496292114257, 0.010381312370300292, 0.010332159996032715, 0.010376928329467773, 0.010353055953979493, 0.010343392372131348, 0.010398303985595703, 0.010297120094299317, 0.010366111755371095, 0.010373023986816407, 0.010470335960388183, 0.010612735748291016, 0.010538304328918457, 0.010441408157348633, 0.010502143859863282, 0.01043660831451416, 0.01043455982208252, 0.010443872451782226, 0.010418304443359375, 0.010443552017211914, 0.010426079750061035, 0.01037667179107666, 0.010425151824951172, 0.010362879753112793, 0.010414079666137695, 0.010417344093322754, 0.01037600040435791, 0.010489855766296387, 0.010348511695861816, 0.010373151779174805, 0.01053270435333252, 0.010365280151367187, 0.01039247989654541, 0.010338751792907715, 0.010392031669616698, 0.010373087882995605, 0.01033129596710205, 0.01037606430053711, 0.010364895820617675, 0.010389535903930665, 0.010391263961791993, 
0.010329376220703125, 0.010388480186462403, 0.010378591537475586, 0.010404512405395508, 0.01044480037689209, 0.010360287666320802, 0.010403743743896484, 0.010348480224609375, 0.01040454387664795, 0.010385408401489257, 0.01036246395111084, 0.010385536193847656, 0.010371520042419433, 0.010490880012512208, 0.010406720161437987, 0.010401823997497558, 0.010460927963256835, 0.010391807556152344, 0.010391551971435547, 0.010416128158569337, 0.010381312370300292, 0.010637311935424805, 0.010340352058410645, 0.010369024276733398, 0.010470944404602051, 0.010306015968322754, 0.01041817569732666, 0.010425791740417481, 0.010465632438659668, 0.010425791740417481, 0.010371871948242187, 0.010383359909057617, 0.010414079666137695, 0.010366975784301758, 0.010397055625915527, 0.010427007675170899, 0.010506048202514648, 0.010471360206604004, 0.01040777587890625, 0.01043500804901123, 0.010405792236328124, 0.010375231742858886, 0.010393600463867187, 0.010375167846679688, 0.010393600463867187, 0.010389311790466309, 0.010342111587524414, 0.010379648208618164, 0.010319519996643066, 0.010357184410095215, 0.010375167846679688, 0.010431967735290527, 0.01041648006439209, 0.010354880332946778, 0.01040559959411621, 0.010444928169250488, 0.010349920272827148, 0.010361184120178222, 0.010332608222961425, 0.010373408317565919, 0.010335264205932617, 0.010398431777954102, 0.010399264335632324, 0.010313920021057129, 0.010334495544433595, 0.010315999984741211, 0.010519488334655762, 0.010447936058044434, 0.010385184288024903, 0.010417375564575195, 0.010385215759277344, 0.010556320190429687, 0.0104202880859375, 0.010366975784301758, 0.010383359909057617, 0.010371007919311524, 0.010383423805236816, 0.010381119728088379, 0.010365119934082031, 0.010379263877868652, 0.010352512359619141, 0.01038755226135254, 0.01039798355102539, 0.010362367630004882, 0.01048755168914795, 0.010381823539733886, 0.010454400062561035, 0.010541600227355957, 0.010334303855895996, 0.010364992141723633, 0.010362848281860352, 0.010444831848144532, 0.01062831974029541, 0.010380800247192384, 0.010346464157104492, 0.01038163185119629, 0.01043222427368164, 0.0103504638671875, 0.010412672042846679, 0.010479968070983886, 0.01042966365814209, 0.010408384323120117, 0.010369279861450195, 0.010413824081420898, 0.010342399597167968, 0.010389504432678222, 0.010378496170043945, 0.010336352348327636, 0.010422207832336427, 0.010334943771362305, 0.01044275188446045, 0.010397695541381835, 0.010368288040161133, 0.01073145580291748, 0.010639328002929688, 0.010428319931030274, 0.010445471763610839, 0.010404095649719239, 0.01046735954284668, 0.010392800331115722, 0.01039027214050293, 0.01040169620513916, 0.01036086368560791, 0.010409215927124023, 0.010500063896179199, 0.01040060806274414, 0.010440064430236816, 0.010343040466308594, 0.010408063888549805, 0.010428288459777831, 0.010498047828674317, 0.010417471885681153, 0.010387840270996094, 0.010575615882873535, 0.010729632377624511, 0.010446751594543457, 0.010586624145507812, 0.010397695541381835, 0.010371007919311524, 0.01038700771331787, 0.010365440368652343, 0.01043660831451416, 0.01042841625213623, 0.010345600128173827, 0.010529088020324708, 0.010385919570922851, 0.010514495849609375, 0.010395999908447265, 0.010420000076293946, 0.010347423553466796, 0.010546431541442872, 0.010358495712280274, 0.01039948844909668, 0.0103057918548584, 0.010510111808776855, 0.01036291217803955, 0.01028502368927002, 0.010786368370056152, 0.010445440292358398, 0.01036201572418213, 0.01039635181427002, 0.010358783721923828, 0.010428447723388672, 
0.010548640251159667, 0.010457663536071778, 0.010376447677612305, 0.010381728172302247, 0.010389856338500977, 0.010407967567443848, 0.010384832382202148, 0.01041209602355957, 0.01037484836578369, 0.010946623802185059, 0.010531231880187989, 0.010473024368286133, 0.010967743873596192, 0.010530879974365234, 0.010400064468383789, 0.010409664154052735, 0.01044275188446045, 0.010424160003662109, 0.010417759895324707, 0.010366975784301758, 0.01046735954284668, 0.010406432151794433, 0.01052239990234375, 0.010417951583862304, 0.010367424011230468, 0.01042147159576416, 0.010451744079589844, 0.010337823867797852, 0.01038588809967041, 0.010364928245544434, 0.010499327659606934, 0.01043507194519043, 0.010393280029296875, 0.01039571189880371, 0.010684927940368653, 0.010397695541381835, 0.01039078426361084, 0.010312447547912598, 0.010389504432678222, 0.010381088256835938, 0.010351967811584473, 0.010462080001831055, 0.010385408401489257, 0.010407936096191407, 0.010381312370300292, 0.010569727897644043, 0.010422112464904786, 0.010344320297241211, 0.01041209602355957, 0.010518752098083495, 0.010455039978027344, 0.010500096321105956, 0.010399359703063965, 0.010488384246826172, 0.01044211196899414, 0.010402144432067871, 0.010428256034851074, 0.010461407661437988, 0.01035472011566162, 0.010504159927368164, 0.010530783653259277, 0.010395999908447265, 0.01044825553894043, 0.010393919944763184, 0.010682144165039063, 0.010434783935546876, 0.010476639747619629, 0.010677311897277832, 0.010618720054626465, 0.010386719703674316, 0.010410655975341796, 0.010383423805236816, 0.010369024276733398, 0.01061683177947998, 0.010364928245544434, 0.010343903541564942, 0.010357279777526856, 0.010317824363708495, 0.010393152236938477, 0.010357184410095215, 0.010511839866638183, 0.010324159622192382, 0.010362943649291993, 0.010436896324157716, 0.010339743614196777, 0.010360639572143554, 0.010478495597839355, 0.010359935760498047, 0.01035750389099121, 0.01033625602722168, 0.010346495628356933, 0.01032806396484375, 0.01036083221435547, 0.010365056037902833, 0.01041327953338623, 0.010388128280639649, 0.010326239585876465, 0.010393376350402833, 0.01036083221435547, 0.010356191635131837, 0.010344991683959961, 0.010332159996032715, 0.010345472335815429, 0.010373536109924317, 0.010391712188720703, 0.010408608436584473, 0.010327839851379394, 0.010400927543640137, 0.010392031669616698, 0.010395936012268066, 0.01033955192565918, 0.01033510398864746, 0.010354496002197266, 0.010399231910705567, 0.010343104362487794, 0.010349632263183594, 0.010306495666503906, 0.010354144096374512, 0.010291744232177734]",tokens/s,95.76427331947264,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,888.766464,576.585728,0.0,174.063616,172.57984,s,1,7.06728955078125,7.06728955078125,0.0,7.06728955078125,7.06728955078125,7.06728955078125,7.06728955078125,[7.06728955078125],,kWh,3.848378070824765e-06,4.172718818290831e-07,1.8033347759932594e-06,6.068984728647108e-06,,MB,1238.413312,647.888896,0.0,234.881024,215.589888,s,26,0.28235529708862306,0.010859819118793194,0.0001360065044233193,0.010824063777923585,0.01089736032485962,0.010944176197052001,0.011372408151626587,"[0.010889856338500977, 0.010814080238342285, 0.010840895652770997, 0.011510784149169923, 0.010798591613769531, 0.010810015678405761, 0.010818623542785644, 0.01086575984954834, 0.01083334445953369, 0.010858400344848633, 0.010776512145996094, 0.010777791976928712, 0.010802656173706056, 0.010957280158996581, 0.010801600456237794, 0.010861824035644532, 0.010904864311218261, 0.010822431564331055, 0.010816224098205566, 0.01080508804321289, 0.010826944351196289, 0.010860383987426758, 0.010821887969970703, 0.010851008415222168, 0.010825695991516114, 0.010802751541137695]",tokens/s,23573.136642486566,kWh,3.3731653584490974e-07,3.718636989385528e-08,2.2249504584296503e-07,5.969979515817301e-07,tokens/kWh,428812191.60255885,MB,1269.600256,649.986048,0.0,236.978176,215.592448,s,26,9.943972259521482,0.382460471520057,0.002334617313673642,0.38179534912109375,0.3851622772216797,0.3864168395996094,0.3896744842529297,"[0.3906837158203125, 0.38401214599609373, 0.3808473205566406, 0.3822474365234375, 0.38050289916992186, 0.3818229675292969, 0.3813818664550781, 0.3824582824707031, 0.3809364929199219, 0.38148577880859375, 0.38115509033203127, 0.38112432861328127, 0.38056427001953125, 0.38459756469726564, 0.3828178100585938, 0.38664678955078124, 0.38572698974609376, 0.38178204345703126, 0.38230194091796876, 0.382131591796875, 0.3845191345214844, 0.38121914672851565, 0.37982711791992185, 0.38180865478515624, 0.38026632690429685, 0.38110455322265624]",tokens/s,164.72290521844465,kWh,1.0857227489152671e-05,1.1973870470431928e-06,3.990399270971405e-06,1.604501380716727e-05,tokens/kWh,3926453.4613150693,,s,1638,9.932118654727926,0.006063564502275908,0.00014978224178074478,0.006043247938156127,0.0061182911396026616,0.006183193588256836,0.006547319655418395,"[0.005787615776062012, 0.006090784072875976, 0.006064127922058105, 0.006203392028808594, 0.006031648159027099, 0.006339360237121582, 0.006200255870819092, 0.007366655826568603, 0.008054176330566406, 0.007906976222991944, 0.008039584159851074, 0.006973216056823731, 0.006046751976013184, 0.00607692813873291, 0.00603718376159668, 0.006059807777404785, 0.006022143840789795, 0.006048096179962158, 0.006049439907073974, 0.006047743797302246, 0.006047743797302246, 0.006024864196777344, 0.006041984081268311, 0.0060432000160217285, 0.006023231983184814, 0.006009183883666992, 0.006033408164978027, 0.006023359775543213, 0.0060557441711425785, 0.00601907205581665, 0.006060128211975098, 0.0059996480941772465, 0.00602620792388916, 0.005994400024414062, 0.006024576187133789, 0.006136320114135742, 0.006082528114318848, 0.006020287990570069, 0.006144256114959717, 0.0061324481964111326, 0.006102431774139405, 0.006039552211761475, 0.006145792007446289, 0.006085567951202393, 0.0061329278945922855, 0.006154975891113281, 0.006115392208099365, 0.006057919979095459, 0.006098944187164307, 0.006043456077575684, 0.006085855960845947, 0.006119616031646728, 0.006017312049865723, 0.006053760051727295, 0.00609494400024414, 0.0060637760162353515, 
0.006032256126403809, 0.006077760219573974, 0.006015679836273193, 0.006069503784179688, 0.00613862419128418, 0.0060804481506347655, 0.006021183967590332, 0.005810175895690918, 0.006082560062408447, 0.006066112041473388, 0.006144192218780518, 0.006043519973754883, 0.0060312957763671874, 0.0060375361442565915, 0.006043968200683594, 0.006358751773834229, 0.006122591972351074, 0.006136288166046142, 0.006035903930664063, 0.006100287914276123, 0.0060581440925598145, 0.006089248180389404, 0.006034624099731445, 0.0060629119873046875, 0.006041152000427246, 0.006096960067749024, 0.0060174398422241215, 0.006148064136505127, 0.0060432000160217285, 0.006068160057067871, 0.00607692813873291, 0.0060702719688415525, 0.006014624118804932, 0.0060668478012084965, 0.006057663917541504, 0.006077600002288818, 0.006091968059539795, 0.006129631996154785, 0.0060720000267028805, 0.0061123199462890625, 0.006087615966796875, 0.006021120071411133, 0.006082143783569336, 0.006125984191894532, 0.006076000213623047, 0.00604201602935791, 0.006158336162567139, 0.006006879806518555, 0.006094751834869384, 0.006017024040222168, 0.006479872226715088, 0.006020512104034424, 0.0060646719932556156, 0.006019455909729004, 0.006082111835479736, 0.006013408184051514, 0.006067872047424316, 0.00605785608291626, 0.006053567886352539, 0.006076863765716553, 0.006162335872650146, 0.006117472171783447, 0.006041920185089112, 0.006315711975097656, 0.006047743797302246, 0.0061190400123596195, 0.006029695987701416, 0.006090752124786377, 0.0064134721755981445, 0.0061305279731750486, 0.005740255832672119, 0.006068031787872314, 0.006023359775543213, 0.006074463844299317, 0.0060531520843505856, 0.006147007942199707, 0.006000383853912354, 0.0060581440925598145, 0.006000192165374756, 0.006060256004333496, 0.006010816097259521, 0.006039040088653564, 0.006025792121887207, 0.006050848007202149, 0.0059847040176391605, 0.0060462398529052734, 0.005980160236358643, 0.006039680004119873, 0.005998464107513428, 0.006090976238250732, 0.005998239994049072, 0.00615008020401001, 0.005992640018463135, 0.0060713281631469725, 0.006003680229187011, 0.006045695781707764, 0.005990399837493897, 0.006059904098510743, 0.006001023769378662, 0.0060395197868347164, 0.0060067839622497555, 0.006039487838745117, 0.006012191772460937, 0.006011648178100586, 0.006016416072845459, 0.006057983875274659, 0.005994912147521973, 0.006041600227355957, 0.005987360000610352, 0.006161375999450684, 0.006034719944000244, 0.006287871837615967, 0.006102272033691406, 0.006134943962097168, 0.006110400199890137, 0.006019711971282959, 0.006084896087646484, 0.006024256229400635, 0.006079360008239746, 0.006010176181793213, 0.006066720008850098, 0.006014912128448486, 0.006060031890869141, 0.006018400192260742, 0.006060160160064697, 0.005984223842620849, 0.006031936168670655, 0.005983263969421387, 0.0060274238586425785, 0.005994944095611572, 0.0060358400344848635, 0.0059985918998718265, 0.0060553278923034665, 0.005744927883148193, 0.0060535998344421384, 0.00601039981842041, 0.006066207885742188, 0.006062848091125488, 0.006047455787658691, 0.006022719860076904, 0.0060433602333068845, 0.006000639915466309, 0.006206143856048584, 0.006045407772064209, 0.006055232048034668, 0.006010176181793213, 0.006098591804504395, 0.006060031890869141, 0.006086656093597412, 0.006072319984436035, 0.006053887844085694, 0.0061003842353820805, 0.006095359802246094, 0.0060826559066772465, 0.006043615818023681, 0.006078112125396729, 0.006066207885742188, 0.006076767921447754, 0.006079487800598145, 0.006073344230651856, 
0.006032383918762207, 0.00606063985824585, 0.006044320106506348, 0.006070015907287597, 0.006031551837921143, 0.006064064025878906, 0.006011104106903076, 0.006078112125396729, 0.006049791812896729, 0.006063744068145752, 0.0061320638656616215, 0.006162335872650146, 0.006185152053833008, 0.0060999679565429685, 0.006036416053771972, 0.006072319984436035, 0.006053184032440186, 0.00603113603591919, 0.006060959815979004, 0.006053887844085694, 0.006068128108978272, 0.006055903911590576, 0.006088831901550293, 0.006007808208465576, 0.0060850558280944825, 0.006101247787475586, 0.006113599777221679, 0.0060293121337890625, 0.006055935859680176, 0.006045695781707764, 0.006080512046813965, 0.006062079906463623, 0.006074368000030517, 0.006031360149383545, 0.00606822395324707, 0.005999743938446045, 0.005785888195037842, 0.00602678394317627, 0.006107264041900635, 0.0060167679786682126, 0.006048384189605713, 0.005990623950958252, 0.006030335903167725, 0.005997312068939209, 0.006113408088684082, 0.006012415885925293, 0.0060339198112487795, 0.006011072158813476, 0.006039360046386719, 0.00608460807800293, 0.006019328117370605, 0.005979328155517578, 0.0060503678321838375, 0.005988575935363769, 0.00604099178314209, 0.00599283218383789, 0.006041600227355957, 0.00602726411819458, 0.006043744087219238, 0.006003615856170655, 0.006108160018920898, 0.0060024957656860355, 0.006015168190002441, 0.005996543884277344, 0.0060776638984680175, 0.005998784065246582, 0.0060234560966491695, 0.006007071971893311, 0.006045983791351318, 0.006026624202728271, 0.006064095973968506, 0.006003327846527099, 0.0061253437995910645, 0.006016608238220215, 0.00605020809173584, 0.006004159927368164, 0.006050432205200195, 0.006055871963500977, 0.006051839828491211, 0.00601087999343872, 0.006047743797302246, 0.006026815891265869, 0.006051743984222412, 0.006030144214630127, 0.006073215961456299, 0.006023295879364014, 0.006056672096252442, 0.006017024040222168, 0.006062079906463623, 0.0060661759376525876, 0.006033408164978027, 0.006051839828491211, 0.006033408164978027, 0.006041600227355957, 0.0060704641342163084, 0.006076384067535401, 0.006003583908081055, 0.006072703838348389, 0.00599510383605957, 0.006322944164276123, 0.00602726411819458, 0.006045152187347412, 0.006019487857818604, 0.006024479866027832, 0.005994783878326416, 0.006035999774932861, 0.0060351362228393554, 0.006045728206634521, 0.006074368000030517, 0.006281216144561768, 0.006011199951171875, 0.0061125121116638184, 0.0061569600105285645, 0.006207583904266357, 0.006178080081939697, 0.006101247787475586, 0.006035999774932861, 0.006074304103851318, 0.006022528171539306, 0.0060989117622375485, 0.006032032012939453, 0.006053887844085694, 0.006062079906463623, 0.00608460807800293, 0.006025216102600098, 0.00603875207901001, 0.005995296001434326, 0.006046879768371582, 0.0060035519599914555, 0.006018815994262695, 0.006043744087219238, 0.006025375843048096, 0.006041600227355957, 0.006067488193511963, 0.006087391853332519, 0.0060037441253662106, 0.006040031909942627, 0.00599283218383789, 0.006023200035095215, 0.005996640205383301, 0.006038784027099609, 0.006002975940704346, 0.00604857587814331, 0.006169695854187012, 0.006060095787048339, 0.005998688220977783, 0.00604201602935791, 0.006016255855560303, 0.006036223888397217, 0.005992415904998779, 0.006012959957122803, 0.0060067839622497555, 0.006033664226531983, 0.006004479885101319, 0.006047743797302246, 0.00602726411819458, 0.006068511962890625, 0.005982016086578369, 0.006059103965759277, 0.006026112079620362, 0.006094783782958984, 
0.006014976024627685, 0.005745535850524903, 0.006012544155120849, 0.006001023769378662, 0.0059894719123840335, 0.006011807918548584, 0.005996543884277344, 0.006060031890869141, 0.006014048099517822, 0.0060282878875732426, 0.006035488128662109, 0.006040703773498535, 0.0060198397636413575, 0.0060061440467834475, 0.00597379207611084, 0.0062492799758911135, 0.00606825590133667, 0.006089920043945312, 0.006021632194519043, 0.006066495895385742, 0.0060002880096435545, 0.00603769588470459, 0.006004479885101319, 0.0060440640449523925, 0.006008128166198731, 0.0060217280387878415, 0.006004767894744873, 0.006053952217102051, 0.006000063896179199, 0.006281472206115722, 0.006604159832000732, 0.00607916784286499, 0.006050047874450684, 0.006047743797302246, 0.006137856006622314, 0.006058015823364258, 0.00608787202835083, 0.0060275840759277345, 0.006047616004943848, 0.0060028800964355465, 0.006095263957977295, 0.006009984016418457, 0.006044896125793457, 0.005994143962860107, 0.006043615818023681, 0.00598960018157959, 0.006031392097473144, 0.005992320060729981, 0.006050303936004638, 0.00599507188796997, 0.006303584098815918, 0.0060267200469970705, 0.006056735992431641, 0.005996479988098145, 0.006115007877349854, 0.005996255874633789, 0.006021024227142334, 0.006017536163330078, 0.006037439823150635, 0.006001920223236084, 0.006057888031005859, 0.005993247985839844, 0.00605020809173584, 0.005983200073242188, 0.005791999816894531, 0.00600710391998291, 0.006041056156158448, 0.006020607948303222, 0.006060768127441407, 0.005992447853088379, 0.006041088104248047, 0.006011616230010986, 0.00603113603591919, 0.006003967761993408, 0.006027999877929687, 0.005988063812255859, 0.00602563190460205, 0.005990272045135498, 0.006041632175445556, 0.0060128321647644044, 0.0060457921028137206, 0.00601907205581665, 0.006042784214019776, 0.006013184070587158, 0.006064799785614014, 0.006043680191040039, 0.0060496959686279295, 0.006014207839965821, 0.006078783988952637, 0.006003488063812256, 0.006053535938262939, 0.006112832069396973, 0.0061075201034545895, 0.006107103824615478, 0.006061439990997315, 0.0060119681358337404, 0.0060546879768371585, 0.005998816013336182, 0.00607913589477539, 0.006012928009033203, 0.0060661759376525876, 0.006074656009674073, 0.006092512130737305, 0.0061538882255554195, 0.006055456161499023, 0.006036287784576416, 0.00602726411819458, 0.006059936046600342, 0.0060152640342712406, 0.006053696155548096, 0.006207488059997559, 0.006073919773101807, 0.006039999961853028, 0.006038559913635254, 0.006007999897003174, 0.006327424049377441, 0.0061654081344604495, 0.006145792007446289, 0.006055168151855469, 0.006127711772918701, 0.006048736095428467, 0.0064899840354919434, 0.006317376136779785, 0.006074880123138428, 0.006073823928833008, 0.00601580810546875, 0.006096608161926269, 0.005755551815032959, 0.005992608070373535, 0.006043327808380127, 0.006023327827453614, 0.006041056156158448, 0.005990943908691406, 0.006061567783355713, 0.006076288223266601, 0.006051712036132813, 0.0060479679107666015, 0.006041952133178711, 0.006043583869934082, 0.0060291199684143065, 0.006047167778015137, 0.006048223972320557, 0.006246880054473877, 0.00603715181350708, 0.00608735990524292, 0.006000351905822754, 0.006061056137084961, 0.006013823986053467, 0.006075967788696289, 0.0060178241729736326, 0.006124544143676758, 0.006021696090698242, 0.0060776958465576176, 0.006005311965942383, 0.006047743797302246, 0.006021599769592285, 0.006051775932312012, 0.006050079822540283, 0.006089439868927002, 0.0060339198112487795, 0.006077023983001709, 
0.006014111995697021, 0.006060351848602295, 0.00605836820602417, 0.006125631809234619, 0.006039552211761475, 0.006113471984863281, 0.006014783859252929, 0.006031360149383545, 0.0059985918998718265, 0.006025407791137695, 0.006039360046386719, 0.006000800132751465, 0.006036384105682373, 0.0060280637741088865, 0.006047167778015137, 0.0060054721832275395, 0.006080512046813965, 0.005990399837493897, 0.006040927886962891, 0.0059934401512146, 0.006026591777801514, 0.006028768062591553, 0.006060832023620606, 0.00601094388961792, 0.0060859198570251466, 0.006017536163330078, 0.0060152320861816405, 0.0059996161460876465, 0.006052864074707031, 0.005774879932403564, 0.006011328220367432, 0.0060764479637146, 0.00601203203201294, 0.006050335884094238, 0.005997119903564453, 0.006044864177703857, 0.005994175910949707, 0.006034527778625488, 0.005981088161468506, 0.006103968143463135, 0.005973599910736084, 0.006020959854125977, 0.005980544090270996, 0.006015168190002441, 0.005992447853088379, 0.006037504196166992, 0.006049791812896729, 0.006025279998779297, 0.006006752014160156, 0.0060207037925720215, 0.0060368962287902835, 0.00625273609161377, 0.006064159870147705, 0.006029056072235107, 0.006062623977661133, 0.006007264137268067, 0.0060928001403808595, 0.0060026879310607914, 0.006046080112457276, 0.006115295886993408, 0.0060529599189758304, 0.006023744106292725, 0.00603545618057251, 0.006012224197387696, 0.006234367847442627, 0.006017119884490967, 0.006443039894104004, 0.006013023853302002, 0.00610697603225708, 0.006011263847351074, 0.00605404806137085, 0.006006624221801758, 0.006042751789093017, 0.006015071868896485, 0.006064799785614014, 0.006105184078216553, 0.0060109119415283204, 0.00601907205581665, 0.006059807777404785, 0.006053088188171386, 0.006066431999206543, 0.006054624080657959, 0.006025119781494141, 0.006082367897033691, 0.006016831874847412, 0.00606873607635498, 0.006008416175842285, 0.0060829758644104, 0.006053728103637695, 0.006129759788513184, 0.006021183967590332, 0.006098048210144043, 0.005734399795532226, 0.00602246379852295, 0.006032256126403809, 0.005996064186096191, 0.006046207904815673, 0.00600867223739624, 0.006029248237609863, 0.00602726411819458, 0.0060293121337890625, 0.006008959770202636, 0.006030975818634033, 0.00602342414855957, 0.006044896125793457, 0.00602569580078125, 0.006035776138305664, 0.0060644478797912595, 0.006065855979919434, 0.006264832019805908, 0.0059937920570373535, 0.006060224056243896, 0.005999104022979736, 0.006049119949340821, 0.005987040042877197, 0.00618284797668457, 0.00617632007598877, 0.0061868481636047365, 0.006048448085784912, 0.006066431999206543, 0.006039199829101563, 0.006088479995727539, 0.006037727832794189, 0.0060702719688415525, 0.006019008159637451, 0.006076000213623047, 0.006019552230834961, 0.006055903911590576, 0.005997663974761963, 0.006067488193511963, 0.0060269122123718264, 0.0060702719688415525, 0.006033408164978027, 0.006021120071411133, 0.006105023860931397, 0.0060416641235351565, 0.006065375804901123, 0.006033567905426025, 0.006042240142822265, 0.005987904071807861, 0.006023295879364014, 0.005994624137878418, 0.006064320087432861, 0.006097983837127685, 0.006080927848815918, 0.006006303787231445, 0.0060908799171447755, 0.006017920017242432, 0.006086656093597412, 0.00599619197845459, 0.006040192127227783, 0.0060018239021301266, 0.006047616004943848, 0.005980864048004151, 0.006039552211761475, 0.005724991798400879, 0.0059976959228515625, 0.00603766393661499, 0.006003424167633057, 0.006012159824371338, 0.005995264053344726, 
0.006033152103424072, 0.006052320003509522, 0.006049376010894775, 0.0060663681030273435, 0.0060488319396972655, 0.00606447982788086, 0.006062208175659179, 0.006109663963317871, 0.006074368000030517, 0.006109183788299561, 0.006040736198425293, 0.006095712184906006, 0.0060269122123718264, 0.006092576026916504, 0.00601964807510376, 0.006332704067230225, 0.006028192043304443, 0.006120255947113037, 0.0060160961151123045, 0.006092927932739258, 0.006086559772491455, 0.006078879833221435, 0.006015456199645996, 0.006075488090515137, 0.006025152206420898, 0.006050144195556641, 0.006011519908905029, 0.006040863990783692, 0.0060198078155517575, 0.0061296639442443845, 0.006040895938873291, 0.0060375680923461916, 0.006029151916503907, 0.006015423774719238, 0.006054240226745606, 0.00602246379852295, 0.00608460807800293, 0.006031424045562744, 0.006051487922668457, 0.0059905281066894535, 0.006048960208892822, 0.005993535995483398, 0.006059648036956787, 0.005989344120025635, 0.006059167861938476, 0.005981023788452148, 0.00606822395324707, 0.005980447769165039, 0.0060433602333068845, 0.006100992202758789, 0.0061265921592712404, 0.005981184005737304, 0.006057983875274659, 0.005994495868682862, 0.0060050559043884275, 0.005973120212554931, 0.006015552043914795, 0.005725887775421143, 0.005989215850830078, 0.006077727794647217, 0.0059807682037353515, 0.006072351932525635, 0.005980160236358643, 0.00602623987197876, 0.005972640037536621, 0.006031968116760254, 0.005974112033843994, 0.006046751976013184, 0.0059889922142028805, 0.006051839828491211, 0.006009056091308594, 0.005992224216461181, 0.006021312236785889, 0.006018080234527588, 0.006005119800567627, 0.006027711868286133, 0.006023136138916016, 0.0060109119415283204, 0.006115295886993408, 0.006014463901519776, 0.006012767791748047, 0.00604204797744751, 0.006031583786010742, 0.0060702719688415525, 0.006075551986694336, 0.006005856037139892, 0.006049536228179932, 0.006033408164978027, 0.006051839828491211, 0.005975679874420166, 0.0060498881340026855, 0.006011168003082276, 0.006061728000640869, 0.006076767921447754, 0.006045055866241455, 0.006060480117797851, 0.00605628776550293, 0.006004127979278564, 0.0060522880554199215, 0.0060228161811828615, 0.006066527843475342, 0.005994431972503662, 0.006047808170318604, 0.006014880180358887, 0.006016191959381104, 0.005993375778198242, 0.006053184032440186, 0.005994239807128906, 0.006060671806335449, 0.006002111911773682, 0.006132415771484375, 0.00604588794708252, 0.00605785608291626, 0.006021247863769531, 0.006057983875274659, 0.006000639915466309, 0.006053760051727295, 0.006021152019500733, 0.006352128028869629, 0.006160384178161621, 0.005809567928314209, 0.006047455787658691, 0.006079423904418946, 0.006008768081665039, 0.006088704109191895, 0.006030816078186035, 0.00610972785949707, 0.006049215793609619, 0.006058559894561767, 0.00601087999343872, 0.00607260799407959, 0.006014592170715332, 0.006013023853302002, 0.006043647766113281, 0.006019199848175049, 0.006063551902770996, 0.0060239357948303224, 0.00602396821975708, 0.00603439998626709, 0.0060778560638427735, 0.006003071784973144, 0.006059904098510743, 0.006141439914703369, 0.006074368000030517, 0.006038303852081299, 0.006094848155975342, 0.006017312049865723, 0.006059743881225586, 0.00603718376159668, 0.006041632175445556, 0.006035744190216064, 0.006055424213409424, 0.006004703998565674, 0.006048287868499756, 0.006001920223236084, 0.006034463882446289, 0.006428383827209473, 0.006118559837341308, 0.006073184013366699, 0.008145024299621582, 0.007181856155395508, 
0.0061645121574401855, 0.00600710391998291, 0.006060031890869141, 0.0059894719123840335, 0.006027232170104981, 0.0059911680221557614, 0.006049983978271485, 0.005994495868682862, 0.006047776222229004, 0.006008480072021484, 0.006070015907287597, 0.0060032639503479, 0.006063231945037842, 0.006034304141998291, 0.0060989117622375485, 0.006004127979278564, 0.0060648322105407715, 0.006020063877105713, 0.006073344230651856, 0.005980095863342285, 0.005999936103820801, 0.005993184089660645, 0.005765088081359863, 0.005968287944793701, 0.006033984184265137, 0.005998271942138672, 0.006034912109375, 0.006006656169891357, 0.0060258560180664065, 0.006011007785797119, 0.006026559829711914, 0.006002304077148438, 0.006040544033050537, 0.005994048118591309, 0.006097184181213379, 0.00598031997680664, 0.006059328079223633, 0.005988800048828125, 0.00604099178314209, 0.006007999897003174, 0.006047391891479492, 0.006012351989746094, 0.006042175769805909, 0.006002111911773682, 0.0060382080078125, 0.005991968154907226, 0.006031712055206299, 0.006543231964111328, 0.006342879772186279, 0.0065484800338745115, 0.00633903980255127, 0.006470431804656982, 0.0060330557823181155, 0.006057919979095459, 0.006031424045562744, 0.006047327995300293, 0.006007199764251709, 0.006043647766113281, 0.0060070080757141115, 0.006059103965759277, 0.006009215831756592, 0.0060685439109802244, 0.006108352184295654, 0.006067008018493652, 0.006076416015625, 0.006020127773284912, 0.00602620792388916, 0.006017024040222168, 0.006170207977294922, 0.006001183986663819, 0.006103072166442871, 0.005976191997528076, 0.0060490560531616215, 0.005961376190185547, 0.006158976078033447, 0.006014944076538086, 0.006082272052764893, 0.0060063362121582035, 0.006116288185119629, 0.006203360080718994, 0.00614137601852417, 0.006074528217315674, 0.006071775913238525, 0.005997504234313965, 0.0060661759376525876, 0.0057853121757507325, 0.0060603199005126954, 0.006020544052124023, 0.0060728960037231446, 0.006010784149169922, 0.006039807796478272, 0.007252927780151367, 0.007109632015228271, 0.007026400089263916, 0.006438975811004639, 0.006423808097839355, 0.006083456039428711, 0.0060366721153259275, 0.0060680961608886716, 0.00602188777923584, 0.006098559856414795, 0.006046400070190429, 0.006082399845123291, 0.006039584159851074, 0.0060713281631469725, 0.0059985918998718265, 0.006061024188995361, 0.006222976207733154, 0.0060648322105407715, 0.006172031879425049, 0.006054111957550049, 0.00613804817199707, 0.006058656215667725, 0.006103968143463135, 0.006015840053558349, 0.006068448066711426, 0.006050655841827393, 0.006310848236083985, 0.006051328182220459, 0.006212160110473632, 0.006023104190826416, 0.006124703884124756, 0.006032224178314209, 0.006086207866668701, 0.006035871982574463, 0.006090144157409668, 0.006077055931091309, 0.006082880020141602, 0.006047423839569092, 0.006078464031219482, 0.00602294397354126, 0.006083903789520263, 0.006026144027709961, 0.006031455993652343, 0.006086880207061767, 0.006028992176055908, 0.006062079906463623, 0.006059711933135987, 0.006080639839172363, 0.00605398416519165, 0.006205215930938721, 0.006119264125823975, 0.006115551948547363, 0.006032703876495361, 0.006066336154937744, 0.006017248153686524, 0.006056511878967285, 0.0059985918998718265, 0.00578326416015625, 0.006049824237823486, 0.006055903911590576, 0.0060004801750183105, 0.006058495998382568, 0.005985280036926269, 0.006060863971710205, 0.00604918384552002, 0.006664063930511475, 0.0064633598327636715, 0.006694719791412353, 0.006132607936859131, 0.00602668809890747, 
0.006405792236328125, 0.006423423767089844, 0.006117023944854736, 0.006138400077819824, 0.006131840229034424, 0.0060655040740966795, 0.0061355199813842775, 0.006070655822753906, 0.00608403205871582, 0.006075232028961181, 0.006059967994689942, 0.006064191818237304, 0.006342656135559082, 0.006176224231719971, 0.00605676794052124, 0.00612656021118164, 0.00616319990158081, 0.006143519878387451, 0.006051519870758057, 0.006136608123779297, 0.006025311946868897, 0.0061101441383361816, 0.006021440029144287, 0.006077087879180908, 0.006043647766113281, 0.006102431774139405, 0.006019455909729004, 0.006072415828704834, 0.0060330557823181155, 0.006081056118011474, 0.006060256004333496, 0.006038368225097656, 0.00604204797744751, 0.0060522880554199215, 0.006078559875488282, 0.006002431869506836, 0.006166816234588623, 0.0060618557929992675, 0.006079904079437256, 0.006019711971282959, 0.0064182720184326175, 0.006020351886749268, 0.006087232112884522, 0.0060174398422241215, 0.0060351681709289555, 0.006087264060974121, 0.006104159832000733, 0.006189343929290772, 0.006060544013977051, 0.006044991970062256, 0.005783967971801758, 0.005993279933929443, 0.006067999839782715, 0.006036863803863525, 0.0060544638633728024, 0.006037504196166992, 0.006071807861328125, 0.006026016235351562, 0.006087520122528076, 0.006011199951171875, 0.006141503810882568, 0.00602623987197876, 0.006057024002075195, 0.0060059199333190915, 0.0061519680023193355, 0.006017024040222168, 0.006022496223449707, 0.006015647888183593, 0.006055136203765869, 0.006005536079406739, 0.006072319984436035, 0.005998528003692627, 0.006043712139129639, 0.006033408164978027, 0.006033408164978027, 0.00608019208908081, 0.006088479995727539, 0.006022719860076904, 0.006052832126617432, 0.006053887844085694, 0.006021120071411133, 0.0060713601112365725, 0.006046207904815673, 0.00610748815536499, 0.006055391788482666, 0.006060927867889404, 0.005994239807128906, 0.006086656093597412, 0.006013152122497559, 0.006043519973754883, 0.006010784149169922, 0.0060449280738830566, 0.0059779839515686035, 0.006308800220489502, 0.00654534387588501, 0.006043647766113281, 0.006036928176879883, 0.006058176040649414, 0.0060013442039489745, 0.006034175872802735, 0.006015552043914795, 0.006018720149993897, 0.006032095909118652, 0.006090432167053223, 0.005986495971679688, 0.006066304206848144, 0.006026463985443115, 0.0061181759834289555, 0.006060031890869141, 0.00602678394317627, 0.006001183986663819, 0.006037439823150635, 0.0061337599754333495, 0.005791584014892578, 0.006064288139343262, 0.00606601619720459, 0.006008992195129395, 0.006320511817932129, 0.006171584129333496, 0.006055647850036621, 0.006062880039215088, 0.006054240226745606, 0.006094687938690185, 0.006110527992248535, 0.006136256217956543, 0.006127423763275146, 0.006064127922058105, 0.006019519805908203, 0.006094848155975342, 0.006023168087005615, 0.006098207950592041, 0.006074111938476562, 0.006039648056030273, 0.006040768146514892, 0.006069952011108398, 0.006000063896179199, 0.00608022403717041, 0.006064095973968506, 0.006087615966796875, 0.006059455871582031, 0.006091135978698731, 0.006085984230041504, 0.006075168132781982, 0.006260255813598633, 0.006046175956726074, 0.006088704109191895, 0.006035327911376953, 0.006029568195343018, 0.006011807918548584, 0.006049088001251221, 0.0060002880096435545, 0.0060696320533752445, 0.006034304141998291, 0.006054944038391114, 0.006001215934753418, 0.006131455898284912, 0.005984608173370361, 0.006060095787048339, 0.006205440044403076, 0.006123007774353028, 0.006021984100341797, 
0.0060431680679321285, 0.005984384059906006, 0.006043647766113281, 0.006016032218933105, 0.006044640064239502, 0.006059648036956787, 0.006068607807159424, 0.005992544174194336, 0.006069183826446533, 0.005983200073242188, 0.006033567905426025, 0.006018911838531494, 0.006053696155548096, 0.006043263912200928, 0.006060575962066651, 0.0057429118156433106, 0.006039391994476318, 0.006019584178924561, 0.006059679985046387, 0.005991680145263672, 0.00606873607635498, 0.0060620479583740235, 0.006092959880828857, 0.006134143829345703, 0.006117343902587891, 0.006081984043121338, 0.006101439952850342, 0.00604150390625, 0.006238111972808838, 0.006162176132202149, 0.006189407825469971, 0.006016543865203857, 0.00608732795715332, 0.00606825590133667, 0.00604860782623291, 0.006067008018493652, 0.006031519889831543, 0.006014624118804932, 0.006025536060333252, 0.006056064128875732, 0.0060005121231079105, 0.006042943954467773, 0.00601094388961792, 0.006034048080444336, 0.0063181757926940914, 0.006098847866058349, 0.006000832080841064, 0.006086463928222656, 0.006041823863983154, 0.006052735805511474, 0.0060284481048583985, 0.006051583766937256, 0.005989952087402344, 0.006049376010894775, 0.006017888069152832, 0.006042880058288574, 0.006019392013549805, 0.006438464164733887, 0.006026016235351562, 0.006072415828704834, 0.00600710391998291, 0.0060264959335327144, 0.0060104641914367676, 0.006042463779449463, 0.0060026879310607914, 0.006086656093597412, 0.006016191959381104, 0.00603436803817749, 0.006045567989349365, 0.00612556791305542, 0.006055712223052978, 0.006025440216064453, 0.006041823863983154, 0.006012864112854004, 0.006092639923095703, 0.006021120071411133, 0.00602950382232666, 0.006025375843048096, 0.005788064002990723, 0.006039360046386719, 0.006015744209289551, 0.006067967891693115, 0.005958687782287597, 0.006095136165618896, 0.006040256023406982, 0.006047455787658691, 0.006013216018676758, 0.006060031890869141, 0.006000639915466309, 0.0060702719688415525, 0.006049791812896729, 0.006001728057861328, 0.006058495998382568, 0.006040256023406982, 0.0060167679786682126, 0.0060535998344421384, 0.006082592010498047, 0.0060234880447387696, 0.0060631041526794435, 0.006022079944610596, 0.006037216186523437, 0.00602294397354126, 0.006077280044555664, 0.006014656066894531, 0.006076384067535401, 0.006030816078186035, 0.006181759834289551, 0.006024543762207031, 0.006074687957763672, 0.006008831977844238, 0.006070112228393555, 0.00602947187423706, 0.006039872169494629, 0.0060310401916503905, 0.006043647766113281, 0.0059780158996582036, 0.006035295963287354, 0.00608896017074585, 0.006076352119445801, 0.006281280040740967, 0.0060696001052856444, 0.006148767948150635, 0.00606822395324707, 0.0060026879310607914, 0.006077727794647217, 0.0060199999809265135, 0.006102848052978516, 0.006029376029968262, 0.006039487838745117, 0.006586527824401856, 0.007223135948181152, 0.00727785587310791, 0.006093120098114014, 0.006016704082489013, 0.00608464002609253, 0.005995200157165527, 0.006083936214447022, 0.006019423961639405, 0.006166143894195557, 0.006046400070190429, 0.006086656093597412, 0.005754528045654297, 0.006072671890258789, 0.005994495868682862, 0.006048768043518066, 0.00599510383605957, 0.006031775951385498, 0.005989920139312744, 0.006021440029144287, 0.005984320163726807, 0.006024767875671386, 0.005964320182800293, 0.00603545618057251, 0.005994495868682862, 0.006025216102600098, 0.0060143680572509765, 0.006061984062194824, 0.00600713586807251, 0.006055776119232178, 0.0060052480697631834, 0.006033408164978027, 
0.006002336025238037, 0.0060481600761413574, 0.006012864112854004, 0.006088223934173584, 0.006006368160247803, 0.006074783802032471, 0.0060150399208068845, 0.006059552192687988, 0.005998879909515381, 0.006060383796691895, 0.006002943992614746, 0.006416384220123291, 0.006108960151672363, 0.006096384048461914, 0.006095583915710449, 0.006004735946655273, 0.006041408061981201, 0.006049119949340821, 0.0060546879768371585, 0.00601251220703125, 0.006042079925537109, 0.006000927925109863, 0.0060349760055541995, 0.006008831977844238, 0.006053760051727295, 0.005980480194091797, 0.006072319984436035, 0.006088543891906739, 0.00612172794342041, 0.006110432147979736, 0.006103744029998779, 0.006037216186523437, 0.0060555520057678225, 0.006011551856994629, 0.006078432083129883, 0.006006720066070556, 0.006071712017059326, 0.006214335918426513, 0.00609600019454956, 0.0060424962043762204, 0.006066112041473388, 0.006006847858428955, 0.006060031890869141, 0.005874591827392578, 0.006071616172790528, 0.005999104022979736, 0.006055935859680176, 0.0060182719230651854, 0.006040544033050537, 0.006016223907470703, 0.006038303852081299, 0.006004384040832519, 0.006030752182006836, 0.0060078721046447755, 0.006041376113891601, 0.006018527984619141, 0.006051775932312012, 0.0060013442039489745, 0.006141952037811279, 0.0059968318939208985, 0.006064864158630371, 0.006032383918762207, 0.006078464031219482, 0.006033408164978027, 0.006041183948516846, 0.005988096237182617, 0.006046720027923584, 0.006018208026885986, 0.0060503678321838375, 0.006043583869934082, 0.006029024124145508, 0.0060991358757019045, 0.006039648056030273, 0.006039552211761475, 0.006000895977020263, 0.006063519954681396, 0.006000127792358399, 0.0060301761627197265, 0.005999936103820801, 0.006015679836273193, 0.006000639915466309, 0.006023168087005615, 0.005979455947875976, 0.006062975883483887, 0.005984064102172852, 0.006023071765899658, 0.005994239807128906, 0.006009183883666992, 0.005962944030761719, 0.005998496055603027, 0.005983136177062988, 0.0060293121337890625, 0.005973440170288086, 0.0060481600761413574, 0.005978271961212158, 0.006032512187957763, 0.005995007991790771, 0.006021503925323486, 0.005998847961425781, 0.0060208640098571775, 0.005977215766906738, 0.006017920017242432, 0.005971968173980713, 0.00612556791305542, 0.0059985918998718265, 0.006039552211761475, 0.00581993579864502, 0.006005440235137939, 0.006035103797912598, 0.005994847774505615, 0.006024288177490234, 0.005997471809387207, 0.006043231964111328, 0.006037471771240234, 0.006051519870758057, 0.0060239357948303224, 0.006038784027099609, 0.006048511981964111, 0.00601855993270874, 0.006199808120727539, 0.006174240112304687, 0.0062243518829345704, 0.006051296234130859, 0.006077248096466065, 0.006033120155334473, 0.006109024047851562, 0.00604963207244873, 0.006091104030609131, 0.006064095973968506, 0.006086656093597412, 0.006029248237609863, 0.006060095787048339, 0.006023039817810058, 0.0060992960929870605, 0.006049568176269531, 0.006193088054656982, 0.0060375680923461916, 0.0061987838745117185, 0.006068511962890625, 0.006074592113494873, 0.006068480014801025, 0.006061823844909668, 0.006040863990783692, 0.00601366376876831, 0.0060471038818359375, 0.00599513578414917, 0.006059999942779541, 0.005999839782714843, 0.006060863971710205, 0.00602726411819458, 0.006025504112243652, 0.006117087841033935, 0.006074336051940918, 0.006024864196777344, 0.006084991931915283, 0.006003903865814209, 0.006067008018493652, 0.005980160236358643, 0.006094848155975342, 0.006014976024627685, 0.006072319984436035, 
0.006022719860076904, 0.006062528133392334, 0.005993472099304199, 0.006097919940948486, 0.0060269122123718264, 0.006023839950561523, 0.006002143859863281, 0.006061823844909668, 0.005738719940185547, 0.006065919876098633, 0.006041600227355957, 0.006075615882873535, 0.006030111789703369, 0.0060661759376525876, 0.00601087999343872, 0.006074368000030517, 0.0060193600654602055, 0.006053088188171386, 0.005995264053344726, 0.006049344062805176, 0.006004576206207275, 0.006076511859893799, 0.006002943992614746, 0.006060224056243896, 0.006018879890441894, 0.00605628776550293, 0.006006015777587891, 0.006068031787872314, 0.006027872085571289, 0.006052192211151123, 0.0060449280738830566, 0.00610041618347168, 0.006087520122528076, 0.006092895984649658, 0.006035488128662109, 0.00608460807800293, 0.006001887798309326, 0.006107935905456543, 0.005981215953826904, 0.006015967845916748, 0.005977439880371094, 0.006005375862121582, 0.005973023891448975, 0.0060013442039489745, 0.0060150399208068845, 0.006041855812072754, 0.005998176097869873, 0.0060440640449523925, 0.005977439880371094, 0.0060152640342712406, 0.0059716482162475585, 0.006039872169494629, 0.005984064102172852, 0.006019008159637451, 0.00598908805847168, 0.00602729606628418, 0.005975935935974121, 0.006057983875274659, 0.005994495868682862, 0.006042687892913818, 0.005977151870727539, 0.006029183864593506, 0.006115359783172607, 0.006076735973358154, 0.00613862419128418, 0.005994912147521973, 0.006054143905639648, 0.0060481600761413574, 0.006055967807769775, 0.005986112117767334, 0.006043456077575684, 0.005743391990661621, 0.005994592189788818, 0.006048799991607666, 0.006048096179962158, 0.006050432205200195, 0.006034527778625488, 0.006054751873016358, 0.00601097583770752, 0.006051136016845703, 0.0060033597946166995, 0.006049791812896729, 0.006016640186309814, 0.006037888050079346, 0.0060026879310607914, 0.0060414719581604005, 0.006008959770202636, 0.00602726411819458, 0.006033311843872071, 0.006017119884490967, 0.006045695781707764, 0.006034719944000244, 0.006029856204986572, 0.006012735843658447, 0.006028863906860351, 0.006027711868286133, 0.0060399360656738284, 0.006039968013763428, 0.006064991950988769, 0.00602339220046997, 0.006047647953033447, 0.006017663955688477, 0.006055935859680176, 0.0060274238586425785, 0.006069087982177734, 0.006013055801391601, 0.006267776012420654, 0.0060145602226257325, 0.006062816143035889, 0.006001599788665771, 0.006052608013153076, 0.006044672012329101, 0.006030335903167725, 0.005998335838317871, 0.006045343875885009, 0.005996416091918946, 0.006015168190002441, 0.006025023937225342, 0.0060832958221435545, 0.005975935935974121, 0.006082911968231201, 0.0060347518920898435, 0.006312416076660156, 0.006026624202728271, 0.0061896958351135255, 0.006017024040222168, 0.006105088233947754, 0.006055456161499023, 0.006035935878753662, 0.006075551986694336, 0.006056352138519287, 0.0060581440925598145, 0.006037792205810547, 0.0061001920700073245]",tokens/s,164.9194957231276,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,807.645184,6322.782208,0.0,5920.260096,5695.433728,s,1,7.12536669921875,7.12536669921875,0.0,7.12536669921875,7.12536669921875,7.12536669921875,7.12536669921875,[7.12536669921875],,kWh,5.054365458340726e-06,5.503244990129626e-07,3.0572246680071746e-06,8.661914625360863e-06,,MB,1162.0352,6335.36512,0.0,5922.357248,5577.222144,s,10,4.8595546569824215,0.48595546569824216,0.0026914178177418232,0.4865880584716797,0.4880344970703125,0.48858194580078124,0.4890199047851562,"[0.47869808959960936, 0.4847711181640625, 0.4854494323730469, 0.48749282836914065, 0.48667742919921875, 0.4859573974609375, 0.4864986877441406, 0.48696743774414064, 0.48912939453125, 0.487912841796875]",tokens/s,526.7972439247452,kWh,1.4156607620834115e-05,1.5612290922462073e-06,9.374399033904949e-06,2.509223574698527e-05,tokens/kWh,10202359.111453723,MB,1192.07936,6335.36512,0.0,5922.357248,5663.963648,s,10,20.011736694335937,2.0011736694335935,0.0021670698038016452,2.001565673828125,2.003222912597656,2.0043452209472656,2.005243067626953,"[2.0014833984375, 2.005467529296875, 2.0018673095703123, 1.9991851806640626, 1.9970859375, 2.0029735107421875, 1.999965576171875, 1.999953369140625, 2.00164794921875, 2.00210693359375]",tokens/s,31.481525547870785,kWh,5.8278851632081926e-05,6.428209655866404e-06,3.8712611657894715e-05,0.00010341967294584303,tokens/kWh,609168.4319383868,,s,630,20.0090021343231,0.031760320848131934,0.00032325048716952865,0.031707296371459956,0.031982431983947755,0.032135174179077144,0.03323925701141357,"[0.0358177604675293, 0.03291910552978516, 0.031412511825561526, 0.03133695983886719, 0.03130956840515137, 0.03127542304992676, 0.03128835105895996, 0.03154243278503418, 0.03131139183044434, 0.031389984130859375, 0.03173942375183105, 0.03144723129272461, 0.031357215881347655, 0.03142019271850586, 0.03140022468566894, 0.0313896312713623, 0.03143420791625977, 0.0314515209197998, 0.03148201560974121, 0.03164316749572754, 0.03151100730895996, 0.0317706241607666, 0.03161027145385742, 0.031766912460327146, 0.031862911224365235, 0.031748384475708005, 0.03202169418334961, 0.031472192764282224, 0.03154572868347168, 0.03168335914611817, 0.032223262786865235, 0.03236953735351562, 0.031866399765014645, 0.03180127906799316, 0.031801887512207035, 0.03169827270507813, 0.03165456008911133, 0.03169516754150391, 0.03165184020996094, 0.031988639831542966, 0.03163366317749024, 0.031641120910644534, 0.03187804794311523, 0.03166012763977051, 0.031721792221069335, 0.03168828773498535, 0.0316561279296875, 0.03174390411376953, 0.03184230422973633, 0.031704288482666015, 0.03172774314880371, 0.03185935974121094, 0.031987712860107424, 0.03168278312683105, 0.031806495666503905, 0.03198179244995117, 0.03180352020263672, 0.03179766464233398, 0.03180544090270996, 0.03183001518249512, 0.031921728134155274, 0.03185903930664063, 0.03187516784667969, 0.03348438262939453, 0.03229123306274414, 0.031971328735351565, 0.0317697925567627, 0.031686975479125974, 0.03185635185241699, 0.031509248733520505, 0.0315248966217041, 0.03191398429870605, 0.03169440078735351, 0.03163164710998535, 0.03166428756713867, 0.03178700828552246, 0.03170012855529785, 0.03159535980224609, 0.031605791091918946, 0.031594463348388675, 0.03183011245727539, 0.03161718368530273, 0.0314703369140625, 0.031588287353515626, 0.03162112045288086, 0.03162150382995606, 0.03168428802490234, 0.03165388870239258, 
0.03161087989807129, 0.03243008041381836, 0.03199385643005371, 0.03175340843200684, 0.031960159301757815, 0.03194819259643555, 0.031684928894042966, 0.03171737670898438, 0.03177471923828125, 0.03183353614807129, 0.03166649627685547, 0.03182617568969726, 0.03178643226623535, 0.03169337654113769, 0.03160905647277832, 0.031873119354248046, 0.031672000885009766, 0.03170697593688965, 0.031692960739135745, 0.031801343917846676, 0.0316080322265625, 0.031680320739746096, 0.031787391662597655, 0.03190342330932617, 0.03185552024841309, 0.031749120712280275, 0.03173081588745117, 0.03187699127197265, 0.0317007999420166, 0.03180060768127441, 0.031870975494384765, 0.031718303680419925, 0.03323289489746094, 0.03213926315307617, 0.032198654174804685, 0.03197747230529785, 0.031988000869750975, 0.03198534393310547, 0.03325747299194336, 0.032217086791992186, 0.03181158447265625, 0.03156787109375, 0.03156787109375, 0.031398111343383786, 0.03155699157714844, 0.03150223922729492, 0.0316151351928711, 0.03144739151000977, 0.03177267265319824, 0.032059391021728514, 0.03160479927062988, 0.03169379234313965, 0.031929311752319334, 0.03156582450866699, 0.03161680030822754, 0.031553760528564456, 0.031660032272338864, 0.03147350311279297, 0.03194825553894043, 0.0316977596282959, 0.03167011260986328, 0.03155353546142578, 0.031768575668334964, 0.0315695686340332, 0.031638879776000974, 0.03157920074462891, 0.031858623504638674, 0.03177872085571289, 0.03201603317260742, 0.03177215957641601, 0.03185113525390625, 0.03175027275085449, 0.03190921592712402, 0.03187504005432129, 0.031878015518188474, 0.03172966384887695, 0.031696895599365234, 0.03168876838684082, 0.031758399963378904, 0.03164761543273926, 0.031719423294067385, 0.03173990440368652, 0.0316682243347168, 0.03175424003601074, 0.031882783889770505, 0.03175446319580078, 0.03169468879699707, 0.03185420799255371, 0.03182284736633301, 0.03176220893859863, 0.031817728042602536, 0.0318442554473877, 0.031747615814208985, 0.03175849533081055, 0.031809951782226564, 0.03178214454650879, 0.03198233604431153, 0.031879199981689456, 0.031848415374755856, 0.03199148750305176, 0.03198355293273926, 0.03311785507202149, 0.03204569625854492, 0.031733760833740236, 0.03157561683654785, 0.03147446441650391, 0.03166815948486328, 0.03149407958984375, 0.03151059150695801, 0.03144412803649902, 0.03147990417480469, 0.031525888442993165, 0.0314714241027832, 0.031551456451416014, 0.0314245433807373, 0.031475711822509765, 0.03153510475158691, 0.03147776031494141, 0.031488000869750975, 0.03156582450866699, 0.03155577659606933, 0.0315056324005127, 0.03151318359375, 0.03157001686096191, 0.03156982421875, 0.031647743225097655, 0.031702495574951174, 0.03171792030334473, 0.03177471923828125, 0.03183616065979004, 0.03199385643005371, 0.03200204849243164, 0.031923583984375, 0.031784671783447266, 0.03176335906982422, 0.031784959793090824, 0.03181705665588379, 0.03169919967651367, 0.031723936080932616, 0.03166940879821777, 0.031703903198242185, 0.03188035202026367, 0.03166294479370117, 0.03163340759277344, 0.031821823120117186, 0.031631359100341795, 0.03165184020996094, 0.03172147178649903, 0.03159241676330567, 0.031641023635864254, 0.031707744598388675, 0.03169484710693359, 0.031897567749023435, 0.03171641540527344, 0.03181667137145996, 0.03177676773071289, 0.031938560485839845, 0.031891584396362305, 0.031902719497680664, 0.0318240966796875, 0.031873695373535155, 0.031854591369628905, 0.03216998291015625, 0.03229270553588867, 0.03297110366821289, 0.03203481674194336, 0.031779071807861325, 
0.0315020809173584, 0.031493696212768554, 0.03138937568664551, 0.03142527961730957, 0.031440895080566404, 0.03149590492248535, 0.03152092742919922, 0.0314590072631836, 0.031469728469848635, 0.03158012771606445, 0.031568191528320313, 0.031396928787231444, 0.03151932716369629, 0.03146707153320313, 0.031482431411743166, 0.03148111915588379, 0.03151148796081543, 0.031528959274291994, 0.03150883293151856, 0.03152688026428223, 0.03144441604614258, 0.03164096069335937, 0.03170761680603027, 0.03148636817932129, 0.03161087989807129, 0.03149635124206543, 0.03162227249145508, 0.031851232528686525, 0.031866880416870115, 0.03218534469604492, 0.03170620727539063, 0.03175795173645019, 0.03160297584533692, 0.0315817928314209, 0.03159257507324219, 0.031665632247924805, 0.03163651275634766, 0.031635040283203124, 0.031665407180786136, 0.031640352249145506, 0.03175564765930176, 0.03161577606201172, 0.0316275520324707, 0.031803232192993164, 0.031802751541137694, 0.031613504409790036, 0.03159647941589355, 0.031733760833740236, 0.031848447799682614, 0.03190284729003906, 0.03199475288391113, 0.03195715141296387, 0.031894847869873046, 0.03192886352539062, 0.03190169525146484, 0.03203071975708008, 0.03200204849243164, 0.03197337532043457, 0.0318791675567627, 0.031973312377929684, 0.033134559631347656, 0.03210380935668945, 0.031769407272338866, 0.03165772819519043, 0.03159616088867188, 0.03154988861083984, 0.031656991958618166, 0.03165283203125, 0.03167027282714844, 0.03159449577331543, 0.03161497688293457, 0.03145244789123535, 0.03151945686340332, 0.03147295951843262, 0.03145167922973633, 0.031473823547363285, 0.03175625610351562, 0.03151875114440918, 0.03155964851379395, 0.03158809661865234, 0.03155753517150879, 0.03151091194152832, 0.03147776031494141, 0.031471616744995115, 0.03154150390625, 0.03158809661865234, 0.0316866569519043, 0.03155868721008301, 0.03178185653686524, 0.032487422943115234, 0.03170899200439453, 0.03210464096069336, 0.031909887313842776, 0.03194620704650879, 0.03186233520507813, 0.031734144210815426, 0.03170569610595703, 0.031813440322875974, 0.031659807205200195, 0.0316072006225586, 0.031802751541137694, 0.03164748764038086, 0.03165171241760254, 0.03166924858093262, 0.031734975814819336, 0.03174073600769043, 0.031829023361206056, 0.03194569587707519, 0.031839712142944336, 0.03183180809020996, 0.03187587165832519, 0.031893632888793946, 0.031983295440673826, 0.03189779281616211, 0.03180339241027832, 0.03202252960205078, 0.0319815673828125, 0.03193142318725586, 0.031914976119995116, 0.03256924819946289, 0.03221683120727539, 0.032244064331054687, 0.03215087890625, 0.03341945648193359, 0.03212246322631836, 0.03182601547241211, 0.03155942344665527, 0.03170771217346192, 0.03167951965332031, 0.03156691169738769, 0.03143852806091309, 0.03146159934997558, 0.03151872062683105, 0.0314715518951416, 0.031382688522338865, 0.032209823608398434, 0.03187711906433106, 0.031510528564453126, 0.03199996757507324, 0.03171708869934082, 0.031581504821777344, 0.03156803131103516, 0.03152499198913574, 0.03164393615722656, 0.03147616004943848, 0.031581567764282226, 0.031607135772705075, 0.0315702075958252, 0.03155353546142578, 0.03163545608520508, 0.03151852798461914, 0.0315579833984375, 0.031635295867919924, 0.0316243839263916, 0.03166662406921387, 0.03165443229675293, 0.03180303955078125, 0.03169094467163086, 0.03170076751708984, 0.03173318481445313, 0.03177657508850098, 0.03169379234313965, 0.03159654426574707, 0.03164547157287598, 0.03161468887329102, 0.03162982368469238, 0.0316492805480957, 0.03167024040222168, 
0.031733631134033206, 0.03170688056945801, 0.031642528533935545, 0.03187433624267578, 0.031818464279174806, 0.03184828758239746, 0.03190815925598144, 0.0318503360748291, 0.031932416915893554, 0.03189750480651855, 0.03187731170654297, 0.03208591842651367, 0.03194166374206543, 0.03196208000183105, 0.031881280899047855, 0.03188719940185547, 0.031919296264648435, 0.03185683250427246, 0.0336009292602539, 0.03224019241333008, 0.03190275192260742, 0.031785503387451175, 0.031685087203979494, 0.0317706241607666, 0.03163545608520508, 0.0317544002532959, 0.03167420768737793, 0.03164329528808594, 0.031713247299194336, 0.031525056838989256, 0.03155167961120606, 0.03160678482055664, 0.03153510475158691, 0.03167350387573242, 0.03164246368408203, 0.03182489585876465, 0.031654943466186525, 0.03154940795898437, 0.031587551116943356, 0.03159734344482422, 0.031508480072021484, 0.031559488296508786, 0.03154083251953125, 0.03155411148071289, 0.03162729644775391, 0.03163955116271973, 0.03150028800964356, 0.03160294342041016, 0.03185008049011231, 0.03172745513916016, 0.03164396858215332, 0.03170451164245605, 0.031723648071289065, 0.031750591278076175, 0.03174505615234375, 0.03169779205322266, 0.03166012763977051, 0.03159654426574707, 0.03159040069580078, 0.03155763244628906, 0.0317542724609375, 0.031606752395629885, 0.03162832069396972, 0.0316343994140625, 0.03165977668762207, 0.03172089576721191, 0.0317325439453125, 0.03169484710693359, 0.03179708862304687, 0.03189561653137207, 0.03189769554138184, 0.03184435272216797, 0.031752191543579104, 0.0317890567779541, 0.031739551544189455, 0.03180505561828613, 0.03191574478149414, 0.031853567123413085, 0.03184025573730469, 0.032069633483886716, 0.0321064338684082, 0.03311727905273437, 0.032130176544189454, 0.03169401550292969, 0.03154185676574707, 0.031498239517211916, 0.03148540878295898, 0.03158480072021484, 0.031492095947265625, 0.03141836738586426, 0.031455232620239255, 0.03161497688293457, 0.03168889617919922, 0.031647903442382816, 0.031516319274902345, 0.0319048957824707, 0.03170368003845215, 0.03195315170288086, 0.031666175842285156, 0.031839712142944336, 0.031797792434692385, 0.03180339241027832, 0.03171564865112304, 0.031710527420043946, 0.03171980857849121, 0.03172681617736817, 0.0319370231628418, 0.03165417671203613, 0.031716991424560546, 0.03169523239135742, 0.03170508766174317, 0.03179110336303711, 0.03177395248413086, 0.0318287353515625, 0.03212492752075195, 0.03178688049316406, 0.03170502471923828, 0.03169503974914551, 0.031684736251831054, 0.03170495986938476, 0.03159561538696289, 0.0316296329498291, 0.03163327980041504, 0.03174195289611816, 0.03178969573974609, 0.031731807708740234, 0.03173126411437988, 0.031797119140625, 0.03169308853149414, 0.031953184127807614, 0.031703039169311525, 0.03174179267883301, 0.03183939170837402, 0.03182204818725586, 0.0318535041809082, 0.031860031127929685, 0.03182028770446777, 0.03179532814025879, 0.031931455612182615, 0.03184127998352051, 0.031819616317749023, 0.03192147254943847, 0.03189216041564941, 0.032013439178466795, 0.03324185562133789, 0.032220382690429684, 0.0318492488861084, 0.03152454376220703, 0.031526527404785155, 0.03154604721069336, 0.03157196807861328, 0.031530208587646484, 0.031652511596679686, 0.03152607917785644, 0.03168966484069824, 0.03171327972412109, 0.031631071090698244, 0.03154368019104004, 0.03155958366394043, 0.031475711822509765, 0.031537120819091796, 0.03157606315612793, 0.03158799934387207, 0.031500223159790036, 0.031524288177490235, 0.03176495933532715, 0.031586143493652345, 
0.03154540824890137, 0.03163993644714355, 0.03174326324462891, 0.031828960418701174, 0.031692800521850584, 0.03186073684692383, 0.031748096466064454, 0.03189276885986328, 0.03330326461791992, 0.031942655563354495, 0.03179635238647461, 0.03170937538146973, 0.0316866569519043, 0.03166268730163574, 0.03163961601257324, 0.03170921516418457, 0.03175785636901855, 0.0316329288482666, 0.031603647232055665, 0.0316847038269043, 0.03162451171875, 0.03164425659179688, 0.031649791717529296, 0.03173513603210449, 0.03167465591430664, 0.03176063919067383, 0.03187110328674316, 0.031787103652954105, 0.031772544860839844, 0.03179430389404297, 0.0317938232421875, 0.03178726387023926, 0.03186268806457519, 0.03180963134765625, 0.03180899238586426, 0.031796831130981446, 0.03188153648376465, 0.032675647735595705, 0.03211756896972656, 0.03201990509033203]",tokens/s,31.48582801734566,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.097664,3361.603584,0.0,2959.081472,2942.567424,s,1,7.2298310546875,7.2298310546875,0.0,7.2298310546875,7.2298310546875,7.2298310546875,7.2298310546875,[7.2298310546875],,kWh,5.871420945807889e-06,6.365301692932745e-07,1.9875015899867288e-06,8.495452705087893e-06,,MB,1337.679872,3556.63872,0.0,3139.436544,3105.830912,s,10,0.3450074577331543,0.034500745773315425,0.001306026524944813,0.03400886535644531,0.035316902542114254,0.03679782695770263,0.03798256649017334,"[0.038278751373291016, 0.03498780822753906, 0.033869537353515625, 0.03375299072265625, 0.03370415878295899, 0.03424854278564453, 0.03406230545043945, 0.03420979309082031, 0.03395542526245117, 0.03393814468383789]",tokens/s,7420.129456969681,kWh,1.192634588095037e-06,1.315263919325835e-07,7.90623081477629e-07,2.11478406150525e-06,tokens/kWh,121052548.41848283,MB,1375.617024,3598.58176,0.0,3181.379584,3162.0096,s,10,13.93775732421875,1.393775732421875,0.006007822959603111,1.3930152587890625,1.401655810546875,1.404278466796875,1.4063765917968751,"[1.3899971923828125, 1.393892578125, 1.3951678466796875, 1.394636474609375, 1.38808984375, 1.3851827392578124, 1.401072998046875, 1.392137939453125, 1.406901123046875, 1.3906785888671875]",tokens/s,45.20095918912932,kWh,4.084483393273861e-05,4.504788901988803e-06,2.572956480132129e-05,7.10791876360487e-05,tokens/kWh,886335.397114876,,s,630,13.935423479080203,0.02211971980806381,0.00037760557642382,0.02207139205932617,0.022286151123046875,0.022426221275329592,0.023095409641265875,"[0.022145055770874025, 0.022112255096435548, 0.022079488754272462, 0.022312959671020507, 0.021989183425903322, 0.02196089553833008, 0.02199075126647949, 0.021923648834228517, 0.022010719299316406, 0.021984447479248048, 0.022045503616333006, 0.02186649513244629, 0.021914688110351563, 0.022033344268798827, 0.021978143692016602, 0.02191868782043457, 0.02194380760192871, 0.022180160522460936, 0.021990943908691406, 0.021944320678710938, 0.022008480072021483, 0.02205721664428711, 
0.022036224365234374, 0.022014976501464844, 0.022029312133789062, 0.021923807144165038, 0.022349119186401367, 0.021983968734741212, 0.021979135513305666, 0.022071296691894532, 0.022367904663085938, 0.022044992446899413, 0.022081344604492188, 0.022299999237060546, 0.021992128372192384, 0.02206867218017578, 0.02209846305847168, 0.02208176040649414, 0.02206675148010254, 0.022050687789916992, 0.022253311157226563, 0.022045503616333006, 0.022162912368774414, 0.022060800552368164, 0.02199580764770508, 0.02196326446533203, 0.021946367263793946, 0.0220153923034668, 0.022040672302246093, 0.02213871955871582, 0.022035104751586914, 0.022013471603393554, 0.021989856719970703, 0.02226585578918457, 0.02206719970703125, 0.02193120002746582, 0.02199839973449707, 0.022003040313720704, 0.022043296813964844, 0.021964799880981444, 0.021942272186279296, 0.022018047332763673, 0.022939584732055665, 0.022173471450805664, 0.022112127304077148, 0.022198911666870116, 0.022076351165771484, 0.022041215896606445, 0.021971296310424805, 0.022071264266967774, 0.022135839462280274, 0.021958816528320314, 0.022004383087158203, 0.021949440002441405, 0.022272480010986327, 0.022000160217285156, 0.02196268844604492, 0.022081151962280273, 0.021971391677856444, 0.02238287925720215, 0.022181600570678712, 0.022147071838378905, 0.022038528442382813, 0.02207744026184082, 0.02212380790710449, 0.0221026554107666, 0.022058624267578125, 0.02207753562927246, 0.02235968017578125, 0.022175712585449217, 0.022048671722412108, 0.022116384506225585, 0.022097759246826172, 0.02212883186340332, 0.022215072631835937, 0.02218844795227051, 0.022468608856201173, 0.022097600936889648, 0.022198591232299805, 0.02215936088562012, 0.02211020851135254, 0.022091775894165038, 0.02227609634399414, 0.022131711959838866, 0.022150144577026368, 0.022146751403808593, 0.02211257553100586, 0.022025312423706055, 0.02210902404785156, 0.022007871627807617, 0.021993471145629884, 0.021999616622924805, 0.022040576934814454, 0.0220425910949707, 0.02225155258178711, 0.022260927200317384, 0.022094655990600585, 0.022079488754272462, 0.02208563232421875, 0.022429695129394533, 0.022068576812744142, 0.02213324737548828, 0.022116512298583985, 0.022111263275146485, 0.022197216033935548, 0.022169599533081053, 0.022039199829101564, 0.02219980812072754, 0.02213907241821289, 0.022179424285888674, 0.022096607208251955, 0.02211622428894043, 0.021989503860473634, 0.022042623519897463, 0.022031551361083986, 0.022143455505371095, 0.02214313507080078, 0.0221114559173584, 0.022084640502929687, 0.022204576492309572, 0.02262403106689453, 0.02203411293029785, 0.02212668800354004, 0.021997791290283203, 0.022190080642700196, 0.022125823974609375, 0.022192352294921874, 0.02209017562866211, 0.022142112731933592, 0.022072256088256834, 0.022230911254882812, 0.02205708885192871, 0.022086847305297853, 0.022079328536987304, 0.02222947120666504, 0.022031871795654297, 0.0221624641418457, 0.022064287185668944, 0.022373184204101563, 0.02225766372680664, 0.02226585578918457, 0.022177791595458983, 0.02224742317199707, 0.02228972816467285, 0.022188192367553712, 0.02217206382751465, 0.022136415481567383, 0.02215769577026367, 0.022424896240234374, 0.022122751235961913, 0.022179519653320313, 0.02207561683654785, 0.02218259239196777, 0.022050880432128907, 0.02216111946105957, 0.022001888275146483, 0.022083391189575197, 0.022333023071289062, 0.02208950424194336, 0.021977920532226563, 0.02207744026184082, 0.021987327575683592, 0.02206719970703125, 0.022140928268432617, 0.02207744026184082, 0.022168928146362305, 
0.022162080764770508, 0.022116352081298828, 0.02211020851135254, 0.022118688583374024, 0.022138336181640624, 0.02211199951171875, 0.02203865623474121, 0.022164159774780274, 0.021965824127197265, 0.022010879516601564, 0.022091264724731444, 0.022299135208129883, 0.02208118438720703, 0.022028799057006835, 0.02202716827392578, 0.02192470359802246, 0.022257759094238282, 0.022021568298339844, 0.022104639053344727, 0.02203968048095703, 0.022038400650024412, 0.022311935424804686, 0.022007808685302735, 0.02215465545654297, 0.02214067268371582, 0.02204863929748535, 0.022037248611450195, 0.02212681579589844, 0.022142847061157225, 0.022028608322143553, 0.022131872177124024, 0.022088159561157228, 0.022101247787475586, 0.021985439300537108, 0.02207823944091797, 0.02210825538635254, 0.022056224822998047, 0.022053504943847658, 0.02201955223083496, 0.022053375244140624, 0.022106048583984374, 0.022110496520996094, 0.022052671432495115, 0.022124544143676757, 0.02217747116088867, 0.022230688095092772, 0.02224604797363281, 0.022429695129394533, 0.022202144622802733, 0.02209103965759277, 0.02218060874938965, 0.02217184066772461, 0.022060800552368164, 0.022218816757202147, 0.02211568069458008, 0.02214790344238281, 0.023136287689208983, 0.02239388847351074, 0.022295520782470702, 0.02202560043334961, 0.02211395263671875, 0.022066015243530274, 0.02216908836364746, 0.022090112686157227, 0.022138208389282227, 0.022170528411865235, 0.02213190460205078, 0.022198463439941408, 0.022134719848632814, 0.022583135604858397, 0.02217046356201172, 0.022065216064453125, 0.022165088653564452, 0.022172000885009764, 0.02222483253479004, 0.022140064239501954, 0.022135040283203126, 0.02215519905090332, 0.02242633628845215, 0.022237184524536133, 0.022161472320556642, 0.022140031814575196, 0.022121280670166017, 0.022163007736206056, 0.022028736114501953, 0.02214908790588379, 0.022164543151855468, 0.022109216690063476, 0.022110143661499024, 0.022323200225830078, 0.02217087936401367, 0.022391551971435546, 0.022016000747680665, 0.02187398338317871, 0.022053247451782228, 0.02187295913696289, 0.021974720001220704, 0.02186636734008789, 0.021909568786621095, 0.021821151733398436, 0.021824159622192384, 0.021897216796875, 0.021989376068115234, 0.02188688087463379, 0.021876096725463867, 0.022061792373657227, 0.021818527221679686, 0.021879648208618162, 0.02173516845703125, 0.0217989444732666, 0.02196656036376953, 0.02185219192504883, 0.021989856719970703, 0.0220446720123291, 0.021910688400268555, 0.02183440017700195, 0.021899200439453124, 0.02176748847961426, 0.021943231582641602, 0.021917280197143556, 0.021851936340332032, 0.021856767654418945, 0.021919872283935545, 0.02188287925720215, 0.022169599533081053, 0.021897216796875, 0.022157184600830077, 0.021891199111938476, 0.021977088928222657, 0.02189468765258789, 0.022029024124145508, 0.02204876708984375, 0.02201513671875, 0.02198204803466797, 0.021921760559082033, 0.021962783813476563, 0.022127904891967774, 0.021807552337646485, 0.021843904495239257, 0.021924192428588868, 0.021882335662841798, 0.02195510482788086, 0.02191564750671387, 0.02204252815246582, 0.021878559112548827, 0.021885248184204103, 0.021823488235473632, 0.021769792556762695, 0.02180944061279297, 0.022296735763549805, 0.02183737564086914, 0.021764543533325194, 0.021880191802978517, 0.021936767578125, 0.022823104858398436, 0.022031967163085937, 0.021975263595581055, 0.021980960845947264, 0.02223651123046875, 0.021992319107055663, 0.02189254379272461, 0.021898048400878906, 0.021772031784057618, 0.02199920082092285, 
0.021856672286987306, 0.021964799880981444, 0.021937984466552735, 0.022223007202148436, 0.022310047149658202, 0.021954751968383788, 0.02187948799133301, 0.021938175201416017, 0.021769376754760743, 0.021951200485229493, 0.02188073539733887, 0.021788896560668944, 0.021861440658569337, 0.02189923286437988, 0.021813695907592773, 0.02188313674926758, 0.021767776489257814, 0.022011775970458985, 0.021969600677490233, 0.02205708885192871, 0.022449472427368163, 0.022270656585693358, 0.02209587287902832, 0.0221712646484375, 0.022145408630371094, 0.022196224212646484, 0.021975040435791016, 0.022094079971313477, 0.022008064270019532, 0.022180063247680664, 0.0221048641204834, 0.02206822395324707, 0.022062623977661133, 0.022426080703735352, 0.022826976776123047, 0.02237648010253906, 0.022388959884643556, 0.022134559631347656, 0.022179391860961913, 0.02207379150390625, 0.022189760208129884, 0.021987648010253907, 0.0222410888671875, 0.022111583709716796, 0.022155935287475587, 0.021976863861083985, 0.022655391693115236, 0.022861440658569335, 0.02299532890319824, 0.02234163284301758, 0.0220897274017334, 0.021992799758911132, 0.021948223114013673, 0.022063264846801756, 0.022184511184692383, 0.021900575637817384, 0.021960575103759764, 0.02195964813232422, 0.022175743103027345, 0.02458243179321289, 0.023178367614746093, 0.022104480743408202, 0.021980928421020507, 0.022014400482177735, 0.022091775894165038, 0.022025983810424806, 0.021876031875610352, 0.02202310371398926, 0.022007808685302735, 0.02191974449157715, 0.021954463958740233, 0.021952608108520507, 0.02201193618774414, 0.02212246322631836, 0.02205900764465332, 0.02185420799255371, 0.022027999877929687, 0.022019584655761718, 0.023918975830078126, 0.022709728240966797, 0.022936031341552733, 0.022135263442993165, 0.022153215408325197, 0.022598976135253905, 0.02203308868408203, 0.021968896865844727, 0.0220446720123291, 0.022016000747680665, 0.022078847885131835, 0.02193471908569336, 0.02188287925720215, 0.02243328094482422, 0.022202688217163084, 0.02208992004394531, 0.022024192810058595, 0.02202009582519531, 0.021972991943359374, 0.022004735946655272, 0.021998559951782227, 0.02208140754699707, 0.02193014335632324, 0.022083295822143554, 0.022026527404785157, 0.022023551940917967, 0.022042623519897463, 0.02208438491821289, 0.021956064224243163, 0.022167936325073242, 0.021921344757080078, 0.022200191497802734, 0.021969472885131836, 0.022140640258789063, 0.02224342346191406, 0.022257856369018555, 0.02204876708984375, 0.02210406494140625, 0.02205081558227539, 0.022003231048583986, 0.02206358337402344, 0.021944320678710938, 0.022117984771728515, 0.022042848587036132, 0.0220830078125, 0.022161216735839845, 0.02199852752685547, 0.022001407623291017, 0.022072832107543947, 0.022139488220214845, 0.022138496398925782, 0.0219583683013916, 0.02199776077270508, 0.022081279754638673, 0.022085952758789062, 0.022192703247070313, 0.02196406364440918, 0.022340320587158204, 0.022511615753173828, 0.022191423416137696, 0.022170303344726562, 0.022568960189819336, 0.022054752349853515, 0.022071487426757814, 0.022026432037353515, 0.02203830337524414, 0.022076799392700196, 0.02210633659362793, 0.022188255310058594, 0.02209542465209961, 0.02202400016784668, 0.022107967376708983, 0.022061920166015624, 0.022001823425292968, 0.02208793640136719, 0.02205695915222168, 0.02818409538269043, 0.022362592697143555, 0.022261632919311523, 0.02205094337463379, 0.021952224731445313, 0.022015424728393556, 0.02196976089477539, 0.02216671943664551, 0.02206710433959961, 0.021996448516845703, 
0.022126304626464845, 0.022038816452026367, 0.022202112197875976, 0.022024736404418946, 0.022142688751220704, 0.022110176086425782, 0.022636575698852537, 0.022286048889160158, 0.022239519119262696, 0.022062911987304687, 0.022095136642456055, 0.02217568016052246, 0.022095008850097655, 0.022409023284912108, 0.022218591690063478, 0.022632608413696288, 0.02290614318847656, 0.02224611282348633, 0.02226585578918457, 0.02215056037902832, 0.022024351119995116, 0.022142751693725586, 0.022096000671386718, 0.02213875198364258, 0.022031007766723634, 0.022106111526489256, 0.022146335601806642, 0.02215910339355469, 0.022059999465942382, 0.02209328079223633, 0.022126272201538087, 0.02213971138000488, 0.022160703659057618, 0.022287071228027342, 0.022022144317626953, 0.02234102439880371, 0.022594144821166992, 0.025604095458984375, 0.022253568649291993, 0.02209382438659668, 0.02224332809448242, 0.02211840057373047, 0.02225971221923828, 0.02208896064758301, 0.02210291290283203, 0.022203807830810548, 0.022094303131103516, 0.02244371223449707, 0.022038848876953124, 0.0221265926361084, 0.02201919937133789, 0.022170495986938477, 0.022048191070556642, 0.022061727523803712, 0.02214393615722656, 0.0220948486328125, 0.0219169921875, 0.02198963165283203, 0.021974496841430664, 0.022217695236206054, 0.02214851188659668, 0.022063167572021484, 0.022023712158203125, 0.02202851104736328, 0.02195088005065918, 0.02204047966003418, 0.021966367721557616, 0.021947328567504882, 0.02206096076965332, 0.022054431915283203, 0.022079999923706056, 0.021940288543701173, 0.022024192810058595, 0.02192793655395508, 0.022076608657836914, 0.022323392868041993, 0.021975200653076173, 0.022161888122558593, 0.021977024078369142, 0.022019584655761718, 0.021996095657348634, 0.02242361640930176, 0.02541267204284668, 0.021982240676879882, 0.02187558364868164, 0.021867488861083983, 0.02198940849304199, 0.02186195182800293, 0.021850528717041014, 0.02199728012084961, 0.02194047927856445, 0.022128671646118165, 0.021981088638305665, 0.021975296020507813, 0.022024032592773437, 0.022322431564331054, 0.021965568542480468, 0.021936128616333008, 0.02188287925720215, 0.02206515121459961, 0.02196236801147461, 0.02204681587219238, 0.021923263549804686, 0.021854047775268556, 0.022289407730102538, 0.02250752067565918, 0.022012992858886717, 0.02203651237487793, 0.021906240463256836, 0.02188047981262207, 0.02193657684326172, 0.021901311874389647, 0.021842079162597658, 0.021897056579589843, 0.021878528594970702, 0.021901567459106444]",tokens/s,45.20852925250197,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 316.12 MiB is free. Process 240132 has 14.43 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 13.04 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.159104,6230.50752,0.0,5827.985408,5712.718848,s,1,7.4758134765625,7.4758134765625,0.0,7.4758134765625,7.4758134765625,7.4758134765625,7.4758134765625,[7.4758134765625],,kWh,6.451343412481946e-06,7.044562819980021e-07,3.865558648008527e-06,1.1021358342488476e-05,,MB,1336.68864,6463.291392,0.0,6046.089216,5989.425664,s,10,0.7722324447631836,0.07722324447631834,0.0015798091876918802,0.07718647766113282,0.07885745697021483,0.0788709358215332,0.0788817189025879,"[0.07340624237060547, 0.07647743988037109, 0.07821929931640625, 0.07694863891601562, 0.07628163146972657, 0.07725186920166016, 0.07878736114501952, 0.07888441467285157, 0.07885446166992187, 0.07712108612060548]",tokens/s,3315.0640294387813,kWh,2.3561742213706607e-06,2.5984331893430883e-07,1.5665088696935828e-06,4.1825264099985525e-06,tokens/kWh,61207025.349085264,MB,1374.86336,6526.205952,0.0,6109.003776,6090.851328,s,10,21.036782714843753,2.1036782714843754,0.007672768184465667,2.1016650390625,2.116079443359375,2.1176091796875,2.11883296875,"[2.09547021484375, 2.100125732421875, 2.102502197265625, 2.100827880859375, 2.09576171875, 2.103072998046875, 2.09724951171875, 2.115739501953125, 2.10689404296875, 2.119138916015625]",tokens/s,29.94754514222681,kWh,6.151625244737964e-05,6.7850435215998984e-06,4.06189971009067e-05,0.00010892029306988624,tokens/kWh,578404.6133586648,,s,630,21.033884994506845,0.03338711903889974,0.0005022680373334965,0.03332195281982422,0.03374483375549316,0.033963593864440915,0.03617086681365967,"[0.03484393692016602, 0.03379487991333008, 0.03323494338989258, 0.03309539031982422, 0.03282767868041992, 0.032855457305908206, 0.03291196823120117, 0.03302195358276367, 0.03374899291992187, 0.032901119232177735, 0.032898529052734375, 0.03289059066772461, 0.033058944702148436, 0.03292393493652344, 0.033003936767578124, 0.032970207214355465, 0.03294998550415039, 0.0328548469543457, 0.033099777221679685, 0.03303628921508789, 0.03292160034179688, 0.03296255874633789, 0.03301481628417969, 0.033186782836914064, 0.033051647186279294, 0.033345664978027344, 0.03298720169067383, 0.03304531097412109, 0.03313423919677734, 0.03319228744506836, 0.03353395080566406, 0.033331199645996096, 0.03327385711669922, 0.03325443267822266, 0.03324361419677734, 0.03313919830322266, 0.0331038703918457, 0.03318374252319336, 0.033323009490966796, 0.03337420654296875, 0.03325107192993164, 0.033255680084228516, 0.03336304092407227, 0.03331564712524414, 0.03338380813598633, 0.03330902481079102, 0.03328243255615235, 0.03341299057006836, 0.03347382354736328, 0.03341398239135742, 0.033467838287353516, 0.033530433654785155, 0.033617919921875, 0.033529857635498046, 0.03351068878173828, 
0.03341795349121094, 0.03342745590209961, 0.033314815521240236, 0.033519519805908206, 0.03339654541015625, 0.033600833892822264, 0.03338339233398437, 0.03350527954101563, 0.035987903594970706, 0.034132190704345707, 0.03338854217529297, 0.03341856002807617, 0.03307097625732422, 0.03304531097412109, 0.032950271606445314, 0.03301763153076172, 0.03351574325561523, 0.032911361694335936, 0.03295641708374023, 0.03289632034301758, 0.03299996948242188, 0.033151134490966794, 0.033279998779296875, 0.032982078552246094, 0.033056766510009765, 0.03308025741577148, 0.032968704223632815, 0.03367238235473633, 0.033048736572265626, 0.03299190521240234, 0.033000511169433595, 0.03305673599243164, 0.03305683135986328, 0.03305744171142578, 0.03308569717407227, 0.033087200164794925, 0.03310195159912109, 0.033302078247070314, 0.03327593612670898, 0.0333458251953125, 0.03344003295898437, 0.03349264144897461, 0.03335203170776367, 0.03341926574707031, 0.0332798080444336, 0.03349113464355469, 0.03316444778442383, 0.033215038299560545, 0.0333243522644043, 0.03328246307373047, 0.03321491241455078, 0.03325759887695313, 0.03333670425415039, 0.03330025482177734, 0.03328803253173828, 0.033547264099121094, 0.033544193267822264, 0.033611263275146484, 0.03342697525024414, 0.03362300872802734, 0.03338444900512695, 0.03343084716796875, 0.03339904022216797, 0.03358560180664062, 0.03340697479248047, 0.033525150299072264, 0.03345673751831055, 0.033504673004150394, 0.033536609649658204, 0.03362774276733398, 0.0334944953918457, 0.03636483383178711, 0.034473983764648435, 0.03344134521484375, 0.033051071166992185, 0.03292480087280274, 0.03293478393554688, 0.032914432525634765, 0.03297894287109375, 0.03301683044433594, 0.03292700958251953, 0.0330022087097168, 0.03298713684082031, 0.03307724761962891, 0.03306496047973633, 0.03299868774414062, 0.03297078323364258, 0.033044319152832034, 0.032967521667480466, 0.03298451232910156, 0.033046497344970706, 0.033001953125, 0.0329381103515625, 0.033007614135742186, 0.033060703277587894, 0.03295609664916992, 0.033073440551757816, 0.03308303833007813, 0.033033760070800784, 0.03319705581665039, 0.033437473297119144, 0.033573089599609376, 0.03360153579711914, 0.033309696197509765, 0.033344512939453126, 0.03373567962646484, 0.034592769622802735, 0.033210750579833984, 0.03324537658691406, 0.03344985580444336, 0.03323567962646484, 0.03327164840698242, 0.033431041717529295, 0.03329795074462891, 0.033307327270507815, 0.03610780715942383, 0.033409793853759764, 0.03319756698608398, 0.03350969696044922, 0.033433792114257815, 0.03337625503540039, 0.033538047790527346, 0.03328204727172852, 0.033330814361572265, 0.033427841186523435, 0.03330252838134766, 0.03349264144897461, 0.033357662200927736, 0.03341363143920899, 0.033503231048583985, 0.03346982574462891, 0.03349158477783203, 0.03344307327270508, 0.03357734298706055, 0.03652166366577148, 0.034466751098632814, 0.03368492889404297, 0.033069633483886716, 0.03298303985595703, 0.032925697326660154, 0.03317964935302734, 0.0329747200012207, 0.03300902557373047, 0.032971519470214844, 0.033007614135742186, 0.033017856597900394, 0.03296255874633789, 0.03299737548828125, 0.03296051025390625, 0.03304447937011719, 0.03303583908081055, 0.03297324752807617, 0.03297510528564453, 0.03309952163696289, 0.03306291198730469, 0.033107967376708985, 0.03308131027221679, 0.03306665420532227, 0.03315955352783203, 0.03315708923339844, 0.03314243316650391, 0.03315267181396484, 0.03326844787597656, 0.03342089462280273, 0.033343647003173826, 0.03335808181762695, 0.03344384002685547, 
0.03328566360473633, 0.03329481506347656, 0.03337340927124023, 0.033293087005615236, 0.03359900665283203, 0.03331119918823242, 0.03321145629882812, 0.033370975494384766, 0.033349727630615236, 0.03326976013183594, 0.03350425720214844, 0.03332403182983398, 0.03349094390869141, 0.03346022415161133, 0.033337345123291014, 0.033501182556152344, 0.033261566162109374, 0.03342335891723633, 0.03340902328491211, 0.03324470520019531, 0.03351599884033203, 0.03341926574707031, 0.033599296569824216, 0.033486846923828126, 0.033546432495117184, 0.033492767333984375, 0.03365091323852539, 0.0334986572265625, 0.03364707183837891, 0.03375475311279297, 0.03616355133056641, 0.03453247833251953, 0.0335748176574707, 0.03326665496826172, 0.03293756866455078, 0.032925342559814455, 0.03294598388671875, 0.03296147155761719, 0.03302604675292969, 0.03294950485229492, 0.03302067184448242, 0.03297635269165039, 0.032997920989990236, 0.03307110214233398, 0.03304179382324219, 0.032952510833740234, 0.03303436660766602, 0.0329813117980957, 0.03304854583740234, 0.03299945449829102, 0.03306252670288086, 0.03309539031982422, 0.03305104064941406, 0.0329955825805664, 0.033097728729248044, 0.03309116744995117, 0.03311001586914063, 0.03324278259277344, 0.033197856903076174, 0.033200447082519534, 0.03317974472045898, 0.03334748840332031, 0.03340675354003906, 0.03326793670654297, 0.03323267364501953, 0.033325950622558595, 0.03318783950805664, 0.03314688110351562, 0.03319193649291992, 0.033290241241455076, 0.033238208770751954, 0.03317136001586914, 0.03319615936279297, 0.03323779296875, 0.03333840179443359, 0.0332413444519043, 0.033215198516845706, 0.033226688385009764, 0.03332041549682617, 0.033251007080078124, 0.033161376953125, 0.03333708953857422, 0.033334270477294925, 0.033226463317871095, 0.0332474250793457, 0.03342281723022461, 0.033386177062988284, 0.03348767852783203, 0.033376064300537106, 0.0333007698059082, 0.03356060791015625, 0.03344179153442383, 0.03363011169433594, 0.03617385482788086, 0.03452431869506836, 0.03363107299804687, 0.033172576904296876, 0.03301468658447266, 0.032901119232177735, 0.03296051025390625, 0.032901119232177735, 0.03298054504394531, 0.0335610237121582, 0.03343564987182617, 0.03305472183227539, 0.033218753814697265, 0.033013568878173825, 0.03299903869628906, 0.032992671966552735, 0.032952896118164064, 0.03293225479125977, 0.03296201705932617, 0.0330588493347168, 0.0330511360168457, 0.03323030471801758, 0.033085983276367185, 0.03306905746459961, 0.033065120697021486, 0.033005409240722657, 0.033135616302490234, 0.033113086700439456, 0.03313808059692383, 0.03320073699951172, 0.03315865707397461, 0.03327747344970703, 0.033420257568359375, 0.033261566162109374, 0.033343425750732424, 0.033335361480712894, 0.033380352020263675, 0.03351289749145508, 0.03456041717529297, 0.03380617523193359, 0.03340889739990234, 0.03341308975219726, 0.03334313583374023, 0.03342419052124023, 0.033413120269775394, 0.03340652847290039, 0.03351801681518555, 0.033976318359375, 0.03334348678588867, 0.033562625885009766, 0.033339393615722655, 0.03352073669433594, 0.03350006484985352, 0.033576961517333984, 0.033462272644042966, 0.033656513214111325, 0.033433921813964845, 0.0335052490234375, 0.033493022918701175, 0.033512958526611326, 0.0334730224609375, 0.03350105667114258, 0.03341257476806641, 0.03674016189575195, 0.03436412811279297, 0.03348704147338867, 0.03307724761962891, 0.032950271606445314, 0.03294822311401367, 0.03297075271606445, 0.03289654541015625, 0.03285200119018555, 0.03290156936645508, 0.033056766510009765, 
0.033061920166015626, 0.03301884841918945, 0.03303587341308594, 0.033046592712402345, 0.03307350540161133, 0.033027198791503905, 0.03296527862548828, 0.03309545516967773, 0.03302444839477539, 0.03303219223022461, 0.03312025451660156, 0.03296460723876953, 0.033116161346435545, 0.03309296035766601, 0.03302163314819336, 0.03309791946411133, 0.033074241638183594, 0.033084705352783204, 0.03319340896606445, 0.0332564468383789, 0.03321241760253906, 0.03332089614868164, 0.03333126449584961, 0.03316320037841797, 0.033140480041503904, 0.03324713516235352, 0.03323519897460937, 0.03318508911132813, 0.03311881637573242, 0.03347481536865234, 0.033842559814453124, 0.03343014526367188, 0.03328160095214844, 0.033186241149902346, 0.03324067306518555, 0.0333724479675293, 0.033253440856933596, 0.03325276947021484, 0.033399105072021484, 0.03329878234863281, 0.03323235321044922, 0.03351145553588867, 0.03353241729736328, 0.03349708938598633, 0.03328156661987305, 0.03334396743774414, 0.033452030181884765, 0.03335295867919922, 0.03357977676391601, 0.03336806488037109, 0.03361996841430664, 0.033552383422851564, 0.03653231811523437, 0.034369503021240234, 0.03347251129150391, 0.033097728729248044, 0.0328908805847168, 0.03295641708374023, 0.0328908805847168, 0.03340902328491211, 0.03394704055786133, 0.03317206573486328, 0.033081344604492184, 0.03296051025390625, 0.032933185577392575, 0.0331288948059082, 0.03333350372314453, 0.03334713745117188, 0.03357740783691406, 0.03357900619506836, 0.033756385803222655, 0.03354079818725586, 0.033387935638427735, 0.033333953857421876, 0.0333699836730957, 0.03336528015136719, 0.033567008972167967, 0.03336249542236328, 0.03324671936035156, 0.033497535705566406, 0.03338246536254883, 0.03329433441162109, 0.033551422119140625, 0.033592254638671874, 0.03341721725463867, 0.033311840057373046, 0.03322691345214844, 0.0333704948425293, 0.03325785446166992, 0.03321855926513672, 0.03380633544921875, 0.035781631469726564, 0.03351820755004883, 0.03369327926635742, 0.03386758422851562, 0.03358950424194336, 0.03360620880126953, 0.033820831298828125, 0.03374684906005859, 0.03362345504760742, 0.03373519897460937, 0.03356278228759765, 0.0335175666809082, 0.0336814079284668, 0.033501182556152344, 0.0335994873046875, 0.03350089645385742, 0.033672767639160155, 0.03376793670654297, 0.03352953720092773, 0.03380428695678711, 0.03399734497070313, 0.03396137619018555, 0.03389295959472656, 0.03386982345581055, 0.036939777374267575, 0.034799583435058595, 0.03395491027832031, 0.03338265609741211, 0.03316598510742187, 0.0332014389038086, 0.03316400146484375, 0.03304188919067383, 0.03305292892456055, 0.032981281280517576, 0.033006816864013674, 0.03302028656005859, 0.033075775146484375, 0.033054561614990235, 0.03307724761962891, 0.03302195358276367, 0.03299737548828125, 0.03307110214233398, 0.03329980850219726, 0.03334112167358398, 0.03320051193237305, 0.033329982757568356, 0.033430686950683595, 0.03335641479492187, 0.03341107177734375, 0.033342689514160154, 0.03306371307373047, 0.03315020751953125, 0.03324115371704101, 0.03331347274780273, 0.03341926574707031, 0.033212352752685546, 0.033474624633789064, 0.03331071853637695, 0.03323699188232422, 0.03365273666381836, 0.033445888519287106, 0.03349708938598633, 0.033323009490966796, 0.03359721755981445, 0.03357513427734375, 0.03371321487426758, 0.033861568450927734, 0.033756160736083986, 0.033562625885009766, 0.033726463317871096, 0.03377897644042969, 0.0335074577331543, 0.0336533432006836, 0.03334348678588867, 0.033501182556152344, 0.033454078674316406, 
0.033445888519287106, 0.03327590560913086, 0.0333392333984375, 0.03346396636962891, 0.03331737518310547, 0.03339468765258789, 0.03342745590209961, 0.033347583770751955, 0.0335994873046875, 0.03337420654296875, 0.033552383422851564, 0.03638060760498047, 0.034495361328125, 0.03355033493041992, 0.033124351501464845, 0.033164478302001955, 0.03294857788085938, 0.032934368133544924, 0.03310345458984375, 0.03318790435791016, 0.03315465545654297, 0.03311215972900391, 0.03343222427368164, 0.03312550354003906, 0.033094528198242185, 0.033017856597900394, 0.03316249465942383, 0.03331967926025391, 0.03327155303955078, 0.03348624038696289, 0.03380310440063477, 0.03343155288696289, 0.03343683242797851, 0.03336832046508789, 0.03347312164306641, 0.03332057571411133, 0.033401214599609375, 0.03335782241821289, 0.033331199645996096, 0.033545982360839846, 0.03350348663330078, 0.03385139083862305, 0.03375904083251953, 0.0335994873046875, 0.033704128265380856, 0.033873920440673826, 0.033974273681640625, 0.034344959259033206, 0.03374460983276367, 0.03372876739501953, 0.03366486358642578, 0.0335994873046875, 0.033670913696289065, 0.03355897521972656, 0.033701889038085936, 0.03362406539916992, 0.03361702346801758, 0.03369388961791992, 0.03404841613769531, 0.03381875228881836, 0.033527969360351566, 0.033742847442626955, 0.03382067108154297, 0.033957889556884766, 0.03356671905517578, 0.03381974411010742, 0.034089088439941406, 0.033742847442626955, 0.03384195327758789, 0.03397836685180664, 0.03378995132446289, 0.033753089904785157, 0.03457603073120117, 0.03396540832519531]",tokens/s,29.951670847517207,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.700352,6182.273024,0.0,5779.750912,5773.960192,s,1,7.25391259765625,7.25391259765625,0.0,7.25391259765625,7.25391259765625,7.25391259765625,7.25391259765625,[7.25391259765625],,kWh,5.9966448208191044e-06,6.54319508839171e-07,1.92389042801e-06,8.574854757668275e-06,,MB,1233.788928,6498.942976,0.0,6085.935104,6038.345728,s,10,1.976500747680664,0.1976500747680664,0.006187100916178227,0.19934066772460937,0.20078077697753904,0.20194725036621095,0.20288042907714846,"[0.17957827758789063, 0.20022499084472656, 0.19916741943359376, 0.1979325408935547, 0.1979754180908203, 0.1997315216064453, 0.20311372375488282, 0.199513916015625, 0.1987413787841797, 0.2005215606689453]",tokens/s,1295.2183311865915,kWh,5.625732945112124e-06,6.204132493022244e-07,3.731557472422798e-06,9.977703666837146e-06,tokens/kWh,25657206.161660843,MB,1239.199744,6519.914496,0.0,6106.906624,6086.544896,s,10,16.02639599609375,1.602639599609375,0.004871174393990476,1.6023424072265624,1.6096802001953125,1.609740185546875,1.609788173828125,"[1.602439208984375, 1.5988126220703125, 1.605950927734375, 1.6063497314453126, 1.6098001708984375, 1.6096668701171875, 1.60224560546875, 1.5962471923828125, 1.5964027099609375, 
1.59848095703125]",tokens/s,39.31014809278114,kWh,4.675744683447044e-05,5.157069325050647e-06,3.0987914747577524e-05,8.29024309070986e-05,tokens/kWh,759929.465404923,,s,630,16.023336830139144,0.02543386798434787,0.00033816012899622733,0.025386832237243653,0.025616895866394044,0.025729276943206786,0.027414859294891358,"[0.02723263931274414, 0.02614918327331543, 0.025610240936279297, 0.02531491279602051, 0.025242015838623046, 0.02529280090332031, 0.025157632827758788, 0.02518796730041504, 0.025212575912475586, 0.025135904312133788, 0.025212703704833986, 0.02522742462158203, 0.025233407974243165, 0.02522224044799805, 0.025215360641479494, 0.025305408477783203, 0.025250015258789064, 0.02531532859802246, 0.025271360397338866, 0.02527894401550293, 0.02527427291870117, 0.025319456100463867, 0.025244192123413087, 0.025304864883422852, 0.0253885440826416, 0.02532406425476074, 0.025290943145751952, 0.025329280853271484, 0.025294591903686523, 0.02532147216796875, 0.025348543167114258, 0.025317567825317383, 0.025337440490722656, 0.02542633628845215, 0.0253703670501709, 0.02530124855041504, 0.02551807975769043, 0.025658815383911134, 0.025594432830810546, 0.025571327209472656, 0.02559564781188965, 0.025596256256103515, 0.025632671356201172, 0.025456256866455078, 0.02543382453918457, 0.02543017578125, 0.025467391967773437, 0.025439327239990234, 0.02537939262390137, 0.025485504150390626, 0.025464576721191408, 0.0254399356842041, 0.025481855392456055, 0.025460832595825194, 0.025457727432250978, 0.025486047744750977, 0.025448671340942385, 0.02548121643066406, 0.025408544540405274, 0.025610687255859375, 0.02591004753112793, 0.025468671798706054, 0.02545039939880371, 0.0276889591217041, 0.026318527221679686, 0.02570681571960449, 0.025407487869262696, 0.025245824813842774, 0.025206655502319337, 0.02512895965576172, 0.025116672515869142, 0.025143072128295897, 0.02515315246582031, 0.025129344940185545, 0.025075935363769532, 0.025151487350463866, 0.02513523292541504, 0.025131999969482424, 0.025148319244384765, 0.025214975357055663, 0.025159807205200196, 0.025175840377807616, 0.02516592025756836, 0.02516092872619629, 0.025221248626708985, 0.02517673683166504, 0.025208992004394533, 0.025272159576416015, 0.025288703918457032, 0.02524675178527832, 0.025215967178344727, 0.025214975357055663, 0.025272319793701172, 0.025210880279541017, 0.025263391494750976, 0.025289535522460938, 0.02524969673156738, 0.025243167877197267, 0.02523391914367676, 0.025491487503051757, 0.02549679946899414, 0.02561507225036621, 0.025546335220336915, 0.025561376571655272, 0.02556844711303711, 0.02551094436645508, 0.02547420883178711, 0.02547283172607422, 0.02543881607055664, 0.02548150444030762, 0.025356351852416994, 0.025403648376464843, 0.025421087265014648, 0.0254136962890625, 0.02535161590576172, 0.02540028762817383, 0.02538310432434082, 0.025395103454589844, 0.025421728134155275, 0.025395200729370116, 0.02540345573425293, 0.025386175155639647, 0.025405664443969727, 0.025420320510864257, 0.025430015563964844, 0.025482976913452148, 0.027557119369506836, 0.026409568786621093, 0.025759904861450196, 0.025481151580810546, 0.02539116859436035, 0.025316864013671874, 0.025185823440551758, 0.02530988883972168, 0.025292352676391603, 0.02526870346069336, 0.025321727752685548, 0.025298944473266603, 0.02533580780029297, 0.025395200729370116, 0.02535737609863281, 0.025330623626708983, 0.025290752410888673, 0.02528374481201172, 0.025387872695922853, 0.025334815979003906, 0.025331968307495116, 0.025324256896972656, 0.025423871994018556, 
0.02533919906616211, 0.025399999618530275, 0.025324832916259764, 0.02527903938293457, 0.025333759307861328, 0.025313440322875976, 0.025395008087158204, 0.025473215103149413, 0.025429439544677735, 0.025458303451538086, 0.02541254425048828, 0.025400768280029296, 0.025337600708007814, 0.025475807189941406, 0.02550793647766113, 0.02557481575012207, 0.02565305519104004, 0.025588512420654297, 0.025640960693359374, 0.025520128250122072, 0.02551807975769043, 0.02555084800720215, 0.025440288543701173, 0.025483232498168945, 0.02543996810913086, 0.025514272689819335, 0.025632768630981444, 0.02549350357055664, 0.02545212745666504, 0.025534624099731444, 0.0255631046295166, 0.025532703399658203, 0.025503904342651366, 0.025528160095214844, 0.02562607955932617, 0.02550966453552246, 0.02557619285583496, 0.025487360000610353, 0.025468639373779297, 0.025522464752197264, 0.027432352066040038, 0.026382944107055665, 0.025683967590332032, 0.025415679931640626, 0.025367839813232422, 0.025316287994384765, 0.02523664093017578, 0.025263872146606445, 0.025226112365722655, 0.025225215911865235, 0.025282560348510744, 0.025198816299438476, 0.02527769660949707, 0.025283103942871095, 0.02526780891418457, 0.025250207901000975, 0.02526348876953125, 0.025256576538085936, 0.02526361656188965, 0.025252351760864256, 0.025286624908447266, 0.025241632461547852, 0.025235456466674806, 0.025312416076660155, 0.025287519454956053, 0.025416736602783204, 0.025417856216430664, 0.025406303405761718, 0.02533785629272461, 0.025444351196289062, 0.02544041633605957, 0.025357791900634766, 0.025389440536499025, 0.025386560440063478, 0.02539967918395996, 0.025518144607543945, 0.025636192321777343, 0.025657375335693358, 0.025696895599365235, 0.025721920013427733, 0.02573529624938965, 0.025709503173828124, 0.025648416519165038, 0.02559427261352539, 0.025664800643920897, 0.025568159103393554, 0.0255098876953125, 0.025591039657592775, 0.025627264022827147, 0.025569408416748048, 0.02550783920288086, 0.02551759910583496, 0.025497312545776366, 0.02556390380859375, 0.025663488388061522, 0.025669631958007814, 0.02555904006958008, 0.02558361625671387, 0.025487360000610353, 0.025497055053710936, 0.025512416839599608, 0.02543417549133301, 0.025572511672973634, 0.02757164764404297, 0.026409120559692384, 0.025804895401000977, 0.02549177551269531, 0.025405023574829103, 0.025350751876831053, 0.02529177665710449, 0.025393983840942384, 0.025279872894287108, 0.02542854309082031, 0.025271360397338866, 0.025312255859375, 0.025358335494995117, 0.025221120834350585, 0.02533785629272461, 0.025579519271850586, 0.025325567245483398, 0.025357568740844726, 0.025396127700805664, 0.02537766456604004, 0.02530019187927246, 0.025370624542236327, 0.02539481544494629, 0.025310367584228517, 0.025390047073364258, 0.02533171272277832, 0.02549763107299805, 0.025452512741088867, 0.025462047576904297, 0.025405344009399415, 0.02543903923034668, 0.025468671798706054, 0.025462848663330078, 0.02545289611816406, 0.025438047409057616, 0.025511936187744142, 0.025585727691650392, 0.025652191162109376, 0.02578326416015625, 0.025763071060180665, 0.025670303344726562, 0.02564656066894531, 0.025858688354492187, 0.025592960357666016, 0.025582464218139648, 0.025628671646118165, 0.025568960189819336, 0.025560768127441406, 0.025557472229003907, 0.025612447738647463, 0.02553856086730957, 0.02550495910644531, 0.02569094467163086, 0.02556723213195801, 0.025527807235717775, 0.025792320251464843, 0.025517824172973633, 0.02570307159423828, 0.02559414482116699, 0.025543840408325195, 0.025592672348022462, 
0.025587039947509764, 0.025574047088623045, 0.027613183975219727, 0.026406911849975585, 0.025882240295410155, 0.0254652156829834, 0.025341951370239257, 0.025358272552490235, 0.02525913619995117, 0.025223712921142578, 0.02537939262390137, 0.02526595115661621, 0.025286272048950197, 0.025452991485595704, 0.02562063980102539, 0.02532745552062988, 0.025466976165771486, 0.0252458553314209, 0.02536832046508789, 0.025382911682128906, 0.025474559783935546, 0.025426431655883788, 0.025366527557373047, 0.02532147216796875, 0.025312320709228515, 0.025529407501220704, 0.02544156837463379, 0.02547158432006836, 0.02540652847290039, 0.025391807556152345, 0.025495328903198243, 0.02540105628967285, 0.025504512786865233, 0.025509088516235352, 0.02553433609008789, 0.02543008041381836, 0.025397279739379882, 0.02544076728820801, 0.025571647644042968, 0.025616479873657227, 0.025710079193115236, 0.02575811195373535, 0.025660608291625978, 0.025705184936523438, 0.025736799240112306, 0.025651647567749025, 0.025572608947753907, 0.02561311912536621, 0.02566147232055664, 0.025532384872436524, 0.025546335220336915, 0.025549215316772463, 0.02552422332763672, 0.02548940849304199, 0.02557708740234375, 0.02556867218017578, 0.02559689521789551, 0.025612287521362305, 0.025592832565307616, 0.025477216720581054, 0.02564803123474121, 0.025513439178466796, 0.02544278335571289, 0.02557548713684082, 0.025589759826660157, 0.02799635124206543, 0.02732044792175293, 0.02597875213623047, 0.025593311309814452, 0.025430463790893556, 0.025278560638427733, 0.025306751251220703, 0.025188735961914063, 0.025194719314575197, 0.025214752197265624, 0.02525388717651367, 0.025221120834350585, 0.025267936706542968, 0.025274656295776368, 0.025186304092407227, 0.025192447662353516, 0.02529078483581543, 0.025262048721313476, 0.025260032653808592, 0.02526323127746582, 0.02524457550048828, 0.025235424041748045, 0.02530303955078125, 0.02523535919189453, 0.025268320083618165, 0.025329504013061523, 0.02541993522644043, 0.025265247344970702, 0.02531830406188965, 0.02529280090332031, 0.025263296127319337, 0.025299072265625, 0.0254081916809082, 0.025423871994018556, 0.02533785629272461, 0.025315584182739256, 0.025394752502441408, 0.0254006404876709, 0.025576095581054687, 0.025528543472290038, 0.025604032516479493, 0.025534528732299805, 0.02551603126525879, 0.025454336166381836, 0.02542639923095703, 0.025384735107421875, 0.02536150360107422, 0.025336736679077147, 0.025367551803588868, 0.02537980842590332, 0.025350175857543945, 0.025309183120727538, 0.02535545539855957, 0.02542265510559082, 0.025399295806884766, 0.02538710403442383, 0.025403295516967773, 0.025391231536865233, 0.02536147117614746, 0.025381696701049804, 0.025380191802978517, 0.025303455352783204, 0.025356544494628906, 0.027547679901123046, 0.026238943099975588, 0.025597951889038087, 0.02530860710144043, 0.025188928604125978, 0.02512303924560547, 0.025121984481811525, 0.025106496810913086, 0.02510492706298828, 0.025083072662353517, 0.025108480453491212, 0.025086368560791016, 0.02519798469543457, 0.025145599365234375, 0.025170272827148437, 0.02513052749633789, 0.02514019203186035, 0.02512067222595215, 0.02518016052246094, 0.02511052894592285, 0.02514739227294922, 0.025198400497436522, 0.02515897560119629, 0.02516214370727539, 0.025177600860595704, 0.02518729591369629, 0.025148767471313477, 0.025203359603881835, 0.025272319793701172, 0.02524777603149414, 0.025284736633300782, 0.025237279891967772, 0.02525103950500488, 0.02520140838623047, 0.025286304473876954, 0.025190719604492186, 
0.025458816528320313, 0.02552217674255371, 0.025509952545166015, 0.025513120651245117, 0.02553731155395508, 0.025478559494018553, 0.0255185604095459, 0.025478784561157226, 0.025450847625732423, 0.02538528060913086, 0.025359359741210938, 0.02536739158630371, 0.025333951950073243, 0.025355167388916015, 0.025343008041381836, 0.025322816848754884, 0.025336032867431642, 0.02532588768005371, 0.025346080780029298, 0.025331615447998047, 0.02532566452026367, 0.025341503143310545, 0.025418367385864258, 0.02538857650756836, 0.025409696578979492, 0.02535641670227051, 0.02531670379638672, 0.027372032165527343, 0.026198400497436523, 0.025587711334228515, 0.025290367126464843, 0.025166208267211915, 0.025153696060180666, 0.025100128173828125, 0.025067520141601563, 0.025089216232299805, 0.025076416015625, 0.0250897274017334, 0.025067968368530272, 0.025147071838378908, 0.025131328582763672, 0.025164800643920897, 0.02514588737487793, 0.025168352127075196, 0.025165824890136718, 0.0251507511138916, 0.025149663925170897, 0.025173599243164063, 0.025146272659301756, 0.025151615142822267, 0.025142911911010743, 0.025219327926635744, 0.025266176223754884, 0.02527027130126953, 0.025298944473266603, 0.02524896049499512, 0.025281343460083008, 0.025268224716186522, 0.025192287445068358, 0.02527248001098633, 0.025186304092407227, 0.025235519409179688, 0.025200000762939455, 0.025426591873168945, 0.025475168228149415, 0.025552703857421876, 0.025518304824829103, 0.025534271240234375, 0.025546527862548827, 0.02549331283569336, 0.02546832084655762, 0.025422815322875977, 0.025412607192993163, 0.02541366386413574, 0.025310176849365235, 0.02533990478515625, 0.025362432479858397, 0.025351423263549805, 0.025299423217773436, 0.02537516784667969, 0.025329599380493163, 0.025368480682373046, 0.025392255783081054, 0.025381023406982423, 0.025364416122436523, 0.025416479110717774, 0.025424959182739258, 0.02536953544616699, 0.025401344299316408, 0.025364479064941405, 0.02734492874145508, 0.02621446418762207, 0.025599264144897462, 0.025324159622192383, 0.025219072341918947, 0.02511631965637207, 0.025108352661132812, 0.025151968002319336, 0.0251144962310791, 0.02509427261352539, 0.02516691207885742, 0.025217887878417968, 0.025188447952270508, 0.025148479461669922, 0.025172416687011718, 0.025202623367309572, 0.025176639556884765, 0.02517353630065918, 0.025257440567016603, 0.02524425506591797, 0.025250207901000975, 0.025267263412475587, 0.02518726348876953, 0.025229440689086915, 0.025249568939208985, 0.025225311279296874, 0.025243776321411133, 0.025246912002563477, 0.025264352798461915, 0.02522707176208496, 0.025225759506225586, 0.025273536682128905, 0.02528278350830078, 0.02528518486022949, 0.025335840225219727, 0.025285984039306642, 0.025477920532226563, 0.0254849910736084, 0.025548095703125, 0.0255533447265625, 0.0255118408203125, 0.025472864151000977, 0.025512767791748048, 0.025446495056152343, 0.025411359786987303, 0.025429983139038086, 0.02541584014892578, 0.025380256652832032, 0.025419519424438475, 0.02540015983581543, 0.025368255615234377, 0.02537094306945801, 0.025366527557373047, 0.025450496673583983, 0.025421247482299805, 0.02542255973815918, 0.02557119941711426, 0.025437408447265625, 0.02542198371887207, 0.025418239593505858, 0.02538409614562988, 0.025377759933471678, 0.025401311874389647]",tokens/s,39.31765316291667,, 
float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 634, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 
306, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 102, in __init__ self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 219533 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.392576,14281.474048,0.0,13878.951936,13865.632768,s,1,7.39852880859375,7.39852880859375,0.0,7.39852880859375,7.39852880859375,7.39852880859375,7.39852880859375,[7.39852880859375],,kWh,9.39636865414286e-06,1.0285647333483222e-06,4.00250320199691e-06,1.4427436589488092e-05,,MB,1336.684544,14707.195904,0.0,14289.993728,14241.298944,s,10,1.8285996551513672,0.18285996551513672,0.00679664568458064,0.18536700439453124,0.18816171417236327,0.18827529220581055,0.18836615463256837,"[0.16611967468261718, 0.18321197509765624, 0.18838887023925782, 0.1835581817626953, 0.1860232696533203, 0.1743507537841797, 0.18586172485351563, 0.1848722839355469, 0.18807644653320313, 0.188136474609375]",tokens/s,1399.978389358325,kWh,5.341654935908936e-06,5.890950617273028e-07,3.566613964399824e-06,9.497363962036063e-06,tokens/kWh,26954847.78969324,MB,1361.924096,14874.968064,0.0,14457.765888,14413.156352,s,10,41.01407421875,4.101407421875001,0.007813974178694479,4.101002197265625,4.1099016601562495,4.1118629394531245,4.113431962890624,"[4.09911474609375, 4.090711669921875, 4.093533447265625, 4.09018359375, 4.09937255859375, 4.10689306640625, 4.10834326171875, 4.1026318359375, 4.1094658203125, 4.11382421875]",tokens/s,15.360580776244586,kWh,0.00011977558217200816,1.321150365873686e-05,7.939864685220056e-05,0.00021238573268294556,tokens/kWh,296630.09470625734,,s,630,41.01055606842041,0.06509612074352447,0.0004135725938333099,0.06507632064819335,0.06546919860839844,0.06563858413696289,0.06710072738647462,"[0.06735187530517578, 0.06532396697998047, 0.06516502380371093, 0.06479071807861328, 0.06499951934814453, 0.0645323486328125, 
0.06461027526855469, 0.06488079833984375, 0.06477587127685547, 0.06466796875, 0.06449330902099609, 0.06459417724609375, 0.06447309112548828, 0.06449945831298828, 0.06474368286132813, 0.06489282989501953, 0.06492988586425781, 0.06497074890136718, 0.06543974304199218, 0.06479052734375, 0.06467359924316406, 0.06479657745361328, 0.06460192108154297, 0.0648274917602539, 0.06471903991699218, 0.06471289825439454, 0.06465945434570312, 0.06459379577636719, 0.06462630462646485, 0.0650245132446289, 0.06498303985595703, 0.06511001586914063, 0.06507520294189453, 0.06525746917724609, 0.06526322937011719, 0.06508927917480468, 0.06514342498779296, 0.06496051025390626, 0.06506905364990234, 0.06501580810546875, 0.06517759704589844, 0.06498099517822266, 0.06530006408691406, 0.06506265258789062, 0.06516802978515625, 0.06537625885009765, 0.06529750061035157, 0.06541756439208984, 0.06529491424560546, 0.06534950256347656, 0.0654603500366211, 0.06551347351074219, 0.06522214508056641, 0.06523283386230469, 0.06524781036376953, 0.06511180877685546, 0.06531462097167968, 0.06527635192871094, 0.06530441284179687, 0.06541059112548828, 0.06534381103515625, 0.06527798461914062, 0.06547481536865235, 0.06736640167236328, 0.06529484558105468, 0.06450176239013672, 0.06445465850830077, 0.06456934356689453, 0.06446275329589844, 0.06453052520751953, 0.06456729888916016, 0.06437660980224609, 0.06459945678710938, 0.06448210906982423, 0.06449884796142578, 0.0645283203125, 0.06452009582519531, 0.06472579193115234, 0.06513890838623047, 0.06497280120849609, 0.06493987274169923, 0.06478781127929688, 0.06482208251953125, 0.06473113250732422, 0.06456114959716797, 0.06466355133056641, 0.06461014556884766, 0.06517161560058594, 0.06468402862548828, 0.06469817352294922, 0.06478630065917969, 0.0647039031982422, 0.06489180755615234, 0.06498303985595703, 0.06511001586914063, 0.06495346832275391, 0.06509248352050781, 0.06507110595703125, 0.06496665954589843, 0.06489702606201173, 0.06485369873046876, 0.06494425964355469, 0.06514073944091797, 0.06481510162353515, 0.06486153411865235, 0.06489379119873047, 0.06480486297607421, 0.06491737365722657, 0.06494371032714844, 0.06551507568359374, 0.06511840057373047, 0.06515529632568359, 0.0653148193359375, 0.06504700469970703, 0.06526089477539063, 0.06506114959716797, 0.0651596450805664, 0.06512614440917969, 0.06501593780517578, 0.06496415710449219, 0.06510240173339844, 0.06512751770019531, 0.06496758270263672, 0.06514800262451172, 0.06512854766845703, 0.06531094360351562, 0.06709436798095703, 0.0651060791015625, 0.06463005065917969, 0.06449964904785156, 0.06460697937011718, 0.06444051361083984, 0.06446080017089843, 0.0645505599975586, 0.06471702575683594, 0.06444249725341797, 0.06441741180419921, 0.06459430694580078, 0.06445875549316406, 0.0646819839477539, 0.06477619171142578, 0.06501350402832032, 0.06506716918945313, 0.06487254333496094, 0.06488678741455078, 0.06471475219726562, 0.06469574737548828, 0.06464288330078125, 0.06466815948486328, 0.06455935668945313, 0.06472198486328125, 0.06528217315673829, 0.06460288238525391, 0.06516925048828125, 0.06483289337158203, 0.06481382751464844, 0.0652613754272461, 0.06498454284667969, 0.06498796844482421, 0.06502809906005859, 0.06491136169433594, 0.06479666900634766, 0.06489190673828125, 0.06487535858154297, 0.06501801300048828, 0.06490493011474609, 0.06549526214599609, 0.06507730865478516, 0.06500678253173828, 0.06500764465332032, 0.06498588562011719, 0.06517263793945313, 0.06518256378173828, 0.06519398498535156, 0.0650588150024414, 0.06517350769042969, 
0.06483558654785156, 0.06524313354492188, 0.06513565063476562, 0.06506390380859375, 0.0652410888671875, 0.06527548980712891, 0.06536227416992188, 0.06529030609130859, 0.06520326232910156, 0.06520867156982423, 0.06523481750488282, 0.06541939544677734, 0.06560623931884765, 0.06704124450683593, 0.06522617340087891, 0.06467132568359375, 0.06471984100341797, 0.06453648376464843, 0.06447663879394532, 0.06460889434814453, 0.06447923278808594, 0.06453842926025391, 0.06467398071289063, 0.06445260620117188, 0.06461843109130859, 0.0645853729248047, 0.06465167999267578, 0.06483545684814453, 0.06515865325927735, 0.06514908599853515, 0.06492002868652344, 0.0648908462524414, 0.0646902084350586, 0.0646014404296875, 0.0646388168334961, 0.06465414428710937, 0.06467977905273438, 0.06491756439208984, 0.06469149017333985, 0.06467667388916015, 0.06469222259521484, 0.06481836700439453, 0.06484255981445312, 0.06492768096923827, 0.06505651092529297, 0.06495059204101562, 0.0650956802368164, 0.064901123046875, 0.0648253402709961, 0.06472294616699219, 0.06475161743164062, 0.06488473510742188, 0.0647557144165039, 0.06479257965087891, 0.06479872131347657, 0.06484377288818359, 0.06482067108154296, 0.06497532653808594, 0.0650195541381836, 0.0651119384765625, 0.06513311767578125, 0.06541516876220703, 0.06499529266357422, 0.06516944122314453, 0.06499737548828124, 0.06507724761962891, 0.06493583679199219, 0.06516918182373047, 0.06505299377441406, 0.06520783996582032, 0.06510018920898437, 0.06508547210693359, 0.06522032165527344, 0.0651902084350586, 0.0653656005859375, 0.06538690948486328, 0.06710332489013672, 0.06517964935302735, 0.0650403823852539, 0.06466150665283203, 0.06460594940185548, 0.06454668426513673, 0.06449600219726563, 0.06458367919921874, 0.06452838134765625, 0.06454681396484375, 0.06455091094970702, 0.06455091094970702, 0.06457917022705079, 0.06452470397949218, 0.06473846435546875, 0.0650406723022461, 0.06517533111572266, 0.0650816650390625, 0.06476841735839843, 0.06473939514160157, 0.06479257965087891, 0.06494777679443359, 0.06503456115722656, 0.06494425964355469, 0.06500879669189454, 0.06485417938232421, 0.06482809448242187, 0.06477005004882813, 0.06469149017333985, 0.06476006317138672, 0.06502393341064452, 0.06517814636230469, 0.06522675323486328, 0.06540191650390625, 0.06520928192138672, 0.06504966735839844, 0.06513555145263672, 0.06505657958984375, 0.06507539367675781, 0.06511615753173829, 0.0650218276977539, 0.06507942199707031, 0.06503014373779296, 0.06503014373779296, 0.06495231628417969, 0.06524451446533203, 0.06533932495117188, 0.06578659057617188, 0.06553600311279296, 0.06569983673095703, 0.06528819274902344, 0.06516233825683594, 0.06568390655517578, 0.06513302612304687, 0.06512966156005859, 0.06505145263671876, 0.06540697479248046, 0.06519385528564453, 0.06508502197265625, 0.06521705627441406, 0.06551119995117187, 0.06540252685546875, 0.06583558654785156, 0.06778137969970703, 0.06548070526123047, 0.06490438079833985, 0.0647441635131836, 0.06481423950195313, 0.06464575958251953, 0.06475357055664062, 0.06484419250488281, 0.06469580841064453, 0.06472512054443359, 0.06484751892089843, 0.06483222198486328, 0.06479257965087891, 0.06467378997802735, 0.06505677032470703, 0.06515916442871093, 0.06553337860107422, 0.06523551940917968, 0.06509363555908203, 0.0651673583984375, 0.06478582763671875, 0.06488534545898438, 0.064901123046875, 0.06476595306396485, 0.06476361846923828, 0.06479663848876953, 0.0647149429321289, 0.06488896179199219, 0.06500287628173829, 0.06518438720703125, 0.06531417846679688, 
0.06553459167480469, 0.06535782623291016, 0.06541926574707031, 0.06519808197021484, 0.06492364501953125, 0.06497689819335938, 0.06515507507324218, 0.06512569427490235, 0.0651661148071289, 0.06542326354980468, 0.06517536163330079, 0.06526995086669922, 0.06513664245605469, 0.06527369689941406, 0.06546857452392578, 0.06573670196533203, 0.06564454650878906, 0.06538416290283203, 0.06537993621826171, 0.06545478057861329, 0.06532659149169921, 0.06524524688720704, 0.06529068756103516, 0.06518374633789062, 0.06530048370361329, 0.06518361663818359, 0.06531462097167968, 0.0657155532836914, 0.06531375885009766, 0.065414306640625, 0.0655301742553711, 0.06572048187255859, 0.06721737670898438, 0.06532710266113281, 0.06498079681396485, 0.064897216796875, 0.06489702606201173, 0.06473232269287109, 0.06491426849365234, 0.06474752044677734, 0.06486563110351562, 0.0648526382446289, 0.06482061004638671, 0.06497344207763672, 0.06484928131103515, 0.064853759765625, 0.06506169891357422, 0.0653674545288086, 0.06545062255859375, 0.06505593872070313, 0.06517974090576172, 0.06484249877929688, 0.06494822692871094, 0.06485401916503906, 0.06492364501953125, 0.06491283416748046, 0.065155517578125, 0.06480089569091797, 0.0649767074584961, 0.06513257598876954, 0.06520796966552735, 0.06508329772949219, 0.06550994873046875, 0.0651223373413086, 0.0653042221069336, 0.06530902099609374, 0.06529209899902344, 0.06515084838867187, 0.0653314208984375, 0.06491059112548828, 0.06515593719482422, 0.0649912338256836, 0.06525459289550781, 0.06502268981933594, 0.06531696319580078, 0.06550498962402344, 0.06605033874511719, 0.06522675323486328, 0.06542668914794922, 0.06533808135986328, 0.06526569366455078, 0.06521855926513671, 0.0652410888671875, 0.06521446228027344, 0.06536601257324219, 0.06513616180419922, 0.0652886734008789, 0.06550051116943359, 0.06526223754882812, 0.0652779541015625, 0.06537830352783203, 0.0661904296875, 0.06560655975341798, 0.06535987091064453, 0.06558502197265625, 0.06715200042724609, 0.06523085021972656, 0.0647039031982422, 0.06519570922851563, 0.06461673736572265, 0.0647399673461914, 0.06512435150146484, 0.06474956512451172, 0.06504029083251953, 0.06564198303222656, 0.06465318298339844, 0.06465404510498046, 0.06475775909423828, 0.0646123504638672, 0.06512230682373046, 0.06520146942138672, 0.06527171325683594, 0.06486710357666016, 0.06509158325195312, 0.06505677032470703, 0.06477113342285157, 0.06474140930175781, 0.06477078247070313, 0.06492505645751953, 0.06493414306640626, 0.06581305694580078, 0.064851806640625, 0.06489923095703125, 0.06492931365966798, 0.0650203857421875, 0.06535577392578125, 0.06531609344482422, 0.06515789031982422, 0.06543106842041016, 0.06518627166748046, 0.06484377288818359, 0.06481100463867187, 0.0649318389892578, 0.0648169937133789, 0.06494633483886719, 0.06489497375488282, 0.06593536376953125, 0.06476547241210938, 0.0649097900390625, 0.06499737548828124, 0.0650035171508789, 0.06522866821289063, 0.06528118133544922, 0.06521654510498047, 0.06517577362060546, 0.06512713623046874, 0.06513868713378906, 0.06513356781005859, 0.06539571380615235, 0.06522991943359376, 0.06531708526611328, 0.06536812591552735, 0.06577011108398438, 0.06515507507324218, 0.06529347229003907, 0.06535440063476562, 0.06533740997314454, 0.06536819458007813, 0.06701292419433594, 0.06505010986328125, 0.06471501159667968, 0.0645732192993164, 0.06451042938232422, 0.06470601654052735, 0.06469229125976562, 0.06466537475585937, 0.06475437164306641, 0.06486835479736328, 0.06484786987304687, 0.06488195037841797, 0.06482608032226563, 
0.06500291442871094, 0.06519852447509765, 0.06544774627685547, 0.06547901153564453, 0.06522463989257812, 0.0651817626953125, 0.0649318389892578, 0.06500348663330079, 0.06499945831298828, 0.06499094390869141, 0.06504611206054688, 0.06565548706054687, 0.06534349060058593, 0.06505795288085937, 0.0650753631591797, 0.06522528076171875, 0.06516259002685547, 0.06536444854736329, 0.06539033508300782, 0.06524781036376953, 0.0653803482055664, 0.06531804656982422, 0.0651600341796875, 0.06521011352539062, 0.06527555084228516, 0.06524066925048828, 0.06557389068603516, 0.06518089294433593, 0.06515382385253907, 0.06517555236816407, 0.06525865936279297, 0.06543753814697266, 0.06538499450683594, 0.06559715270996094, 0.06551193237304688, 0.06562422180175781, 0.06540092468261718, 0.06527180480957032, 0.06535107421875, 0.06537216186523437, 0.06517001342773437, 0.06539190673828126, 0.06534627532958984, 0.06533033752441406, 0.06539942169189453, 0.06568777465820312, 0.06530223846435547, 0.0654543685913086, 0.06556057739257813, 0.06544998168945312, 0.06724588775634766, 0.06513053131103516, 0.06476016235351563, 0.06467993927001953, 0.06462032318115235, 0.06478460693359375, 0.06459801483154297, 0.06497824096679687, 0.06497459411621094, 0.06486659240722656, 0.06490115356445313, 0.06500621032714844, 0.06487628936767578, 0.0648642578125, 0.06538880157470703, 0.06529843139648438, 0.06539673614501954, 0.0652738265991211, 0.0650670394897461, 0.06500863647460937, 0.0649144287109375, 0.065193603515625, 0.06505510711669922, 0.0650668487548828, 0.065446044921875, 0.06568653106689454, 0.06485298919677734, 0.06504243469238281, 0.06561996459960938, 0.06579609680175781, 0.06575308990478515, 0.06571826934814454, 0.06538201904296875, 0.06528147125244141, 0.06531683349609375, 0.06519692993164063, 0.06529647827148438, 0.06519805145263671, 0.06513369750976562, 0.06536284637451172, 0.06600498962402344, 0.06532505798339844, 0.06516070556640625, 0.06542182159423827, 0.06538182067871094, 0.06546217346191406, 0.06554486083984375, 0.06546781158447265, 0.06571887969970704, 0.06526976013183594, 0.06519779205322265, 0.06543981170654296, 0.06526290893554687, 0.06549801635742188, 0.06551347351074219, 0.06521171569824219, 0.06522908782958985, 0.06558966064453126, 0.06541216278076172, 0.06556358337402343, 0.06555632019042969, 0.06563442993164062, 0.06553593444824218]",tokens/s,15.36189850605616,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,806.776832,3458.072576,0.0,3055.550464,2937.680896,s,1,7.21132177734375,7.21132177734375,0.0,7.21132177734375,7.21132177734375,7.21132177734375,7.21132177734375,[7.21132177734375],,kWh,4.10241950416245e-06,4.441524944228583e-07,1.0480563939971321e-06,5.5946283925824404e-06,,MB,1175.04,3527.278592,0.0,3114.27072,2817.475072,s,10,2.482448928833008,0.24824489288330076,0.0016909882646942798,0.24833857727050782,0.25089162292480466,0.25090201873779294,0.2509103353881836,"[0.24583378601074218, 0.24858905029296874, 0.24837997436523437, 0.24926112365722655, 0.2463399658203125, 0.24640899658203125, 0.2509124145507812, 0.2508893127441406, 0.24829718017578126, 0.24753712463378907]",tokens/s,1031.239744860914,kWh,7.317287283542744e-06,8.069281542162056e-07,4.866010837250092e-06,1.2990226275009042e-05,tokens/kWh,19707124.000795882,MB,1205.354496,3527.278592,0.0,3114.27072,2877.809152,s,10,11.657559082031248,1.165755908203125,0.0022896255459479375,1.1647609252929687,1.169291943359375,1.1696603149414062,1.1699550122070312,"[1.1692100830078125, 1.1629317626953124, 1.1638907470703126, 1.1669404296875, 1.1650626220703124, 1.1643480224609375, 1.1669080810546875, 1.1700286865234375, 1.1637794189453126, 1.164459228515625]",tokens/s,54.04218804012502,kWh,3.403438198270786e-05,3.7537413728457805e-06,2.2342399818349805e-05,6.013052317390345e-05,tokens/kWh,1047720.8025913517,,s,630,11.655215375900264,0.018500341866508365,0.00023007966966821772,0.018454511642456053,0.018700957107543943,0.018925845527648925,0.019542841854095463,"[0.019302751541137694, 0.018955327987670897, 0.018617279052734376, 0.018476512908935545, 0.018383392333984373, 0.018284255981445313, 0.01831929588317871, 0.018262367248535156, 0.01828659248352051, 0.01886412811279297, 0.01835212707519531, 0.018391136169433595, 0.018378047943115233, 0.01857391929626465, 0.019127679824829102, 0.01894588851928711, 0.018570016860961915, 0.018398464202880858, 0.018486015319824217, 0.018710784912109375, 0.018517311096191407, 0.018356672286987306, 0.01830297660827637, 0.018486303329467775, 0.018379840850830078, 0.018513824462890623, 0.018391040802001952, 0.018464767456054687, 0.018519584655761718, 0.01842780876159668, 0.018613983154296875, 0.01889980888366699, 0.019611455917358397, 0.018426048278808595, 0.018491296768188475, 0.01864918327331543, 0.018515968322753908, 0.01848726463317871, 0.01835775947570801, 0.018373151779174805, 0.018527679443359375, 0.018417919158935547, 0.01847532844543457, 0.018685504913330077, 0.018579904556274413, 0.018763776779174804, 0.01841971206665039, 0.018585599899291993, 0.018524160385131837, 0.018497440338134767, 0.018552928924560546, 0.018747392654418944, 0.01858531188964844, 0.018534175872802733, 0.018620895385742187, 0.018519840240478515, 0.01857561683654785, 0.018503679275512695, 0.018437280654907226, 0.018494304656982423, 0.018515968322753908, 0.018448383331298827, 0.018489343643188477, 0.019336511611938476, 0.018788480758666994, 0.018555456161499024, 0.018495487213134765, 0.018322559356689454, 0.018366559982299805, 0.018307199478149416, 0.01828531265258789, 0.018354080200195313, 0.01838470458984375, 0.018311168670654295, 0.018368703842163086, 0.018333696365356447, 0.018457855224609375, 0.018283103942871092, 0.018287040710449218, 0.018317024230957032, 0.018335744857788085, 0.018298431396484374, 0.0183110408782959, 0.018389568328857422, 0.018296831130981444, 0.018862144470214844, 0.018638784408569337, 0.018575551986694337, 0.018427711486816406, 0.018329599380493163, 
0.01852035140991211, 0.01839689636230469, 0.01847644805908203, 0.018563167572021484, 0.018446239471435547, 0.018420671463012694, 0.01844380760192871, 0.018350208282470703, 0.018384384155273437, 0.018352319717407226, 0.01832534408569336, 0.018366207122802736, 0.018401920318603517, 0.018512319564819336, 0.018568864822387697, 0.018370559692382812, 0.018365888595581054, 0.018344512939453127, 0.018451744079589844, 0.01841334342956543, 0.018442495346069336, 0.018740095138549805, 0.018429759979248048, 0.018452543258666992, 0.018464384078979493, 0.01840937614440918, 0.01849795150756836, 0.018573312759399413, 0.0184770565032959, 0.018574975967407228, 0.018491264343261718, 0.018559423446655274, 0.01854470443725586, 0.018486719131469726, 0.01859231948852539, 0.01846272087097168, 0.020162559509277343, 0.019084768295288088, 0.018700832366943358, 0.018569215774536133, 0.018374656677246092, 0.018298112869262695, 0.018235744476318358, 0.01828700828552246, 0.018296831130981444, 0.01823539161682129, 0.018466527938842774, 0.01834012794494629, 0.018507776260375978, 0.018331647872924805, 0.01828873634338379, 0.018353759765625, 0.01828691291809082, 0.01827382469177246, 0.018340255737304686, 0.018292991638183594, 0.018327360153198243, 0.018296831130981444, 0.01835212707519531, 0.018259296417236327, 0.018475679397583007, 0.01835807991027832, 0.0183175048828125, 0.01839308738708496, 0.018381824493408205, 0.018477119445800782, 0.018331584930419923, 0.018347007751464844, 0.018335231781005858, 0.01831987190246582, 0.018405376434326173, 0.018356224060058594, 0.018459936141967774, 0.01845430374145508, 0.018355199813842774, 0.018392608642578124, 0.018360960006713868, 0.018528032302856445, 0.018413568496704103, 0.018338943481445314, 0.01845952033996582, 0.018500831604003905, 0.018396095275878908, 0.018460512161254883, 0.018618080139160158, 0.018417407989501953, 0.018467327117919922, 0.018494527816772462, 0.018568159103393555, 0.018679807662963867, 0.018619935989379884, 0.019091871261596678, 0.01866143989562988, 0.0185930233001709, 0.018578176498413087, 0.018731008529663085, 0.01854412841796875, 0.01853286361694336, 0.01847248077392578, 0.01947920036315918, 0.018787776947021485, 0.018482847213745116, 0.01841856002807617, 0.018311199188232423, 0.01902902412414551, 0.01852720069885254, 0.018491104125976564, 0.018315263748168945, 0.018273920059204102, 0.018428064346313475, 0.018333471298217774, 0.018268896102905274, 0.01836627197265625, 0.018306432723999025, 0.018279104232788085, 0.01834988784790039, 0.018319679260253907, 0.01840127944946289, 0.01842585563659668, 0.018337791442871093, 0.018476160049438476, 0.018399328231811524, 0.018461183547973634, 0.01849577522277832, 0.01855824089050293, 0.01846451187133789, 0.018381792068481444, 0.018491392135620118, 0.018378751754760742, 0.018484607696533203, 0.018545280456542967, 0.0186429443359375, 0.01885331153869629, 0.018540128707885743, 0.01846678352355957, 0.01841663932800293, 0.018452320098876953, 0.018354591369628907, 0.018396863937377928, 0.018388256072998047, 0.0184586238861084, 0.018559423446655274, 0.019554655075073243, 0.018423807144165038, 0.018437408447265626, 0.01840812873840332, 0.018446367263793947, 0.018509471893310547, 0.018396799087524413, 0.01870921516418457, 0.018683231353759766, 0.018557600021362305, 0.01863884735107422, 0.018501760482788086, 0.018526079177856446, 0.018545888900756837, 0.018459264755249023, 0.018497695922851564, 0.018581504821777343, 0.018583295822143554, 0.019074783325195313, 0.018577888488769533, 0.019333120346069335, 0.018941152572631837, 
0.018651391983032225, 0.018450464248657227, 0.01843452835083008, 0.018300960540771485, 0.018331647872924805, 0.018350080490112306, 0.018368032455444334, 0.0183056640625, 0.018571104049682617, 0.018576448440551757, 0.018398143768310546, 0.018333696365356447, 0.018339839935302735, 0.018282495498657226, 0.018374431610107423, 0.01834169578552246, 0.01838483238220215, 0.01837718391418457, 0.018364416122436524, 0.018355840682983397, 0.018321792602539064, 0.018327552795410155, 0.018339359283447265, 0.018312671661376952, 0.018542591094970702, 0.01847327995300293, 0.018428255081176757, 0.018544992446899413, 0.018388992309570314, 0.01840246391296387, 0.018377216339111328, 0.018460960388183595, 0.019232831954956054, 0.018431999206542968, 0.018370559692382812, 0.018630847930908204, 0.018403167724609374, 0.018376672744750976, 0.018419519424438476, 0.018380992889404296, 0.018617855072021485, 0.01835615921020508, 0.01839571189880371, 0.018472095489501954, 0.01847724723815918, 0.018576032638549806, 0.018534400939941405, 0.018543935775756835, 0.018716384887695312, 0.01851817512512207, 0.018498367309570312, 0.01864089584350586, 0.018513280868530272, 0.018558656692504883, 0.018641599655151365, 0.018604127883911133, 0.01862460708618164, 0.01852560043334961, 0.018573600769042967, 0.018601791381835937, 0.018512447357177733, 0.01919385528564453, 0.01880816078186035, 0.018508447647094726, 0.0184106559753418, 0.01871494483947754, 0.018342432022094728, 0.018395040512084963, 0.01834809684753418, 0.018282495498657226, 0.018276384353637695, 0.018251775741577148, 0.018305023193359374, 0.018361631393432616, 0.018377023696899412, 0.018331647872924805, 0.018397600173950195, 0.01881292724609375, 0.018380800247192384, 0.018415199279785157, 0.018358144760131836, 0.018323551177978514, 0.018309568405151366, 0.018343936920166014, 0.018503679275512695, 0.018364416122436524, 0.018257343292236328, 0.018327199935913085, 0.018510751724243164, 0.018538496017456055, 0.018366464614868162, 0.018509824752807616, 0.01838595199584961, 0.018385887145996094, 0.018510080337524413, 0.018376447677612304, 0.01860630416870117, 0.01856230354309082, 0.018360864639282225, 0.018556928634643553, 0.01869004821777344, 0.01842585563659668, 0.0184770565032959, 0.018497472763061525, 0.01839651107788086, 0.018492000579833984, 0.01861599922180176, 0.018495071411132814, 0.018587615966796874, 0.01849228858947754, 0.018543743133544923, 0.01843395233154297, 0.01849625587463379, 0.01883568000793457, 0.01848518371582031, 0.01850579261779785, 0.018530303955078126, 0.018499263763427733, 0.018547008514404297, 0.018544639587402344, 0.01854422378540039, 0.018643360137939453, 0.018506752014160157, 0.018455551147460936, 0.019028095245361327, 0.018698528289794923, 0.018584096908569336, 0.018501632690429686, 0.01835212707519531, 0.018413408279418945, 0.01870220756530762, 0.019036064147949217, 0.01834553527832031, 0.018428735733032227, 0.018298879623413086, 0.018538496017456055, 0.018281856536865235, 0.018348672866821288, 0.01841971206665039, 0.018315231323242188, 0.01838640022277832, 0.01842211151123047, 0.018505184173583985, 0.01846944046020508, 0.018734655380249023, 0.018405920028686525, 0.018475103378295898, 0.018491392135620118, 0.01847657585144043, 0.01854080009460449, 0.01841584014892578, 0.018339839935302735, 0.018349472045898436, 0.01842403221130371, 0.018302623748779296, 0.01837129592895508, 0.018361663818359374, 0.01846326446533203, 0.018403167724609374, 0.018358528137207033, 0.018495328903198244, 0.018381311416625978, 0.018352895736694335, 0.01845471954345703, 
0.018340160369873047, 0.018440671920776367, 0.01831484794616699, 0.018424224853515626, 0.018562559127807618, 0.018491903305053712, 0.018435455322265624, 0.018406015396118164, 0.01881907272338867, 0.018907136917114258, 0.018538496017456055, 0.0184420166015625, 0.01862268829345703, 0.018554880142211915, 0.01862246322631836, 0.018989055633544923, 0.018610111236572267, 0.018573375701904298, 0.01861555290222168, 0.01958940887451172, 0.018603967666625976, 0.01851587104797363, 0.018573408126831056, 0.01961350440979004, 0.018976320266723634, 0.0187193603515625, 0.01846886444091797, 0.01840070343017578, 0.01838870429992676, 0.01833660888671875, 0.018431999206542968, 0.018325504302978517, 0.018631744384765624, 0.018463680267333984, 0.018364479064941406, 0.018382783889770507, 0.01830297660827637, 0.01828659248352051, 0.0182968635559082, 0.018282112121582032, 0.018392608642578124, 0.018310976028442384, 0.019108383178710938, 0.020308448791503907, 0.018554655075073243, 0.018378976821899415, 0.01840742492675781, 0.018356224060058594, 0.018340959548950195, 0.01836857604980469, 0.01838502311706543, 0.018440927505493164, 0.01834726333618164, 0.018342304229736327, 0.01839958381652832, 0.018347360610961913, 0.01839779281616211, 0.018421279907226563, 0.018355871200561525, 0.018869119644165037, 0.018948095321655273, 0.019027488708496094, 0.018518207550048828, 0.018462047576904297, 0.018441152572631837, 0.018423295974731444, 0.018442752838134766, 0.018421760559082033, 0.018532127380371095, 0.018499807357788087, 0.01844380760192871, 0.018415327072143554, 0.018417823791503907, 0.01854252815246582, 0.01859014320373535, 0.018641120910644533, 0.018671327590942383, 0.018774303436279297, 0.01857254409790039, 0.01911427116394043, 0.01975267219543457, 0.018545408248901368, 0.01851408004760742, 0.018556671142578127, 0.018423807144165038, 0.018597984313964845, 0.019513919830322267, 0.018950143814086915, 0.018605791091918945, 0.01861689567565918, 0.018367263793945314, 0.01831827163696289, 0.018298496246337892, 0.018397567749023437, 0.018313215255737304, 0.018226463317871092, 0.018268896102905274, 0.018258975982666015, 0.01824867248535156, 0.018290815353393556, 0.018290016174316408, 0.018477216720581054, 0.018368223190307616, 0.018333759307861328, 0.018470495223999024, 0.018342687606811525, 0.01833366394042969, 0.018301120758056642, 0.01837273597717285, 0.01831635284423828, 0.01825827217102051, 0.018369407653808595, 0.018400928497314454, 0.018456703186035157, 0.018462560653686524, 0.01872489547729492, 0.018478912353515627, 0.018456480026245118, 0.018434335708618164, 0.01860812759399414, 0.018437471389770508, 0.01846681594848633, 0.018795072555541994, 0.018417760848999022, 0.018348031997680665, 0.018374719619750977, 0.0183438720703125, 0.018378751754760742, 0.018398687362670897, 0.018518560409545897, 0.01839523124694824, 0.01832963180541992, 0.018477951049804688, 0.01837772750854492, 0.01848320007324219, 0.018524160385131837, 0.018563072204589845, 0.01854364776611328, 0.01848214340209961, 0.018561023712158203, 0.018621471405029295, 0.018588640213012694, 0.018714624404907225, 0.018733055114746093, 0.01853843116760254, 0.01856108856201172, 0.018534271240234376, 0.018495616912841798, 0.018593791961669923, 0.019368415832519532, 0.018815359115600585, 0.018613344192504884, 0.018436288833618163, 0.018354656219482422, 0.018446592330932616, 0.018335744857788085, 0.01848646354675293, 0.018285375595092773, 0.018231296539306642, 0.018288639068603514, 0.01846886444091797, 0.018386943817138672, 0.018355871200561525, 0.018348384857177734, 
0.01840947151184082, 0.018394720077514647, 0.01830735969543457, 0.018378335952758788, 0.018407968521118163, 0.01844223976135254, 0.01840742492675781, 0.018323423385620118, 0.01852329635620117, 0.018361215591430665, 0.018406911849975584, 0.01834239959716797, 0.018366592407226563, 0.018352031707763672, 0.018403295516967774, 0.018468767166137694, 0.018399328231811524, 0.01844223976135254, 0.018498783111572267, 0.018389280319213868, 0.018788864135742187, 0.018493440628051756, 0.018407264709472657, 0.01846284866333008, 0.01841155242919922, 0.01845248031616211, 0.018440191268920898, 0.01846886444091797, 0.01850921630859375, 0.01870207977294922, 0.018452863693237304, 0.018612703323364257, 0.018476480484008788, 0.018448896408081054, 0.01854819107055664, 0.018546335220336913, 0.01852899169921875, 0.018544864654541016, 0.018534400939941405, 0.018618368148803712, 0.018560352325439452, 0.018507648468017578, 0.018657663345336913, 0.018569631576538084, 0.018585599899291993, 0.018566816329956055, 0.01854483222961426, 0.0185447998046875]",tokens/s,54.05305519301377,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.032128,11731.337216,0.0,11328.815104,11314.254848,s,1,7.22546435546875,7.22546435546875,0.0,7.22546435546875,7.22546435546875,7.22546435546875,7.22546435546875,[7.22546435546875],,kWh,7.402437816699603e-06,8.092916687964222e-07,2.296390726005959e-06,1.0508120211501984e-05,,MB,1198.927872,12173.836288,0.0,11760.828416,11713.906688,s,10,3.7130997009277347,0.37130997009277344,0.006986893901071928,0.37297303771972656,0.3758653411865234,0.37595865631103514,0.37603330841064453,"[0.3509007568359375, 0.3758446044921875, 0.37183895874023437, 0.3719395751953125, 0.3731261901855469, 0.3714414367675781, 0.37605197143554686, 0.37281988525390625, 0.3738340759277344, 0.37530224609375]",tokens/s,689.4509186921032,kWh,1.0556966376041525e-05,1.164235018950597e-06,7.0568012803573695e-06,1.8778002675349494e-05,tokens/kWh,13632972.815370811,MB,1204.740096,12278.693888,0.0,11865.686016,11828.952576,s,10,29.94567333984375,2.9945673339843752,0.0018176486249442253,2.9943807373046876,2.9966992431640627,2.9977650268554688,2.998617653808594,"[2.992723388671875, 2.9925146484375, 2.99286376953125, 2.99646240234375, 2.998830810546875, 2.994684326171875, 2.993934814453125, 2.9947958984375, 2.9947861328125, 
2.9940771484375]",tokens/s,21.038097652717102,kWh,8.771368657062376e-05,9.675002910558274e-06,5.812569531004312e-05,0.0001555143847912251,tokens/kWh,405107.21940337686,,s,630,29.942526981353762,0.04752782060532343,0.0003257842007814169,0.047480831146240236,0.04774613304138183,0.047851397132873535,0.04957880672454834,"[0.04954307174682617, 0.04790003204345703, 0.04738246536254883, 0.04725574493408203, 0.04721926498413086, 0.04726345443725586, 0.047196449279785155, 0.04728569412231445, 0.04725932693481445, 0.04719295883178711, 0.04721161651611328, 0.047274654388427734, 0.04728448104858399, 0.047247039794921876, 0.04721491241455078, 0.04724636840820313, 0.04726780700683594, 0.04735078430175781, 0.047372287750244144, 0.04757913589477539, 0.04774195098876953, 0.047651073455810544, 0.047644927978515626, 0.0475296630859375, 0.047414081573486325, 0.04739481735229492, 0.04733747100830078, 0.047316993713378906, 0.0473125114440918, 0.047375934600830075, 0.04739299011230469, 0.04740073776245117, 0.04739769744873047, 0.04737334442138672, 0.04734051132202149, 0.04738793563842773, 0.047428321838378903, 0.047408416748046876, 0.04738326263427734, 0.04756601715087891, 0.047612735748291016, 0.04771820831298828, 0.04766329574584961, 0.04764672088623047, 0.04768972778320312, 0.04765647888183594, 0.04764284896850586, 0.047536033630371094, 0.047500736236572264, 0.047430431365966794, 0.04742351913452148, 0.047439968109130856, 0.04749039840698242, 0.04748294448852539, 0.04748742294311523, 0.047451969146728515, 0.04747267150878906, 0.04756921768188477, 0.047587329864501954, 0.04784870529174805, 0.047897342681884766, 0.04788838577270508, 0.04785359954833984, 0.04956444931030273, 0.048004798889160157, 0.04763264083862305, 0.04739465713500977, 0.047274208068847655, 0.04725923156738281, 0.04718950271606445, 0.04720732879638672, 0.047247360229492184, 0.047277793884277344, 0.04725788879394531, 0.04724099349975586, 0.047260929107666015, 0.04724220657348633, 0.047298561096191405, 0.0472347526550293, 0.04734790420532226, 0.04741542434692383, 0.0474071044921875, 0.04745132827758789, 0.047636608123779296, 0.04764640045166016, 0.04761088180541992, 0.04758937454223633, 0.04739276885986328, 0.04737974548339844, 0.04733750534057617, 0.047333728790283205, 0.04739884948730469, 0.04738908767700195, 0.04739891052246094, 0.04737401580810547, 0.04735622406005859, 0.047331329345703124, 0.04743167877197266, 0.04738252639770508, 0.047363296508789066, 0.04735670471191406, 0.04743164825439453, 0.04750953674316406, 0.04759347152709961, 0.047759361267089843, 0.047775135040283204, 0.04769971084594726, 0.047643489837646484, 0.04756480026245117, 0.04746793746948242, 0.04740687942504883, 0.04741353607177735, 0.047480831146240236, 0.047424030303955075, 0.04746444702148438, 0.04751769638061523, 0.04749926376342774, 0.047467647552490236, 0.04756364822387695, 0.04756604766845703, 0.04752873611450195, 0.04758649444580078, 0.04766803359985351, 0.047718399047851565, 0.04777280044555664, 0.04779209518432617, 0.04955955123901367, 0.047901729583740234, 0.04742448043823242, 0.0472756462097168, 0.047186016082763675, 0.04723462295532226, 0.04721273422241211, 0.047194656372070314, 0.047201953887939456, 0.04731939315795899, 0.04725350570678711, 0.0472913932800293, 0.047219711303710936, 0.047243232727050784, 0.047274017333984376, 0.04728627014160156, 0.04729190444946289, 0.04731878280639648, 0.04734576034545898, 0.04747078323364258, 0.04772502517700195, 0.04765865707397461, 0.047610206604003905, 0.04754003143310547, 0.04741958236694336, 0.047394367218017576, 
0.04739116668701172, 0.047333377838134766, 0.047341217041015626, 0.047298912048339845, 0.04736000061035156, 0.04740300750732422, 0.047351646423339847, 0.04741251373291016, 0.04748783874511719, 0.047460384368896484, 0.04737638473510742, 0.047392478942871095, 0.04748838424682617, 0.047508350372314455, 0.0476201286315918, 0.04779753494262695, 0.04774576187133789, 0.04774198532104492, 0.04760367965698242, 0.04759366226196289, 0.047540897369384764, 0.04752195358276367, 0.04746854400634765, 0.047529983520507815, 0.0475074577331543, 0.047562751770019535, 0.04753376007080078, 0.04752620697021484, 0.047521793365478515, 0.04761804962158203, 0.04760780715942383, 0.04758515167236328, 0.047634559631347655, 0.04764422225952149, 0.04771475219726563, 0.04773068618774414, 0.04776668930053711, 0.0498487663269043, 0.04801923370361328, 0.047431903839111327, 0.047301631927490234, 0.047252479553222655, 0.047273281097412106, 0.04723571014404297, 0.047220287322998045, 0.04729062271118164, 0.04725785446166992, 0.047285503387451175, 0.0472993278503418, 0.04726489639282227, 0.04727283096313477, 0.04724531173706055, 0.047440929412841795, 0.04740582275390625, 0.047515872955322266, 0.04747625732421875, 0.04757961654663086, 0.04781190490722656, 0.04781865692138672, 0.047626495361328125, 0.04758377456665039, 0.04749619293212891, 0.04739104080200195, 0.04733407974243164, 0.04734566497802734, 0.0473620491027832, 0.04733257675170898, 0.0473853759765625, 0.04740095901489258, 0.04739276885986328, 0.04738662338256836, 0.047372287750244144, 0.04739683151245117, 0.04746409606933594, 0.04753398513793945, 0.04755913543701172, 0.047754753112792966, 0.04782131195068359, 0.047857601165771486, 0.04772848129272461, 0.047647998809814456, 0.04762313461303711, 0.04758233642578125, 0.04754726409912109, 0.047480831146240236, 0.047480831146240236, 0.04752588653564453, 0.04750118255615234, 0.04745587158203125, 0.04749884796142578, 0.047489280700683596, 0.047549087524414065, 0.04758486557006836, 0.047575454711914066, 0.04754227066040039, 0.04784041595458984, 0.048026142120361326, 0.047998497009277344, 0.047999713897705076, 0.04799596786499023, 0.04974563217163086, 0.04809664154052734, 0.04761206436157227, 0.047478527069091794, 0.047524864196777344, 0.04745977783203125, 0.047483295440673826, 0.047527393341064456, 0.04754048156738281, 0.04762041473388672, 0.04749715042114258, 0.047499454498291016, 0.047470401763916016, 0.0474600944519043, 0.04755500793457031, 0.04753388977050781, 0.047457534790039065, 0.04751424026489258, 0.047599937438964846, 0.047666656494140626, 0.04793193435668945, 0.04787756729125976, 0.047841056823730466, 0.047761985778808594, 0.047562976837158204, 0.047529022216796876, 0.047412158966064454, 0.04734905624389649, 0.04740166473388672, 0.047396160125732424, 0.047368896484375, 0.04742915344238281, 0.047358177185058595, 0.047382400512695315, 0.04742297744750976, 0.04742438507080078, 0.04737638473510742, 0.04736614227294922, 0.04745785522460937, 0.04762179183959961, 0.04771920013427734, 0.04767852783203125, 0.047649055480957034, 0.04776393508911133, 0.04769196701049805, 0.04760102462768555, 0.04752848052978516, 0.04756694412231445, 0.04752384185791016, 0.047505409240722656, 0.047480831146240236, 0.047572254180908206, 0.04756326293945313, 0.047586944580078124, 0.04752444839477539, 0.047440929412841795, 0.0474851188659668, 0.04751849746704102, 0.04761801528930664, 0.04764675140380859, 0.047734783172607424, 0.04773068618774414, 0.04780230331420898, 0.04979795074462891, 0.047984607696533205, 0.04743577575683594, 0.04726784133911133, 
0.047222782135009765, 0.04723865509033203, 0.04720896148681641, 0.04730006408691406, 0.047251998901367186, 0.047249408721923826, 0.047242366790771484, 0.04728006362915039, 0.047256511688232423, 0.047255550384521484, 0.04723708724975586, 0.047288352966308594, 0.047429630279541016, 0.047398750305175784, 0.04737654495239258, 0.04757689666748047, 0.04778403091430664, 0.0477564811706543, 0.04770844650268555, 0.04765555191040039, 0.047520927429199215, 0.047385215759277344, 0.04737046432495117, 0.047357601165771486, 0.04740911865234375, 0.04736857604980469, 0.0474152946472168, 0.04740300750732422, 0.047443649291992185, 0.04738489532470703, 0.04738016128540039, 0.04733724975585937, 0.04737225723266601, 0.04744454574584961, 0.04758643341064453, 0.04768447875976563, 0.047744384765625, 0.047725185394287106, 0.047761409759521485, 0.047742752075195315, 0.04769404983520508, 0.04762009429931641, 0.04755660629272461, 0.04752371215820313, 0.047495201110839845, 0.04749935913085938, 0.04743731307983398, 0.047446529388427736, 0.04744192123413086, 0.04767942428588867, 0.04762835311889648, 0.04759142303466797, 0.047633598327636716, 0.04762665557861328, 0.04757955169677734, 0.04763852691650391, 0.047736831665039066, 0.047736705780029295, 0.04775945663452148, 0.049742431640625, 0.04797439956665039, 0.047421440124511716, 0.04730060958862305, 0.047248577117919924, 0.04725747299194336, 0.047195041656494144, 0.047214622497558596, 0.04729241561889649, 0.04731903839111328, 0.04730470275878906, 0.047290367126464845, 0.04723427200317383, 0.04726784133911133, 0.047344417572021485, 0.04731679916381836, 0.0473397102355957, 0.047398303985595705, 0.047405662536621096, 0.04759321594238281, 0.04779647827148437, 0.047745025634765625, 0.047619136810302734, 0.047588287353515626, 0.04744323348999024, 0.0474136962890625, 0.04735823822021484, 0.04733059310913086, 0.04735385513305664, 0.04739468765258789, 0.047444831848144534, 0.04738662338256836, 0.047429630279541016, 0.04740217590332031, 0.0474422721862793, 0.0474362564086914, 0.047387710571289064, 0.047371200561523434, 0.047480831146240236, 0.04756070327758789, 0.0476789436340332, 0.047726207733154294, 0.04773980712890625, 0.04768972778320312, 0.04771430587768555, 0.047667198181152344, 0.04755820846557617, 0.04745379257202149, 0.04745711898803711, 0.047455390930175784, 0.04743040084838867, 0.04748825454711914, 0.04749964904785156, 0.047500896453857425, 0.047607872009277345, 0.04753615951538086, 0.04753641510009766, 0.0475489273071289, 0.04759689712524414, 0.047638847351074216, 0.04774947357177734, 0.047720447540283206, 0.04780166244506836, 0.049584671020507814, 0.04791011047363281, 0.04744204711914062, 0.047243934631347656, 0.04723712158203125, 0.04732291030883789, 0.0472496337890625, 0.04730598449707031, 0.04731110382080078, 0.04727040100097656, 0.04725964736938477, 0.047273983001708986, 0.04726742553710937, 0.04732524871826172, 0.047417278289794924, 0.047417758941650394, 0.04738662338256836, 0.0474337272644043, 0.04753612899780273, 0.047720447540283206, 0.047906814575195314, 0.047825950622558594, 0.04768662261962891, 0.04762009429931641, 0.04746854400634765, 0.047446014404296875, 0.04739686584472656, 0.047372287750244144, 0.04739481735229492, 0.047339519500732424, 0.047383678436279296, 0.04733529663085938, 0.04734809494018555, 0.047370880126953126, 0.047376224517822266, 0.04749843215942383, 0.04743036651611328, 0.04743398284912109, 0.04757196807861328, 0.04764313507080078, 0.04775775909423828, 0.04772051239013672, 0.04767948913574219, 0.047695358276367186, 0.04772915267944336, 
0.047597312927246095, 0.047569023132324216, 0.047437950134277346, 0.04749107360839844, 0.047472640991210936, 0.04749107360839844, 0.04751139068603515, 0.047470497131347655, 0.047495071411132815, 0.04752009582519531, 0.04753561782836914, 0.047495681762695315, 0.04759347152709961, 0.04755628967285156, 0.04768175888061523, 0.047734878540039063, 0.047720447540283206, 0.04779568099975586, 0.04965206527709961, 0.04793113708496094, 0.04740313720703125, 0.0472388801574707, 0.047180191040039066, 0.047222782135009765, 0.047255550384521484, 0.04727807998657227, 0.04729788970947266, 0.04726643371582031, 0.047271968841552735, 0.04730470275878906, 0.04725907135009766, 0.04729008102416992, 0.04728713607788086, 0.04732307052612305, 0.04742278289794922, 0.04739763259887695, 0.04739686584472656, 0.04758323287963867, 0.04775680160522461, 0.04771456146240234, 0.047648448944091794, 0.047538753509521484, 0.047435585021972655, 0.04740729522705078, 0.04737206268310547, 0.04738479995727539, 0.04738457489013672, 0.04736819076538086, 0.04754140853881836, 0.047446880340576175, 0.04739072036743164, 0.047427230834960935, 0.04746684646606445, 0.047373855590820316, 0.047471073150634764, 0.04749926376342774, 0.04749926376342774, 0.04765695953369141, 0.04768880081176758, 0.047670177459716793, 0.04775526428222656, 0.0478023681640625, 0.04770816040039062, 0.04758035278320313, 0.047506240844726565, 0.04749107360839844, 0.04752777481079101, 0.04750966262817383, 0.047505409240722656, 0.047670433044433594, 0.04753289413452148, 0.04759056091308594, 0.047491134643554686, 0.04758403015136719, 0.04760575866699219, 0.047759361267089843, 0.04759142303466797, 0.04767241668701172, 0.047685855865478514, 0.047727294921875, 0.04778147125244141, 0.04976339340209961, 0.04793017578125, 0.047392959594726565, 0.0472564811706543, 0.04722175979614258, 0.047263744354248044, 0.04721430587768555, 0.04729417419433594, 0.047276607513427736, 0.0472509765625, 0.04735023880004883, 0.047306751251220705, 0.04728972625732422, 0.047280769348144534, 0.04725091171264648, 0.04734799957275391, 0.04740927886962891, 0.04734543991088867, 0.04732144165039062, 0.047533313751220704, 0.04771916961669922, 0.047623809814453126, 0.04763891220092773, 0.04760575866699219, 0.04756070327758789, 0.04744140625, 0.047384254455566405, 0.04736083221435547, 0.047355903625488284, 0.04736115264892578, 0.047428096771240234, 0.047444351196289064, 0.04741024017333984, 0.0474447021484375, 0.04740476989746094, 0.047411231994628905, 0.04753046417236328, 0.04756633758544922, 0.04752601623535156, 0.04758975982666016, 0.04764179229736328, 0.04768851089477539, 0.047710208892822265, 0.04763555145263672, 0.04763536071777344, 0.04758937454223633, 0.04750950241088867, 0.047497215270996096, 0.04750726318359375, 0.0474945297241211, 0.04760652923583984, 0.04754848098754883, 0.04749430465698242, 0.04747760009765625, 0.047462398529052735, 0.047554561614990234, 0.04758291244506836, 0.04757126235961914, 0.04760355377197266, 0.047635776519775394, 0.04764937591552734, 0.04772480010986328, 0.04786588668823242]",tokens/s,21.040308334440933,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 214787 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.114048,3361.603584,0.0,2959.081472,2942.567424,s,1,7.11777880859375,7.11777880859375,0.0,7.11777880859375,7.11777880859375,7.11777880859375,7.11777880859375,[7.11777880859375],,kWh,5.734931387496545e-06,6.254675486119494e-07,1.8288903520097266e-06,8.189289288118221e-06,,MB,1226.170368,3552.444416,0.0,3139.436544,3105.830912,s,10,2.3877587585449223,0.23877587585449223,0.0012165774227066303,0.23863690948486327,0.24020257720947266,0.2407164695739746,0.24112758346557617,"[0.24123036193847655, 0.23794114685058593, 0.23720338439941407, 0.23824755859375, 0.23851280212402343, 0.2391829833984375, 0.24008837890625, 0.2387610168457031, 0.23950143432617188, 0.23708969116210937]",tokens/s,1072.1351103157674,kWh,7.235816569105898e-06,7.979843088174643e-07,4.800850724146542e-06,1.2834651602069904e-05,tokens/kWh,19946003.0499553,MB,1256.448,3594.387456,0.0,3181.379584,3162.0096,s,10,13.478092651367188,1.3478092651367188,0.004220818933189443,1.3473580322265626,1.3512488403320313,1.354245086669922,1.3566420837402344,"[1.3444010009765626, 1.3440445556640626, 1.346950927734375, 1.3492216796875, 1.3500196533203126, 1.3572413330078126, 1.3505830078125, 1.347625244140625, 1.3470908203125, 1.3409144287109376]",tokens/s,46.74251886346056,kWh,3.9216423309227425e-05,4.325095407177311e-06,2.475056180585239e-05,6.829208052225712e-05,tokens/kWh,922508.1373742542,,s,630,13.475731960296635,0.021390050730629575,0.00026329805632146813,0.021344255447387696,0.0215390567779541,0.021664690685272218,0.02239361450195313,"[0.02167807960510254, 0.02146713638305664, 0.02126438331604004, 0.021376031875610352, 0.021270816802978515, 0.02124665641784668, 0.021235712051391603, 0.021212671279907228, 0.021258079528808593, 0.021234336853027343, 0.02122710418701172, 0.021342592239379884, 0.02133350372314453, 0.02135503959655762, 0.021374975204467773, 0.021372928619384765, 0.02138012886047363, 0.021330751419067383, 0.021392576217651366, 0.021289951324462892, 0.021298208236694337, 0.02119340705871582, 0.021297439575195313, 0.02123936080932617, 0.021223232269287108, 0.02117919921875, 0.02118364715576172, 0.021295360565185547, 0.02125209617614746, 0.021309856414794923, 0.021319263458251952, 0.02132419204711914, 0.021375104904174803, 0.021298688888549806, 0.021346527099609373, 0.02121334457397461, 0.021296543121337892, 0.021301855087280275, 0.021344255447387696, 0.02128895950317383, 0.021269823074340822, 0.02149616050720215, 0.021690719604492186, 0.02152239990234375, 0.021520416259765626, 0.021377023696899415, 0.021374975204467773, 0.021440799713134766, 0.02139107131958008, 0.021430015563964844, 0.021408000946044923, 0.021356864929199217, 0.021472063064575195, 0.021270879745483397, 
0.02129955291748047, 0.021210592269897462, 0.021354751586914064, 0.021369312286376955, 0.021394752502441407, 0.021379776000976562, 0.02132329559326172, 0.021285343170166015, 0.021315071105957033, 0.021526527404785157, 0.02143846321105957, 0.021395456314086913, 0.021235712051391603, 0.021229759216308593, 0.021190464019775392, 0.021214719772338866, 0.021226112365722655, 0.021893184661865236, 0.021287872314453126, 0.02127142333984375, 0.02123161506652832, 0.021229280471801757, 0.0213055362701416, 0.02132387161254883, 0.02127462387084961, 0.02124185562133789, 0.02121881675720215, 0.021250463485717772, 0.02132147216796875, 0.02128879928588867, 0.02117683219909668, 0.02116559982299805, 0.021244384765625, 0.021292032241821288, 0.021424928665161134, 0.021551584243774412, 0.021393152236938478, 0.021354496002197267, 0.021288864135742186, 0.02120479965209961, 0.02130463981628418, 0.021238367080688478, 0.02115417671203613, 0.021213056564331055, 0.021223552703857423, 0.021245376586914062, 0.021188447952270508, 0.021308223724365236, 0.021306495666503906, 0.0212193603515625, 0.02118124771118164, 0.021309375762939453, 0.021259647369384765, 0.02274691200256348, 0.0213055362701416, 0.021381120681762695, 0.021317472457885744, 0.021257024765014648, 0.021733375549316408, 0.02124185562133789, 0.021364736557006835, 0.021178367614746094, 0.021202495574951172, 0.021160383224487305, 0.021323776245117186, 0.021259552001953126, 0.021254400253295898, 0.021254623413085937, 0.02127872085571289, 0.021755903244018555, 0.021626880645751953, 0.021319679260253906, 0.021778432846069336, 0.021636543273925782, 0.02130796813964844, 0.021468704223632812, 0.021307071685791015, 0.021371488571166993, 0.021256383895874024, 0.02130534362792969, 0.021396671295166016, 0.021358495712280275, 0.021363616943359375, 0.021341440200805663, 0.021446687698364258, 0.021328224182128906, 0.021361024856567382, 0.02126438331604004, 0.021362335205078124, 0.021270879745483397, 0.02164316749572754, 0.02146928024291992, 0.021380607604980468, 0.021306047439575194, 0.021326847076416015, 0.021318208694458007, 0.021418176651000976, 0.021397056579589843, 0.021273183822631835, 0.02138057518005371, 0.021497343063354494, 0.021240512847900392, 0.021276159286499022, 0.02126883125305176, 0.021467552185058594, 0.021321727752685548, 0.021344255447387696, 0.02125619125366211, 0.02135171127319336, 0.021276607513427734, 0.021338911056518556, 0.021286624908447266, 0.021416223526000977, 0.02127667236328125, 0.021307552337646484, 0.021243040084838866, 0.02123641586303711, 0.021405696868896484, 0.021356544494628905, 0.021489664077758788, 0.021436416625976562, 0.02142425537109375, 0.021761920928955077, 0.021326944351196288, 0.021410720825195313, 0.021257280349731445, 0.021477407455444335, 0.021377983093261718, 0.021353664398193358, 0.021301023483276366, 0.021410816192626952, 0.02147315216064453, 0.021405471801757812, 0.021405536651611327, 0.02139801597595215, 0.021768192291259765, 0.021594112396240234, 0.021438047409057616, 0.02139132881164551, 0.021334463119506836, 0.02143177604675293, 0.02133967971801758, 0.02134844779968262, 0.021334943771362306, 0.021344255447387696, 0.021340160369873046, 0.021327871322631836, 0.021303007125854492, 0.021395744323730467, 0.021313535690307618, 0.02136000061035156, 0.021614336013793947, 0.02132192039489746, 0.02147990417480469, 0.02128883171081543, 0.022777631759643556, 0.021436927795410156, 0.02142736053466797, 0.0214168643951416, 0.021214847564697267, 0.02134217643737793, 0.021354080200195313, 0.02137708854675293, 0.021365503311157226, 
0.021342208862304687, 0.02127462387084961, 0.021313631057739257, 0.021451711654663086, 0.02138175964355469, 0.02137286376953125, 0.021325952529907228, 0.021686559677124025, 0.021763711929321288, 0.02139174461364746, 0.021387264251708983, 0.021366783142089844, 0.021796863555908205, 0.021329376220703126, 0.02133580780029297, 0.021311872482299803, 0.021402015686035156, 0.02130534362792969, 0.02168169593811035, 0.02138569641113281, 0.02123366355895996, 0.021328927993774414, 0.021371999740600587, 0.021327392578125, 0.021288639068603517, 0.021262527465820313, 0.021316064834594726, 0.021204351425170898, 0.02155897521972656, 0.021277471542358397, 0.021439680099487303, 0.021360671997070313, 0.021343168258666993, 0.021286815643310548, 0.022077152252197266, 0.021530336380004882, 0.021352703094482423, 0.021215551376342772, 0.021347711563110352, 0.02138902473449707, 0.0214102725982666, 0.021407583236694335, 0.02166774368286133, 0.02138764762878418, 0.021283424377441407, 0.021411392211914064, 0.021335487365722657, 0.021359359741210938, 0.021647680282592775, 0.02131113624572754, 0.021370880126953123, 0.02126233673095703, 0.02126063919067383, 0.021289823532104492, 0.021651615142822267, 0.02137392044067383, 0.021418880462646485, 0.02129724884033203, 0.021371103286743163, 0.021637407302856446, 0.021358816146850586, 0.021370880126953123, 0.021452512741088867, 0.021398111343383788, 0.021452447891235352, 0.02150543975830078, 0.021475967407226564, 0.021384544372558593, 0.02149238395690918, 0.021344160079956053, 0.021419136047363282, 0.02143465614318848, 0.021390016555786134, 0.021379072189331053, 0.02145280075073242, 0.021350400924682617, 0.02144256019592285, 0.02131990432739258, 0.021353664398193358, 0.021543519973754883, 0.021465087890625, 0.021557247161865235, 0.02132524871826172, 0.02131171226501465, 0.021350751876831053, 0.021343936920166017, 0.021683712005615235, 0.021412672042846678, 0.02148080062866211, 0.021387392044067383, 0.02132828712463379, 0.021660959243774414, 0.021406591415405272, 0.021356063842773436, 0.021371328353881835, 0.021563392639160156, 0.021386495590209963, 0.02163804817199707, 0.021598016738891602, 0.021481151580810546, 0.021509599685668946, 0.021449663162231444, 0.02130233573913574, 0.02131977653503418, 0.021358848571777344, 0.021354303359985352, 0.02160313606262207, 0.021396768569946288, 0.021327743530273436, 0.021402463912963868, 0.021423583984375, 0.021373472213745116, 0.021327871322631836, 0.021296607971191407, 0.0214431037902832, 0.021317632675170898, 0.021594112396240234, 0.02165760040283203, 0.021370880126953123, 0.02132124710083008, 0.02153856086730957, 0.023720672607421875, 0.02185215950012207, 0.02162073516845703, 0.021517440795898436, 0.02149260711669922, 0.02179043197631836, 0.021496095657348634, 0.021370880126953123, 0.021398591995239257, 0.021651647567749024, 0.021365312576293944, 0.021362112045288085, 0.021314304351806642, 0.021338464736938477, 0.021659296035766603, 0.02373222351074219, 0.021440511703491212, 0.021400896072387696, 0.02148387145996094, 0.021511615753173827, 0.021357824325561523, 0.02134364891052246, 0.02131488037109375, 0.0212957763671875, 0.021358879089355468, 0.021555200576782226, 0.0234334716796875, 0.02140457534790039, 0.021350496292114256, 0.021331872940063477, 0.021446720123291015, 0.02126268768310547, 0.021324224472045898, 0.02140188789367676, 0.02130499267578125, 0.02140166473388672, 0.021409791946411134, 0.021403648376464843, 0.021378143310546875, 0.021938175201416017, 0.021687488555908203, 0.021509151458740234, 0.021372127532958984, 
0.02137779235839844, 0.021432064056396485, 0.021304384231567382, 0.024806400299072266, 0.02151452827453613, 0.021268192291259765, 0.02122137641906738, 0.02163302421569824, 0.02130112075805664, 0.021262399673461913, 0.021267616271972656, 0.02122025680541992, 0.021252191543579102, 0.02123356819152832, 0.02127052879333496, 0.021200895309448242, 0.021309440612792968, 0.02128281593322754, 0.021485759735107423, 0.02132671928405762, 0.021302207946777344, 0.02127462387084961, 0.021311199188232422, 0.02132588768005371, 0.021356767654418945, 0.02133145523071289, 0.021463647842407226, 0.02133523178100586, 0.021400127410888672, 0.021280927658081053, 0.02140118408203125, 0.02123321533203125, 0.021267295837402344, 0.02117024040222168, 0.021219263076782225, 0.021317440032958983, 0.02130748748779297, 0.021321823120117187, 0.021346303939819337, 0.021196800231933592, 0.02129689598083496, 0.021459423065185546, 0.021501728057861328, 0.021317632675170898, 0.021319679260253906, 0.021437824249267576, 0.02140224075317383, 0.02140572738647461, 0.021628896713256837, 0.02146268844604492, 0.02143471908569336, 0.0214466552734375, 0.021557247161865235, 0.021616287231445312, 0.021510496139526367, 0.021413183212280272, 0.02141254425048828, 0.021702655792236326, 0.02135817527770996, 0.022038976669311525, 0.021688896179199217, 0.021467103958129882, 0.02136300849914551, 0.021380607604980468, 0.021352672576904298, 0.021325376510620116, 0.02131987190246582, 0.021327423095703124, 0.021315584182739256, 0.021406272888183593, 0.02138534355163574, 0.021425535202026367, 0.021238399505615235, 0.021346303939819337, 0.021465087890625, 0.021423168182373047, 0.021373279571533205, 0.021425792694091797, 0.021382112503051758, 0.021561344146728514, 0.021425344467163085, 0.02142255973815918, 0.021293279647827148, 0.02138105583190918, 0.02152262306213379, 0.02129033660888672, 0.021442432403564454, 0.021474336624145506, 0.021235456466674806, 0.021534431457519532, 0.021390783309936524, 0.021279712677001954, 0.02131100845336914, 0.021377151489257812, 0.021325279235839843, 0.021340927124023436, 0.02134169578552246, 0.02130406379699707, 0.021280511856079102, 0.021381120681762695, 0.021350400924682617, 0.021364736557006835, 0.021344095230102538, 0.021271839141845703, 0.021379615783691405, 0.021330272674560547, 0.021342208862304687, 0.021409791946411134, 0.021247167587280274, 0.02144748878479004, 0.021315584182739256, 0.021350400924682617, 0.021302560806274413, 0.02139619255065918, 0.021399551391601563, 0.021325536727905273, 0.021555328369140626, 0.021374847412109373, 0.021444896697998046, 0.021268064498901368, 0.021469856262207033, 0.021337760925292968, 0.021737472534179687, 0.02149990463256836, 0.02131385612487793, 0.021274303436279295, 0.021241983413696288, 0.021213056564331055, 0.021256383895874024, 0.021325664520263674, 0.021962560653686524, 0.021327104568481445, 0.02133625602722168, 0.021336959838867187, 0.021407007217407226, 0.021417600631713867, 0.021490751266479494, 0.02124991989135742, 0.021321727752685548, 0.021325824737548828, 0.02134163284301758, 0.021350976943969726, 0.021345888137817383, 0.021319168090820313, 0.02132681655883789, 0.02134009552001953, 0.02134844779968262, 0.02146499252319336, 0.02125004768371582, 0.021292768478393554, 0.021311487197875977, 0.021244192123413087, 0.02132348823547363, 0.021328256607055663, 0.0213253116607666, 0.021244319915771484, 0.02123980712890625, 0.02123347282409668, 0.02128505516052246, 0.021403999328613282, 0.021399200439453123, 0.021395456314086913, 0.021643264770507813, 0.021323616027832032, 
0.021311071395874022, 0.0213143367767334, 0.021246912002563477, 0.02151100730895996, 0.02142211151123047, 0.02126367950439453, 0.02128518486022949, 0.021518367767333985, 0.022434112548828124, 0.021350400924682617, 0.021251615524291993, 0.021440031051635743, 0.021293983459472657, 0.021327903747558594, 0.021362688064575194, 0.021526527404785157, 0.021354496002197267, 0.021364639282226563, 0.021327840805053712, 0.021405120849609376, 0.021419872283935548, 0.021483200073242188, 0.02127702331542969, 0.021380447387695314, 0.021226432800292967, 0.021274208068847656, 0.021276479721069337, 0.02110659217834473, 0.02128895950317383, 0.021141952514648437, 0.02121651268005371, 0.021282848358154298, 0.021234655380249025, 0.02128486442565918, 0.02126131248474121, 0.02127359962463379, 0.021225568771362304, 0.02134783935546875, 0.021361055374145507, 0.021196800231933592, 0.02115782356262207, 0.02115078353881836, 0.021197887420654298, 0.021212543487548828, 0.021176511764526368, 0.021356735229492187, 0.02116815948486328, 0.02118841552734375, 0.02123401641845703, 0.021301248550415038, 0.02126268768310547, 0.021203712463378908, 0.021241792678833006, 0.021316864013671874, 0.021290719985961912, 0.021299327850341797, 0.021181791305541993, 0.021221920013427733, 0.021213184356689452, 0.021181791305541993, 0.021240480422973634, 0.021440511703491212, 0.021180479049682618, 0.022294464111328124, 0.021247648239135743, 0.021377376556396484, 0.02145075225830078, 0.021274112701416017, 0.02120547294616699, 0.02118659210205078, 0.021157888412475585, 0.021217248916625978, 0.021206527709960937, 0.021234207153320313, 0.021236991882324217, 0.021177024841308595, 0.021190303802490235, 0.021207456588745118, 0.021358528137207032, 0.021362592697143554, 0.021579296112060546, 0.02136307144165039, 0.02138742446899414, 0.0214036808013916]",tokens/s,46.75070726073808,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, 
S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.20064,813.563904,0.0,411.041792,391.374848,s,1,7.5011484375,7.5011484375,0.0,7.5011484375,7.5011484375,7.5011484375,7.5011484375,[7.5011484375],,kWh,4.789797104149329e-06,5.210807253462897e-07,1.8386125820030186e-06,7.149490411498637e-06,,MB,1205.82144,886.964224,0.0,473.956352,454.832128,s,16,0.37647983551025394,0.023529989719390867,0.0004432004529263146,0.023428335189819335,0.023519120216369628,0.023952624320983884,0.024972643089294432,"[0.02522764778137207, 0.023462623596191407, 0.023489343643188478, 0.02352761650085449, 0.02336899185180664, 0.023451263427734376, 0.023377599716186522, 0.02345849609375, 0.02337388801574707, 0.023447647094726562, 0.023510623931884765, 0.02338268852233887, 0.023399967193603516, 0.02330201530456543, 0.023290399551391602, 0.02340902328491211]",tokens/s,10879.732760317891,kWh,7.896364508445634e-07,8.708317587774992e-08,5.205897558108036e-07,1.3973093825331167e-06,tokens/kWh,183209247.14318427,MB,1231.07328,914.2272,0.0,501.219328,454.834688,s,16,10.195871154785154,0.6372419471740722,0.009555865764571658,0.6376006774902343,0.6465556945800781,0.6472247161865234,0.6480600799560546,"[0.6226619873046875, 0.6462347412109375, 0.6482689208984375, 0.6455870971679688, 0.6468766479492187, 0.6444556884765625, 0.636365234375, 0.6316265258789062, 0.6375317993164062, 0.6410673217773437, 0.64365283203125, 0.6376695556640625, 0.6328178100585937, 0.610504638671875, 0.6345709228515625, 0.6359794311523438]",tokens/s,98.86354826354612,kWh,1.8264124590303183e-05,2.0142218941230393e-06,7.424052016564203e-06,2.7702398500990426e-05,tokens/kWh,2274171.31400184,,s,1008,10.18681973361968,0.010105971957956041,0.0002698307786307568,0.01012820816040039,0.010305488014221192,0.010376326704025269,0.010781483201980588,"[0.009750975608825683, 0.009858240127563476, 0.009807744026184083, 0.009772000312805175, 0.009736191749572755, 0.009783295631408692, 0.009719807624816895, 0.009689087867736817, 0.009766912460327149, 0.009748031616210937, 0.009677439689636231, 0.009747455596923828, 0.009724575996398926, 0.009702655792236328, 0.00979152011871338, 0.009708415985107422, 0.009787391662597657, 0.009690560340881348, 0.009902655601501465, 0.00969865608215332, 0.009669280052185059, 0.009789600372314453, 0.009729887962341309, 0.009764736175537109, 0.009709471702575684, 0.009693408012390136, 0.009641440391540527, 0.01016710376739502, 0.0097128324508667, 0.009660736083984375, 0.009811327934265137, 0.009802399635314941, 0.009782591819763183, 0.00976371192932129, 0.009793536186218262, 0.009760288238525391, 0.009763135910034179, 0.009785504341125488, 0.009887743949890136, 0.010135519981384277, 0.00987769603729248, 0.010046303749084473, 
0.010071040153503418, 0.009967616081237793, 0.010059776306152344, 0.009963359832763672, 0.009906335830688477, 0.010159744262695312, 0.009994784355163574, 0.009946975708007812, 0.009890975952148438, 0.010462016105651855, 0.010045215606689454, 0.01, 0.010085023880004883, 0.010092512130737304, 0.010072064399719239, 0.010039199829101562, 0.01017580795288086, 0.01003395175933838, 0.01011680030822754, 0.010004799842834472, 0.010163583755493165, 0.010292063713073731, 0.013477760314941406, 0.01069222354888916, 0.010584832191467285, 0.010567423820495605, 0.01019696044921875, 0.010168191909790038, 0.01016643238067627, 0.010186367988586426, 0.010377568244934082, 0.010653408050537109, 0.010166591644287109, 0.010136575698852538, 0.010134528160095215, 0.010112159729003907, 0.01013436794281006, 0.00999833583831787, 0.010079456329345702, 0.010007552146911621, 0.010784735679626465, 0.010114879608154296, 0.010040736198425293, 0.010738271713256836, 0.010172575950622558, 0.010137439727783203, 0.01004748821258545, 0.01004678440093994, 0.009984352111816407, 0.009980416297912598, 0.009959072113037109, 0.010080448150634766, 0.010376864433288575, 0.010276224136352539, 0.010273759841918945, 0.009996352195739746, 0.010054752349853516, 0.010031583786010743, 0.01007862377166748, 0.010088768005371094, 0.010085375785827636, 0.010160063743591309, 0.010104991912841798, 0.010271039962768555, 0.010086655616760254, 0.010112895965576171, 0.010125439643859863, 0.010162240028381347, 0.010407296180725098, 0.010130016326904297, 0.010053728103637695, 0.010205056190490723, 0.010098688125610352, 0.010277888298034669, 0.010116095542907716, 0.010112447738647461, 0.01010540771484375, 0.010254336357116698, 0.010158080101013184, 0.010279040336608886, 0.01021132755279541, 0.010205056190490723, 0.010291199684143066, 0.010225376129150391, 0.010102335929870605, 0.010263168334960937, 0.010293055534362792, 0.010228992462158203, 0.010205951690673828, 0.010362303733825684, 0.01033033561706543, 0.010289504051208495, 0.01026153564453125, 0.010268896102905274, 0.01032857608795166, 0.010365183830261231, 0.010204704284667968, 0.010152416229248047, 0.010342528343200683, 0.010250240325927735, 0.010308544158935546, 0.010378175735473633, 0.010241439819335937, 0.010291584014892578, 0.010236448287963867, 0.01027244758605957, 0.010612832069396973, 0.0103056001663208, 0.010272095680236817, 0.010221728324890137, 0.010286751747131348, 0.010250816345214844, 0.010227871894836425, 0.010214591979980469, 0.010328864097595215, 0.010201215744018555, 0.010262080192565918, 0.010400064468383789, 0.010295392036437988, 0.010305439949035645, 0.0104017915725708, 0.010268671989440918, 0.010250240325927735, 0.01043455982208252, 0.01023692798614502, 0.010254783630371094, 0.010312255859375, 0.010302528381347657, 0.010324000358581543, 0.01031056022644043, 0.010186047554016113, 0.010260607719421387, 0.01021945571899414, 0.010324831962585448, 0.010217151641845702, 0.010204287528991699, 0.010265631675720215, 0.010222751617431641, 0.010269472122192383, 0.010223615646362304, 0.010315775871276855, 0.010260479927062988, 0.010432512283325195, 0.01031987190246582, 0.010242015838623046, 0.010262463569641113, 0.010188960075378417, 0.010237695693969727, 0.010384096145629882, 0.010295295715332031, 0.010305024147033692, 0.010172863960266114, 0.010231871604919434, 0.010162176132202149, 0.010286080360412597, 0.010144767761230468, 0.010184703826904297, 0.010170368194580079, 0.010332223892211913, 0.010342111587524414, 0.0102258882522583, 0.010215167999267577, 0.010261759757995606, 
0.010255359649658203, 0.01021951961517334, 0.010252287864685058, 0.01018665599822998, 0.010245280265808105, 0.01013036823272705, 0.010133536338806152, 0.010326047897338868, 0.010215359687805176, 0.01021951961517334, 0.010114751815795898, 0.010187071800231933, 0.010215583801269532, 0.010211199760437011, 0.010145343780517579, 0.010168736457824706, 0.01047715187072754, 0.01027113628387451, 0.010218784332275391, 0.010164511680603028, 0.01020486354827881, 0.01020207977294922, 0.01020854377746582, 0.010273599624633789, 0.010286784172058106, 0.010276864051818848, 0.010235039710998536, 0.010320416450500488, 0.01029152011871338, 0.010360383987426757, 0.01028656005859375, 0.010186976432800292, 0.010279680252075195, 0.010248191833496094, 0.010245984077453613, 0.010236063957214355, 0.010274944305419921, 0.010237343788146972, 0.010234335899353027, 0.010162176132202149, 0.010136735916137696, 0.010246656417846679, 0.010248703956604004, 0.01021116828918457, 0.010176671981811523, 0.01033574390411377, 0.010215583801269532, 0.010179360389709473, 0.010281056404113769, 0.010201279640197753, 0.010347583770751952, 0.010280799865722657, 0.010283935546875, 0.010182496070861816, 0.01031612777709961, 0.010299200057983399, 0.010401727676391602, 0.010463359832763672, 0.010287039756774903, 0.01033414363861084, 0.010215488433837891, 0.010233856201171876, 0.010256383895874023, 0.01022156810760498, 0.010225536346435547, 0.010163455963134766, 0.010187647819519042, 0.010221664428710937, 0.01028700828552246, 0.010168319702148437, 0.010258336067199707, 0.010304672241210937, 0.010367936134338378, 0.010425503730773927, 0.01013814353942871, 0.010153440475463868, 0.010136416435241699, 0.010536767959594727, 0.010251935958862304, 0.010240192413330079, 0.01034652805328369, 0.010172736167907715, 0.010152064323425292, 0.010232000350952149, 0.010249695777893066, 0.010168895721435547, 0.010122655868530273, 0.010213855743408204, 0.010108575820922852, 0.010270367622375489, 0.0106910400390625, 0.010254336357116698, 0.01015567970275879, 0.010233504295349121, 0.010303872108459473, 0.010195263862609863, 0.010102784156799317, 0.010065183639526368, 0.010242303848266602, 0.010696736335754395, 0.010250720024108886, 0.010140800476074218, 0.010190879821777344, 0.01025113582611084, 0.010186719894409179, 0.01028502368927002, 0.010188799858093261, 0.010350687980651856, 0.010205087661743164, 0.010404031753540039, 0.010308095932006836, 0.010306847572326661, 0.010329728126525878, 0.01027286434173584, 0.010301888465881348, 0.010232383728027344, 0.010504063606262206, 0.010283007621765136, 0.010209280014038086, 0.010210783958435058, 0.010215071678161622, 0.01021235179901123, 0.010273759841918945, 0.010269632339477539, 0.010273920059204102, 0.010168671607971191, 0.010223199844360351, 0.0101746244430542, 0.010300448417663574, 0.010164031982421875, 0.01017846393585205, 0.010184703826904297, 0.010241536140441895, 0.010211104393005372, 0.010170368194580079, 0.01021020793914795, 0.01014896011352539, 0.0102325439453125, 0.010166336059570312, 0.010298368453979492, 0.01019315242767334, 0.010319775581359863, 0.010236672401428223, 0.010229791641235352, 0.01033948802947998, 0.010191007614135742, 0.010244799613952636, 0.010149888038635254, 0.010272064208984376, 0.010386112213134766, 0.010244095802307129, 0.010295136451721192, 0.010195103645324707, 0.010262528419494628, 0.010253567695617676, 0.010191391944885254, 0.010080479621887206, 0.010295295715332031, 0.01021132755279541, 0.010205183982849121, 0.010128543853759766, 0.010091423988342285, 0.010204511642456055, 
0.010105440139770508, 0.01022976016998291, 0.010094079971313476, 0.010095104217529297, 0.010123264312744141, 0.010140800476074218, 0.010208127975463867, 0.01013759994506836, 0.010162336349487305, 0.010098496437072753, 0.010019071578979492, 0.010189536094665528, 0.010170016288757324, 0.01012508773803711, 0.01018057632446289, 0.010142304420471192, 0.010171903610229491, 0.010119680404663087, 0.010202239990234375, 0.010122112274169922, 0.010217472076416016, 0.010147839546203614, 0.01017683219909668, 0.010200768470764161, 0.01012940788269043, 0.010167840003967286, 0.010077823638916015, 0.010197888374328614, 0.01006764793395996, 0.010195391654968262, 0.010046879768371582, 0.010185376167297363, 0.010019743919372558, 0.010193440437316894, 0.010094911575317382, 0.010182687759399414, 0.010100607872009277, 0.010176639556884765, 0.010057727813720703, 0.010067968368530274, 0.010152000427246094, 0.010081503868103027, 0.010097375869750977, 0.010076160430908204, 0.010166272163391114, 0.010021056175231934, 0.010026975631713867, 0.010035200119018555, 0.010030943870544434, 0.010051584243774414, 0.010128479957580566, 0.009999008178710937, 0.010035200119018555, 0.010088704109191894, 0.010061823844909668, 0.010000384330749512, 0.010088447570800782, 0.009946623802185058, 0.009978495597839355, 0.009959296226501465, 0.00993603229522705, 0.009966431617736816, 0.010037055969238281, 0.009935040473937989, 0.010016960144042969, 0.010264415740966797, 0.010243231773376466, 0.010132224082946777, 0.010137215614318847, 0.009976256370544433, 0.010003775596618652, 0.0099683198928833, 0.010024703979492188, 0.009920703887939452, 0.01017632007598877, 0.010172608375549317, 0.010046367645263671, 0.01016105556488037, 0.010126336097717285, 0.010152159690856933, 0.010113056182861329, 0.010181376457214356, 0.010064031600952148, 0.01054319953918457, 0.010229503631591797, 0.010153984069824219, 0.010106880187988282, 0.010140928268432617, 0.010197759628295899, 0.010115103721618652, 0.010420191764831542, 0.01009391975402832, 0.010021535873413086, 0.009996288299560547, 0.010012672424316407, 0.009865216255187988, 0.009971551895141602, 0.00986128044128418, 0.009871359825134277, 0.009981951713562011, 0.009971072196960449, 0.00981817626953125, 0.00990880012512207, 0.009940704345703126, 0.010350879669189452, 0.010026016235351563, 0.010005439758300781, 0.00996720027923584, 0.010030528068542481, 0.009912351608276367, 0.009987135887145997, 0.009868479728698731, 0.009982175827026367, 0.009882111549377442, 0.009928256034851075, 0.009840415954589845, 0.009926495552062988, 0.009894783973693849, 0.009910207748413086, 0.009918496131896973, 0.00987929630279541, 0.009951071739196777, 0.010118592262268066, 0.009996512413024902, 0.009849023818969727, 0.009839455604553222, 0.009998016357421875, 0.0098920316696167, 0.009940799713134765, 0.00991590404510498, 0.009977503776550293, 0.009954367637634277, 0.010176287651062012, 0.009991968154907226, 0.009883872032165527, 0.009951168060302735, 0.009834688186645507, 0.010206432342529296, 0.011384672164916992, 0.010064640045166016, 0.010162176132202149, 0.010010623931884765, 0.010147839546203614, 0.010055647850036622, 0.012711775779724122, 0.010721471786499024, 0.01017859172821045, 0.010131679534912109, 0.010133248329162597, 0.010024959564208985, 0.010058048248291016, 0.010032832145690918, 0.010233856201171876, 0.009971743583679199, 0.010052736282348634, 0.010079071998596192, 0.010045439720153808, 0.009947232246398926, 0.009958368301391601, 0.01014025592803955, 0.010124863624572754, 0.009997183799743653, 
0.010035103797912597, 0.009916511535644532, 0.009982208251953124, 0.009989791870117188, 0.00996339225769043, 0.00990835189819336, 0.010012672424316407, 0.00994099235534668, 0.010016672134399414, 0.010053728103637695, 0.010077407836914062, 0.00992080020904541, 0.009988608360290528, 0.010024319648742676, 0.009950143814086914, 0.009953023910522461, 0.009973664283752442, 0.009947392463684082, 0.010131423950195312, 0.009981760025024414, 0.009979583740234376, 0.009982272148132324, 0.010049568176269532, 0.010053248405456542, 0.010092927932739257, 0.00990118408203125, 0.01004047966003418, 0.009924287796020509, 0.010164287567138672, 0.010098624229431152, 0.010061823844909668, 0.010010368347167968, 0.010152607917785645, 0.010034784317016602, 0.010106240272521973, 0.010052288055419922, 0.010084287643432617, 0.01020297622680664, 0.010199392318725586, 0.010219327926635742, 0.010113216400146485, 0.01026028823852539, 0.010114496231079101, 0.01021731185913086, 0.010112000465393066, 0.010164128303527833, 0.010079423904418945, 0.010143903732299805, 0.010097311973571778, 0.010119071960449219, 0.010109024047851562, 0.010104191780090333, 0.01022755241394043, 0.010094911575317382, 0.01021395206451416, 0.01020025634765625, 0.010207967758178711, 0.010036255836486816, 0.010058719635009766, 0.01008569622039795, 0.0101725435256958, 0.010070015907287597, 0.010111552238464355, 0.010071840286254883, 0.01015011215209961, 0.010086400032043457, 0.010100799560546875, 0.01010643196105957, 0.010074496269226074, 0.010190719604492188, 0.010063808441162109, 0.010147904396057129, 0.010197248458862305, 0.010293215751647948, 0.010188063621520997, 0.010166144371032715, 0.010056447982788087, 0.010149888038635254, 0.010069408416748048, 0.010232416152954102, 0.010071999549865722, 0.010055744171142578, 0.01017801570892334, 0.010256383895874023, 0.010096575736999512, 0.010031040191650391, 0.010121024131774903, 0.010162272453308106, 0.0100830717086792, 0.010081567764282227, 0.010149696350097655, 0.010134719848632812, 0.010184351921081542, 0.010104351997375489, 0.010333760261535645, 0.010284000396728516, 0.01149459171295166, 0.01024623966217041, 0.010195679664611816, 0.010128512382507324, 0.010132736206054687, 0.01017728042602539, 0.010358783721923828, 0.010188639640808106, 0.010213536262512207, 0.010262528419494628, 0.010323391914367677, 0.010291775703430176, 0.01009436798095703, 0.01025382423400879, 0.010156864166259766, 0.010178336143493652, 0.010553471565246582, 0.01021951961517334, 0.010233856201171876, 0.01015401554107666, 0.010373151779174805, 0.010182463645935058, 0.010177663803100586, 0.01032096004486084, 0.01023583984375, 0.010139840126037598, 0.010243904113769532, 0.010295295715332031, 0.010163264274597168, 0.010313823699951171, 0.010154144287109375, 0.010355296134948731, 0.010401247978210449, 0.010293984413146972, 0.010114591598510742, 0.010155743598937988, 0.010590880393981934, 0.010333888053894043, 0.010215744018554688, 0.010162336349487305, 0.010263808250427246, 0.010237824440002442, 0.010214367866516113, 0.010233599662780762, 0.010153056144714356, 0.010118176460266113, 0.010101728439331055, 0.010195872306823731, 0.010108927726745605, 0.010199040412902831, 0.010124480247497559, 0.010088864326477051, 0.010033568382263184, 0.010186752319335938, 0.010059776306152344, 0.010123040199279785, 0.010125599861145019, 0.010237536430358886, 0.010211008071899414, 0.010168959617614746, 0.010215456008911133, 0.010063488006591797, 0.01013929557800293, 0.009984736442565917, 0.010134880065917968, 0.01012598419189453, 0.01022976016998291, 
0.010051584243774414, 0.010100735664367675, 0.010123295783996582, 0.0101112003326416, 0.010137344360351562, 0.010127360343933106, 0.010114591598510742, 0.009989760398864745, 0.010113887786865235, 0.010207232475280761, 0.010043168067932129, 0.010003775596618652, 0.01007094383239746, 0.009995648384094238, 0.010054335594177247, 0.00994700813293457, 0.010098591804504394, 0.010054847717285157, 0.010000639915466309, 0.009937664031982422, 0.010012639999389648, 0.01009663963317871, 0.010119168281555176, 0.00994819164276123, 0.010103775978088378, 0.009899104118347168, 0.009913408279418945, 0.009938240051269531, 0.010056032180786132, 0.010082624435424804, 0.010186528205871582, 0.010034463882446288, 0.00996342372894287, 0.009970591545104981, 0.010035200119018555, 0.01002905559539795, 0.010122400283813476, 0.010158944129943847, 0.010172415733337402, 0.010375328063964843, 0.010137439727783203, 0.010168319702148437, 0.01036672019958496, 0.010141951560974121, 0.010180224418640136, 0.01016256046295166, 0.010118304252624511, 0.010084608078002929, 0.010115008354187011, 0.010211584091186523, 0.010041567802429199, 0.010090304374694823, 0.010171839714050293, 0.01028940773010254, 0.010223775863647462, 0.010350591659545898, 0.010335904121398926, 0.010235967636108399, 0.010164352416992187, 0.010218239784240723, 0.010264384269714355, 0.010282303810119629, 0.010215583801269532, 0.010269151687622071, 0.010270079612731934, 0.010522975921630859, 0.010184032440185547, 0.010182815551757813, 0.010427040100097656, 0.010266304016113281, 0.01016438388824463, 0.010335904121398926, 0.010238304138183593, 0.010201087951660156, 0.010389023780822754, 0.010217503547668457, 0.010201536178588867, 0.010325759887695312, 0.010163455963134766, 0.010304703712463378, 0.010225407600402832, 0.010114463806152343, 0.01010540771484375, 0.010081536293029785, 0.010068767547607422, 0.010143391609191895, 0.010115519523620605, 0.010098464012145996, 0.010106111526489258, 0.010029088020324707, 0.010001376152038574, 0.00995132827758789, 0.00999407958984375, 0.010007840156555176, 0.010037983894348144, 0.009979231834411621, 0.010115743637084961, 0.01004371166229248, 0.0100512638092041, 0.010196991920471191, 0.010085439682006836, 0.010034111976623536, 0.010180383682250977, 0.009944671630859376, 0.009949824333190919, 0.009846783638000489, 0.00998739242553711, 0.00980611228942871, 0.00986905574798584, 0.009718655586242676, 0.009703200340270996, 0.00966659164428711, 0.009659775733947754, 0.009833056449890137, 0.00977235221862793, 0.009759424209594726, 0.009849023818969727, 0.010460160255432128, 0.009840864181518555, 0.009799648284912109, 0.009740927696228027, 0.009952511787414551, 0.009687328338623047, 0.009735712051391602, 0.009687104225158692, 0.009641983985900878, 0.009425984382629395, 0.009653247833251954, 0.009656288146972657, 0.009687007904052735, 0.009647392272949218, 0.009736672401428222, 0.00962716770172119, 0.009734880447387696, 0.009779199600219727, 0.009793087959289551, 0.009825056076049804, 0.009672351837158203, 0.00970751953125, 0.009690879821777344, 0.009954655647277832, 0.009703935623168946, 0.00968735980987549, 0.009674015998840333, 0.009597760200500488, 0.009679072380065918, 0.009625375747680664, 0.009711711883544923, 0.009730143547058106, 0.00957852840423584, 0.009678624153137206, 0.009624608039855957, 0.009718400001525879, 0.009662912368774415, 0.009652128219604492, 0.009760448455810547, 0.009625920295715332, 0.009664768218994141, 0.009583552360534668, 0.009691072463989257, 0.009643136024475097, 0.009587712287902832, 
0.009706239700317382, 0.009689087867736817, 0.009643232345581055, 0.009642784118652343, 0.00961740779876709, 0.009629407882690429, 0.009593343734741211, 0.009733247756958007, 0.009576800346374512, 0.009594207763671874, 0.009663552284240722, 0.00960700798034668, 0.009672736167907715, 0.00962070369720459, 0.00967686367034912, 0.009599712371826172, 0.009654144287109374, 0.009656415939331055, 0.009730112075805665, 0.009727999687194825, 0.009826208114624023, 0.00985097599029541, 0.009768095970153809, 0.00975881576538086, 0.00977724838256836, 0.009757408142089844, 0.00976585578918457, 0.009525152206420898, 0.009715135574340821, 0.009800127983093262, 0.009809120178222656, 0.009913344383239747, 0.00984828758239746, 0.009968159675598144, 0.00990822410583496, 0.010144960403442383, 0.009895936012268066, 0.009933631896972656, 0.009959424018859863, 0.00994099235534668, 0.009942943572998048, 0.010010368347167968, 0.009859456062316894, 0.009871487617492675, 0.00978547191619873, 0.009866975784301757, 0.009818112373352051, 0.00992460823059082, 0.009899359703063965, 0.009937567710876465, 0.00995315170288086, 0.010023039817810058, 0.010072416305541992, 0.00994700813293457, 0.009946623802185058, 0.00988809585571289, 0.010032256126403809, 0.009910655975341796, 0.009983839988708497, 0.009896351814270019, 0.010025152206420898, 0.009988096237182617, 0.010053631782531738, 0.010053631782531738, 0.010058815956115724, 0.010246912002563476, 0.01037228775024414, 0.01125228786468506, 0.010496064186096192, 0.010297216415405274, 0.010263039588928222, 0.010174495697021484, 0.01059334373474121, 0.010128191947937012, 0.010286335945129394, 0.010232095718383788, 0.01018723201751709, 0.010217375755310058, 0.010158047676086426, 0.010213600158691407, 0.010135552406311036, 0.010125120162963868, 0.010116543769836426, 0.01017852783203125, 0.010103584289550781, 0.01033011245727539, 0.010291199684143066, 0.010151935577392577, 0.010196991920471191, 0.01011747169494629, 0.010339903831481933, 0.010211104393005372, 0.010283679962158203, 0.010418111801147462, 0.010346240043640136, 0.010840383529663086, 0.011880160331726074, 0.01080992031097412, 0.010446751594543457, 0.010285120010375976, 0.0101844482421875, 0.010128543853759766, 0.01012822437286377, 0.010151647567749023, 0.010045727729797364, 0.010209183692932128, 0.010073984146118164, 0.01003651237487793, 0.010002880096435547, 0.01009718418121338, 0.010041664123535157, 0.010109919548034668, 0.010189279556274414, 0.01012758445739746, 0.01013759994506836, 0.010061887741088868, 0.010059103965759278, 0.010198911666870118, 0.010123040199279785, 0.010054368019104004, 0.00986736011505127, 0.009725600242614747, 0.009703328132629394, 0.009627327919006348, 0.009642271995544434, 0.009617024421691895, 0.011385824203491211, 0.011654879570007324, 0.009765151977539063, 0.009885151863098145, 0.010060640335083007, 0.010262399673461915, 0.010118975639343261, 0.00996275234222412, 0.009928511619567872, 0.00983340835571289, 0.009812159538269043, 0.00977023983001709, 0.010234239578247071, 0.009769215583801269, 0.009776384353637695, 0.00974067211151123, 0.009826784133911132, 0.009737215995788574, 0.009737055778503418, 0.009721471786499023, 0.009720191955566407, 0.009811967849731444, 0.00979152011871338, 0.009782400131225586, 0.009665472030639648, 0.009786304473876952, 0.009743328094482423]",tokens/s,98.95139271712887,, 
bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), 
**factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 241654 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) 
for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 500.12 MiB is free. Process 213938 has 14.25 GiB memory in use. Of the allocated memory 14.13 GiB is allocated by PyTorch, and 8.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return 
self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 
99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 312.12 MiB is free. Process 216922 has 14.43 GiB memory in use. Of the allocated memory 14.31 GiB is allocated by PyTorch, and 12.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 634, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 402, in __init__ super().__init__(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 102, in __init__ self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 218409 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 241231 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,888.868864,725.48352,0.0,322.961408,314.743808,s,1,7.14170947265625,7.14170947265625,0.0,7.14170947265625,7.14170947265625,7.14170947265625,7.14170947265625,[7.14170947265625],,kWh,3.99466019165023e-06,4.314964665036102e-07,8.977784960034763e-07,5.323935154157317e-06,,MB,1198.518272,817.758208,0.0,404.750336,391.119872,s,34,0.24065177726745604,0.007077993449042826,0.00017591592288654636,0.00701904010772705,0.0071675007343292236,0.0073290304660797116,0.007780234627723694,"[0.007172031879425049, 0.007076640129089356, 0.00697705602645874, 0.007911168098449706, 0.0070193281173706054, 0.006993408203125, 0.0069879360198974606, 0.007123775959014892, 0.006994304180145264, 0.007008224010467529, 0.006986336231231689, 0.006992544174194336, 0.0070702719688415525, 0.007010335922241211, 0.007018752098083496, 0.007014592170715332, 0.006993343830108642, 0.0070013442039489745, 0.006968255996704102, 0.007007455825805664, 0.007514400005340576, 0.00709878396987915, 0.007008224010467529, 0.0071569280624389645, 0.007068672180175781, 0.007035007953643799, 0.007036159992218018, 0.0069757761955261234, 0.007017600059509277, 0.0070380158424377445, 0.007074944019317627, 0.007019519805908203, 0.0070514240264892575, 0.0072292160987854]",tokens/s,36168.44263039259,kWh,2.220097898320257e-07,2.4483890918557158e-08,1.464365345551474e-07,3.929302153057303e-07,tokens/kWh,651515180.1212642,MB,1210.351616,832.438272,0.0,419.4304,391.122432,s,34,9.94111947631836,0.29238586695054,0.0028572064022739673,0.291315444946289,0.29513064270019534,0.2982619689941406,0.301018515625,"[0.30074215698242185, 0.29060140991210937, 0.29516311645507814, 0.2911128234863281, 0.291220947265625, 0.2906497497558594, 0.29374453735351563, 0.28955538940429687, 0.2890251770019531, 0.2943091735839844, 0.2914686279296875, 0.2902822570800781, 0.29140994262695313, 0.29434454345703126, 0.290657958984375, 0.28978897094726563, 0.2941588439941406, 0.2908852233886719, 0.2902734069824219, 0.2948214721679687, 0.29047268676757815, 0.29393109130859374, 0.2916687927246094, 0.3011546325683594, 0.2915433349609375, 0.29082083129882813, 0.29505487060546876, 0.2901121520996094, 0.29144561767578125, 0.2910323486328125, 0.29692648315429687, 0.2919396057128906, 0.29062445068359377, 0.29017684936523436]",tokens/s,215.4686909359305,kWh,8.337983727669399e-06,9.195365076139336e-07,3.5193521123588147e-06,1.2776872347642149e-05,tokens/kWh,4930784.176741506,,s,2142,9.926336992263781,0.004634144254091407,0.0001979790305642611,0.004602335929870605,0.004673680019378661,0.004729827165603637,0.00592665204048157,"[0.004432799816131591, 0.004697887897491455, 0.004743231773376465, 0.004728672027587891, 0.004911424160003662, 
0.004700352191925048, 0.004695871829986572, 0.004695551872253418, 0.0049238080978393555, 0.00467903995513916, 0.004659264087677002, 0.004649824142456055, 0.00470201587677002, 0.004663296222686767, 0.0046711678504943845, 0.004639039993286133, 0.004622335910797119, 0.004613696098327636, 0.004638336181640625, 0.004608831882476807, 0.004654304027557373, 0.004668191909790039, 0.004631968021392823, 0.004629087924957275, 0.0046059517860412595, 0.004783967971801758, 0.004612512111663818, 0.004628575801849365, 0.004625919818878174, 0.004608160018920899, 0.00461568021774292, 0.004617919921875, 0.004647744178771973, 0.004636672019958496, 0.004612095832824707, 0.004614143848419189, 0.004700160026550293, 0.0051586880683898925, 0.0061231679916381836, 0.006171072006225586, 0.006178048133850098, 0.00630185604095459, 0.004756031990051269, 0.004638304233551025, 0.0046392960548400876, 0.004650911808013916, 0.00473526382446289, 0.0046815361976623535, 0.004630527973175049, 0.004632575988769531, 0.0046237120628356936, 0.004643136024475097, 0.0046525120735168455, 0.004752255916595459, 0.004636672019958496, 0.004630527973175049, 0.004630080223083496, 0.004634624004364014, 0.00462227201461792, 0.004649472236633301, 0.004721888065338135, 0.004617216110229493, 0.004631872177124023, 0.0043320322036743165, 0.004604000091552735, 0.004647071838378906, 0.004624576091766358, 0.00461740779876709, 0.00458515214920044, 0.004592383861541748, 0.0045948801040649415, 0.004600671768188477, 0.004583327770233154, 0.004610144138336182, 0.004603903770446777, 0.004585504055023193, 0.004585663795471192, 0.004602687835693359, 0.004575551986694336, 0.004596384048461914, 0.004611519813537598, 0.004573760032653809, 0.004568672180175782, 0.004575647830963135, 0.004593472003936768, 0.004589280128479004, 0.004575712203979492, 0.0047610878944396974, 0.004848127841949463, 0.004607999801635742, 0.004615520000457764, 0.004591455936431885, 0.0046211199760437015, 0.004644800186157227, 0.004591616153717041, 0.004616256237030029, 0.0046079039573669435, 0.0045857601165771485, 0.004581183910369873, 0.004636799812316895, 0.004611968040466308, 0.004639840126037598, 0.004600607872009278, 0.004607359886169434, 0.004588287830352783, 0.004613759994506836, 0.004675104141235352, 0.004580192089080811, 0.004583136081695556, 0.004585663795471192, 0.004589663982391357, 0.004581376075744629, 0.004589280128479004, 0.0046144318580627446, 0.0045957121849060055, 0.004675360202789307, 0.004579552173614502, 0.004673535823822021, 0.00459113597869873, 0.004599743843078613, 0.004606048107147217, 0.004666848182678223, 0.004575295925140381, 0.00458844804763794, 0.004583456039428711, 0.004616159915924073, 0.004348896026611328, 0.004629568099975586, 0.004604800224304199, 0.004618495941162109, 0.004598720073699951, 0.004589471817016601, 0.004582047939300537, 0.004595967769622802, 0.004615647792816162, 0.004620831966400146, 0.004624383926391602, 0.00459980821609497, 0.0045957121849060055, 0.0046031041145324705, 0.004618175983428955, 0.004620384216308593, 0.004588287830352783, 0.0045875201225280765, 0.00463212776184082, 0.004604351997375488, 0.004632736206054687, 0.004624224185943603, 0.004639904022216797, 0.00462227201461792, 0.004606688022613525, 0.004597631931304932, 0.00458784008026123, 0.004631807804107666, 0.00461900806427002, 0.004597760200500488, 0.00457260799407959, 0.004624959945678711, 0.004702208042144776, 0.004632736206054687, 0.0046365118026733395, 0.004610047817230224, 0.004601856231689453, 0.004762911796569824, 0.004589312076568604, 0.004858240127563477, 
0.004602464199066162, 0.00552675199508667, 0.005986847877502441, 0.006224127769470214, 0.00464243221282959, 0.004632959842681885, 0.0046449599266052246, 0.004761568069458008, 0.004620128154754639, 0.004622719764709473, 0.004597472190856934, 0.004576320171356201, 0.004600768089294434, 0.004607999801635742, 0.004579328060150147, 0.0045875201225280765, 0.004589759826660157, 0.004574143886566162, 0.0047497920989990235, 0.004612512111663818, 0.004589568138122559, 0.004578432083129883, 0.004598656177520752, 0.004333407878875733, 0.004609695911407471, 0.004632927894592285, 0.004734911918640137, 0.004614367961883545, 0.004610047817230224, 0.004610047817230224, 0.004612095832824707, 0.004625472068786621, 0.004629439830780029, 0.004612127780914307, 0.0045977277755737305, 0.004597311973571778, 0.004610496044158936, 0.004609407901763916, 0.004629119873046875, 0.004625631809234619, 0.004630943775177002, 0.004596096038818359, 0.004583424091339112, 0.004595424175262451, 0.004598048210144043, 0.004611711978912353, 0.004606560230255127, 0.004597536087036133, 0.0045957121849060055, 0.004593664169311523, 0.004581056118011475, 0.0046079039573669435, 0.004644608020782471, 0.004612768173217773, 0.004616352081298828, 0.0046221761703491215, 0.004622335910797119, 0.00459980821609497, 0.004607999801635742, 0.004661248207092285, 0.004593376159667969, 0.00460214376449585, 0.005117951869964599, 0.004627999782562256, 0.004653279781341553, 0.0046039681434631346, 0.0046236801147460935, 0.004599711894989014, 0.004581855773925781, 0.004581888198852539, 0.0046113600730896, 0.00461897611618042, 0.004589536190032959, 0.004587423801422119, 0.004581727981567383, 0.004602719783782959, 0.004612160205841064, 0.004631680011749267, 0.004599552154541015, 0.004597152233123779, 0.004591775894165039, 0.004602015972137451, 0.004589280128479004, 0.004613952159881592, 0.004597631931304932, 0.0046003518104553226, 0.004339136123657226, 0.004619840145111084, 0.0046080641746520995, 0.004694816112518311, 0.004602015972137451, 0.004591616153717041, 0.004619520187377929, 0.00459987211227417, 0.00462713623046875, 0.004616511821746826, 0.004678688049316406, 0.004607776165008545, 0.0046275839805603024, 0.004615647792816162, 0.00458735990524292, 0.004577087879180908, 0.004579135894775391, 0.004596735954284668, 0.004605760097503662, 0.004614143848419189, 0.004769440174102783, 0.004612448215484619, 0.004591616153717041, 0.004593535900115967, 0.004659327983856201, 0.004627488136291504, 0.004574175834655762, 0.00459980821609497, 0.004596992015838623, 0.0046039681434631346, 0.004599967956542969, 0.004610559940338135, 0.004599232196807862, 0.004604512214660644, 0.004597760200500488, 0.004611584186553955, 0.0045790719985961915, 0.0046169919967651366, 0.0046254081726074215, 0.0045946559906005855, 0.004597599983215332, 0.004593088150024414, 0.004590047836303711, 0.004611455917358399, 0.00462332820892334, 0.004619328022003174, 0.004778848171234131, 0.0047636480331420894, 0.004598944187164306, 0.004609951972961426, 0.004631423950195312, 0.0046408319473266605, 0.004627552032470703, 0.004618591785430908, 0.0046310720443725585, 0.004593440055847168, 0.004612095832824707, 0.004619999885559082, 0.004634463787078857, 0.0046373758316040035, 0.004624383926391602, 0.004603295803070068, 0.004602464199066162, 0.004415296077728272, 0.004634431838989258, 0.0046200637817382815, 0.004593632221221924, 0.004577951908111572, 0.004599584102630615, 0.004607999801635742, 0.004576543807983399, 0.004606688022613525, 0.00470963191986084, 0.004608704090118408, 0.004584671974182129, 
0.004662399768829346, 0.004587232112884521, 0.004593664169311523, 0.004603839874267578, 0.004589632034301758, 0.004577504158020019, 0.004597536087036133, 0.004575232028961182, 0.004591616153717041, 0.004603903770446777, 0.004633696079254151, 0.004586400032043457, 0.004581376075744629, 0.004585472106933594, 0.004595680236816406, 0.004595392227172852, 0.00462063980102539, 0.004607999801635742, 0.004602880001068115, 0.004570112228393554, 0.004569087982177734, 0.004589600086212158, 0.004609439849853516, 0.00462614393234253, 0.0045924801826477055, 0.004612095832824707, 0.00456876802444458, 0.0045898880958557126, 0.004591616153717041, 0.004663296222686767, 0.004724287986755371, 0.004598207950592041, 0.004595392227172852, 0.004597536087036133, 0.0046284799575805665, 0.004607647895812988, 0.004651008129119873, 0.00458841609954834, 0.004616191864013672, 0.004564000129699707, 0.004577568054199219, 0.004582079887390136, 0.0047002239227294925, 0.0046560959815979, 0.004621535778045654, 0.004614975929260254, 0.0046089601516723635, 0.004579296112060547, 0.004613823890686035, 0.004757984161376953, 0.004628320217132568, 0.004335616111755371, 0.0046014080047607426, 0.004788671970367432, 0.004610047817230224, 0.004593664169311523, 0.004835328102111816, 0.0067420477867126466, 0.0059350719451904295, 0.004876287937164306, 0.004623616218566894, 0.004623104095458985, 0.004634367942810059, 0.004632832050323487, 0.004603807926177979, 0.004586976051330566, 0.004618336200714111, 0.004587264060974121, 0.004578080177307129, 0.00455679988861084, 0.004589663982391357, 0.004577184200286865, 0.00457263994216919, 0.004639264106750488, 0.004603903770446777, 0.0045866241455078125, 0.0047021441459655765, 0.004596672058105469, 0.004557983875274658, 0.004581823825836182, 0.0045756158828735355, 0.004583456039428711, 0.004575039863586426, 0.004612127780914307, 0.004573184013366699, 0.004585279941558838, 0.004578720092773438, 0.004580287933349609, 0.0045775361061096195, 0.004634560108184815, 0.004606080055236816, 0.0045866241455078125, 0.004573919773101807, 0.004579103946685791, 0.004576863765716553, 0.004579616069793701, 0.00456928014755249, 0.004605663776397705, 0.0045920958518981935, 0.0045895037651062015, 0.00458739185333252, 0.004587200164794922, 0.004577600002288819, 0.0045911998748779295, 0.00459606409072876, 0.004587584018707276, 0.0045875201225280765, 0.004579328060150147, 0.004578464031219483, 0.004582240104675293, 0.004607168197631836, 0.004612927913665772, 0.004568319797515869, 0.004571968078613281, 0.004300320148468017, 0.004614783763885498, 0.004582880020141601, 0.004573728084564209, 0.004593664169311523, 0.004577280044555664, 0.004583775997161865, 0.00457692813873291, 0.0045875201225280765, 0.004573184013366699, 0.004583360195159912, 0.00459769582748413, 0.0045896959304809574, 0.004572671890258789, 0.004575168132781983, 0.004575808048248291, 0.004584832191467285, 0.004591487884521485, 0.004626368045806885, 0.004573631763458252, 0.0045940160751342775, 0.004585504055023193, 0.00459769582748413, 0.004585536003112793, 0.004599584102630615, 0.004604127883911133, 0.004663296222686767, 0.0046284799575805665, 0.004597760200500488, 0.004589568138122559, 0.0046626877784729, 0.004606239795684815, 0.004589600086212158, 0.004661375999450683, 0.004591775894165039, 0.004597760200500488, 0.004599967956542969, 0.004620128154754639, 0.004614143848419189, 0.004591519832611084, 0.0045994558334350585, 0.004587808132171631, 0.004587456226348877, 0.004571360111236572, 0.004600895881652832, 0.00460588788986206, 0.004593760013580322, 
0.004577600002288819, 0.00462227201461792, 0.004592160224914551, 0.004560800075531006, 0.004602079868316651, 0.0045847039222717285, 0.0045718722343444824, 0.0045773119926452634, 0.004581376075744629, 0.004589568138122559, 0.004591616153717041, 0.004593664169311523, 0.0045875201225280765, 0.004589568138122559, 0.004572224140167236, 0.004580160140991211, 0.004321824073791504, 0.004561984062194825, 0.004558815956115723, 0.004567071914672851, 0.004689919948577881, 0.004643519878387451, 0.004567039966583252, 0.004646912097930909, 0.004552864074707032, 0.0045890240669250485, 0.004600192070007324, 0.004562143802642822, 0.0045668802261352535, 0.0045632638931274415, 0.004573599815368652, 0.004632800102233887, 0.004577280044555664, 0.004607999801635742, 0.004596928119659424, 0.004573984146118164, 0.004601247787475586, 0.004571775913238526, 0.0045875201225280765, 0.004620351791381836, 0.004601471900939942, 0.004569407939910889, 0.004605984210968018, 0.004577248096466064, 0.004555071830749512, 0.004557759761810303, 0.0045903358459472654, 0.004589568138122559, 0.004579328060150147, 0.004646592140197754, 0.004570464134216309, 0.004596704006195068, 0.004556447982788086, 0.004606080055236816, 0.004587744235992432, 0.004574719905853271, 0.004571040153503418, 0.004588128089904785, 0.004577280044555664, 0.004601952075958252, 0.004568863868713379, 0.004575168132781983, 0.0045651841163635255, 0.004582752227783203, 0.004607647895812988, 0.004582655906677246, 0.004578527927398682, 0.004587264060974121, 0.004571936130523682, 0.004570943832397461, 0.0045766720771789554, 0.004583680152893067, 0.004602399826049805, 0.00455679988861084, 0.004599071979522705, 0.004561183929443359, 0.004576799869537354, 0.004565375804901123, 0.004553247928619384, 0.004316959857940674, 0.004567967891693115, 0.004577087879180908, 0.004595039844512939, 0.004604767799377441, 0.004581376075744629, 0.004599679946899414, 0.004589183807373047, 0.004575744152069092, 0.004586751937866211, 0.00463318395614624, 0.004583583831787109, 0.004618112087249756, 0.004685567855834961, 0.004594048023223877, 0.004600031852722168, 0.004665472030639648, 0.004607647895812988, 0.004567039966583252, 0.004577184200286865, 0.004796512126922607, 0.0046059517860412595, 0.004614143848419189, 0.004624383926391602, 0.004601215839385987, 0.00458406400680542, 0.004580959796905517, 0.0045807681083679195, 0.004587776184082031, 0.004580095767974853, 0.004607999801635742, 0.004581376075744629, 0.004747263908386231, 0.004595424175262451, 0.004606239795684815, 0.00467964792251587, 0.006889503955841064, 0.005631968021392823, 0.005195807933807373, 0.004624224185943603, 0.004679840087890625, 0.004620287895202637, 0.004632575988769531, 0.0046059517860412595, 0.004581376075744629, 0.004617599964141845, 0.004587903976440429, 0.004591455936431885, 0.004780447959899902, 0.004614048004150391, 0.004587584018707276, 0.004586559772491455, 0.004576223850250244, 0.004568096160888672, 0.004561888217926025, 0.004580512046813965, 0.0046068158149719235, 0.004577184200286865, 0.004568543910980225, 0.0045862398147583005, 0.004564864158630371, 0.0045565757751464845, 0.004586048126220703, 0.004300191879272461, 0.004604735851287842, 0.004571231842041015, 0.0045847039222717285, 0.004578048229217529, 0.004574592113494873, 0.004581120014190674, 0.004600768089294434, 0.004626368045806885, 0.004585055828094482, 0.004589983940124512, 0.0045875201225280765, 0.004586527824401856, 0.004577824115753174, 0.004581823825836182, 0.004593664169311523, 0.004572896003723145, 0.004559232234954834, 
0.004565983772277832, 0.004576191902160644, 0.004741119861602783, 0.004626207828521729, 0.004620512008666992, 0.004680831909179687, 0.004604991912841797, 0.004564191818237305, 0.004564671993255615, 0.0045989761352539064, 0.004619999885559082, 0.00465715217590332, 0.004603136062622071, 0.004578048229217529, 0.004563039779663086, 0.004574111938476563, 0.0046209278106689455, 0.0046022400856018065, 0.004573184013366699, 0.004579328060150147, 0.004591648101806641, 0.00460697603225708, 0.005997151851654053, 0.004647295951843261, 0.004600927829742431, 0.004577824115753174, 0.004600160121917725, 0.004706175804138184, 0.004618495941162109, 0.004609951972961426, 0.004685791969299316, 0.0046100797653198244, 0.0045892162322998045, 0.004589471817016601, 0.004608128070831299, 0.004628543853759766, 0.004581632137298584, 0.004681727886199951, 0.004570752143859863, 0.004579648017883301, 0.004615295886993409, 0.004617055892944336, 0.0045931200981140135, 0.004579967975616455, 0.004583424091339112, 0.00457862377166748, 0.004611072063446045, 0.004597760200500488, 0.004601151943206787, 0.004571839809417725, 0.004567039966583252, 0.004572319984436035, 0.004565567970275879, 0.004567327976226806, 0.004587615966796875, 0.004616223812103272, 0.004675615787506104, 0.004601823806762695, 0.004605631828308105, 0.004583615779876709, 0.0045938239097595215, 0.004622240066528321, 0.004612031936645508, 0.004673535823822021, 0.0045957121849060055, 0.004597760200500488, 0.004591616153717041, 0.0045875201225280765, 0.004614143848419189, 0.004576288223266602, 0.004590112209320069, 0.004581823825836182, 0.004796095848083496, 0.004573503971099854, 0.004616191864013672, 0.004599232196807862, 0.00463046407699585, 0.004639359951019287, 0.004582464218139648, 0.004561855792999268, 0.004589663982391357, 0.004593567848205566, 0.004601856231689453, 0.004596992015838623, 0.004610911846160888, 0.004603360176086426, 0.004609888076782227, 0.004624447822570801, 0.004610559940338135, 0.004595903873443603, 0.004580832004547119, 0.004591328144073486, 0.00456879997253418, 0.004567999839782715, 0.004597343921661377, 0.00464899206161499, 0.004594048023223877, 0.00459980821609497, 0.004583424091339112, 0.0045649919509887695, 0.004687295913696289, 0.004605504035949707, 0.004569183826446533, 0.004582399845123291, 0.004568736076354981, 0.0045689277648925785, 0.004581791877746582, 0.004576576232910156, 0.004325439929962158, 0.004607615947723388, 0.004591519832611084, 0.004568064212799072, 0.004599520206451416, 0.004571135997772217, 0.004571135997772217, 0.004567039966583252, 0.004600992202758789, 0.004594143867492676, 0.004596096038818359, 0.004601856231689453, 0.0045931520462036135, 0.004585984230041504, 0.004599552154541015, 0.0046202239990234375, 0.004599552154541015, 0.004606527805328369, 0.004575232028961182, 0.0045649919509887695, 0.004590623855590821, 0.004635615825653076, 0.004622335910797119, 0.004607999801635742, 0.004626336097717285, 0.0046152639389038085, 0.0046020479202270505, 0.004612319946289063, 0.004647520065307617, 0.0045957121849060055, 0.0046059517860412595, 0.004710080146789551, 0.004718912124633789, 0.0048718080520629885, 0.004700543880462646, 0.004702271938323975, 0.004615776062011719, 0.004606304168701172, 0.0045808000564575195, 0.004601952075958252, 0.0047231678962707515, 0.004612095832824707, 0.004611648082733154, 0.004846015930175781, 0.004640768051147461, 0.004597568035125733, 0.004623839855194092, 0.0046089601516723635, 0.004600992202758789, 0.004601888179779053, 0.004614751815795898, 0.00459980821609497, 
0.0046319360733032226, 0.0046106882095336916, 0.004613279819488525, 0.004602719783782959, 0.00459980821609497, 0.004607999801635742, 0.004589824199676513, 0.004632319927215576, 0.004603456020355225, 0.004792543888092041, 0.004608448028564453, 0.0063836159706115725, 0.004902175903320313, 0.0061708159446716305, 0.004673823833465576, 0.0046410241127014164, 0.0046119999885559084, 0.00471020793914795, 0.004619999885559082, 0.004623136043548584, 0.0045872960090637205, 0.004616191864013672, 0.004590879917144775, 0.004579999923706055, 0.004596896171569824, 0.004579423904418945, 0.004597663879394531, 0.004603839874267578, 0.004608992099761963, 0.0045895037651062015, 0.0045931200981140135, 0.0045977277755737305, 0.004592031955718994, 0.004579552173614502, 0.004631743907928467, 0.004623424053192138, 0.004596640110015869, 0.004598336219787598, 0.0046039681434631346, 0.0045734081268310545, 0.00459980821609497, 0.004605023860931396, 0.004590271949768066, 0.004622560024261474, 0.004615359783172608, 0.004577824115753174, 0.004639008045196533, 0.004605184078216553, 0.004616352081298828, 0.004590400218963623, 0.004605728149414063, 0.004591839790344238, 0.004630303859710693, 0.0045808000564575195, 0.004581952095031738, 0.004624383926391602, 0.004590720176696777, 0.004587423801422119, 0.004684768199920654, 0.004601856231689453, 0.00459171199798584, 0.004622047901153565, 0.004595903873443603, 0.004598944187164306, 0.0046068158149719235, 0.004593664169311523, 0.004593664169311523, 0.004620128154754639, 0.004612256050109864, 0.00460422420501709, 0.004591296195983887, 0.004614143848419189, 0.004607359886169434, 0.0046080322265625, 0.0043350720405578615, 0.0046044478416442875, 0.004618239879608154, 0.0045888319015502926, 0.004580063819885254, 0.004591616153717041, 0.004672575950622559, 0.004584224224090576, 0.004617695808410645, 0.004573887825012207, 0.004581696033477784, 0.004574431896209717, 0.004650688171386719, 0.004684095859527588, 0.004604383945465088, 0.004601856231689453, 0.004581376075744629, 0.004581376075744629, 0.004599199771881103, 0.004596127986907959, 0.004635072231292724, 0.004589087963104248, 0.004591648101806641, 0.004554143905639648, 0.004569056034088134, 0.004562943935394287, 0.004716576099395752, 0.004596511840820313, 0.0046284480094909665, 0.004599040031433106, 0.004600607872009278, 0.004599520206451416, 0.004583712100982666, 0.0045895037651062015, 0.004620384216308593, 0.004642784118652344, 0.004583680152893067, 0.0048423042297363285, 0.004576223850250244, 0.004595456123352051, 0.004588895797729492, 0.004617087841033936, 0.004593664169311523, 0.004605343818664551, 0.004575647830963135, 0.004633823871612549, 0.004651999950408935, 0.004650559902191162, 0.00465334415435791, 0.004814271926879883, 0.004593664169311523, 0.004586207866668701, 0.004585472106933594, 0.004571167945861816, 0.004595615863800049, 0.00465718412399292, 0.004579360008239746, 0.004619520187377929, 0.0045862398147583005, 0.0045875201225280765, 0.004596767902374267, 0.004602880001068115, 0.004583680152893067, 0.004337247848510742, 0.004600255966186523, 0.0046059517860412595, 0.004590752124786377, 0.004594528198242188, 0.0046035838127136235, 0.0045939841270446775, 0.004614143848419189, 0.0045957121849060055, 0.00458735990524292, 0.00460211181640625, 0.004589471817016601, 0.004627776145935058, 0.004692671775817871, 0.004590591907501221, 0.004574431896209717, 0.004601632118225098, 0.004595263957977295, 0.004598207950592041, 0.004593215942382813, 0.004587999820709229, 0.004581344127655029, 0.004605504035949707, 
0.004585919857025146, 0.0045957121849060055, 0.004616447925567627, 0.004597119808197022, 0.004587903976440429, 0.004586719989776612, 0.004577856063842773, 0.00458351993560791, 0.0045929279327392575, 0.00464572811126709, 0.004607423782348633, 0.004597343921661377, 0.0045864639282226565, 0.004605855941772461, 0.0045948481559753415, 0.004598720073699951, 0.004589856147766113, 0.004578303813934326, 0.004573503971099854, 0.004586944103240967, 0.0045760002136230465, 0.0045689277648925785, 0.004673696041107177, 0.004628704071044922, 0.004579455852508545, 0.004588768005371093, 0.004582240104675293, 0.004573247909545899, 0.004593183994293213, 0.004632480144500732, 0.004583072185516357, 0.004586143970489502, 0.004576608180999756, 0.004626368045806885, 0.004670176029205322, 0.004577248096466064, 0.004605823993682861, 0.004577439785003662, 0.004551680088043213, 0.0045660161972045895, 0.004331520080566406, 0.004833055973052979, 0.004565120220184326, 0.004585472106933594, 0.00459171199798584, 0.004573184013366699, 0.004581376075744629, 0.004579328060150147, 0.004583424091339112, 0.004570496082305908, 0.004573472023010254, 0.004585824012756348, 0.00456822395324707, 0.004570144176483154, 0.004568384170532226, 0.004581888198852539, 0.00455679988861084, 0.004597343921661377, 0.0046139202117919925, 0.004687744140625, 0.004614208221435547, 0.004614272117614746, 0.004608575820922852, 0.004584671974182129, 0.0046436161994934085, 0.0045875201225280765, 0.004753407955169678, 0.004636288166046143, 0.004600192070007324, 0.004765696048736572, 0.006977536201477051, 0.005701632022857666, 0.0050032639503479, 0.0046059517860412595, 0.004679743766784668, 0.004621600151062012, 0.004613823890686035, 0.004639872074127197, 0.004584383964538574, 0.00458844804763794, 0.004602975845336914, 0.0045883522033691405, 0.0045939841270446775, 0.0046072320938110355, 0.00459552001953125, 0.004585279941558838, 0.004582304000854492, 0.004586559772491455, 0.004581952095031738, 0.004573567867279053, 0.00462227201461792, 0.004582752227783203, 0.004594399929046631, 0.0045732159614562985, 0.0045806078910827636, 0.004573919773101807, 0.004572703838348389, 0.004620160102844238, 0.004586080074310303, 0.004573184013366699, 0.004601856231689453, 0.004575232028961182, 0.004622335910797119, 0.004567039966583252, 0.004597760200500488, 0.004626368045806885, 0.004579391956329346, 0.004584479808807373, 0.004631135940551758, 0.004601920127868652, 0.00468617582321167, 0.0046368961334228515, 0.0046210880279541015, 0.004604896068572998, 0.004596799850463867, 0.004604383945465088, 0.0046079039573669435, 0.004630239963531494, 0.004623263835906982, 0.004607295989990235, 0.004594304084777832, 0.004607583999633789, 0.004592031955718994, 0.004582560062408448, 0.004628704071044922, 0.0045881600379943845, 0.004578432083129883, 0.004602752208709717, 0.004591616153717041, 0.004592991828918457, 0.004610432147979736, 0.004626560211181641, 0.004675615787506104, 0.0046096000671386715, 0.00461023998260498, 0.004608160018920899, 0.004612319946289063, 0.004622335910797119, 0.004613311767578125, 0.004674304008483887, 0.004606016159057617, 0.00458739185333252, 0.004604032039642334, 0.0046015682220458985, 0.0046284480094909665, 0.004581696033477784, 0.004591616153717041, 0.004593760013580322, 0.0048249921798706056, 0.004588672161102295, 0.004617087841033936, 0.004591616153717041, 0.004597760200500488, 0.004593664169311523, 0.004589151859283447, 0.004585216045379639, 0.004606624126434326, 0.004611135959625244, 0.0045779838562011715, 0.0045775361061096195, 0.004598911762237549, 
0.004584224224090576, 0.004585087776184082, 0.004700640201568604, 0.00463647985458374, 0.004573184013366699, 0.004339712142944336, 0.004575232028961182, 0.004585472106933594, 0.0045875201225280765, 0.004567039966583252, 0.004599584102630615, 0.004607999801635742, 0.004688096046447754, 0.0046590080261230465, 0.004568416118621826, 0.004567903995513916, 0.004579455852508545, 0.004598656177520752, 0.004596735954284668, 0.0045649919509887695, 0.004661248207092285, 0.0045853757858276365, 0.004583424091339112, 0.004581471920013428, 0.0045875201225280765, 0.0045855998992919925, 0.004589375972747803, 0.004579391956329346, 0.004589568138122559, 0.0045957121849060055, 0.004614143848419189, 0.004640768051147461, 0.004693120002746582, 0.004622879981994629, 0.00460425615310669, 0.004590784072875977, 0.004620480060577393, 0.004645503997802734, 0.004605375766754151, 0.004605567932128906, 0.0047010879516601566, 0.0046098241806030275, 0.004582047939300537, 0.0046208958625793455, 0.004594592094421387, 0.004581471920013428, 0.004589407920837402, 0.004591487884521485, 0.004577568054199219, 0.0045762557983398434, 0.004616896152496338, 0.004583072185516357, 0.004585919857025146, 0.0046564159393310545, 0.004689023971557617, 0.004589375972747803, 0.004616064071655273, 0.004608223915100098, 0.004570911884307862, 0.0045855679512023925, 0.004579360008239746, 0.004584640026092529, 0.004577824115753174, 0.004602464199066162, 0.004716224193572998, 0.004626431941986084, 0.0045875201225280765, 0.004588640213012695, 0.004315008163452148, 0.004617695808410645, 0.004582367897033692, 0.004605440139770508, 0.00458512020111084, 0.004573823928833008, 0.004634528160095215, 0.004579328060150147, 0.004582431793212891, 0.004594304084777832, 0.004625760078430176, 0.004592639923095703, 0.004578656196594239, 0.004579999923706055, 0.004560895919799805, 0.004663296222686767, 0.0045957121849060055, 0.004616191864013672, 0.004585472106933594, 0.004580416202545166, 0.004582335948944092, 0.004589568138122559, 0.004567039966583252, 0.0046217918395996095, 0.004774432182312011, 0.0046059517860412595, 0.004583775997161865, 0.004597087860107422, 0.0045838398933410645, 0.004593056201934814, 0.004602367877960205, 0.004579423904418945, 0.004576704025268555, 0.004579808235168457, 0.004583424091339112, 0.0045866241455078125, 0.0045740799903869625, 0.004680736064910889, 0.004602303981781006, 0.004575776100158691, 0.004585472106933594, 0.004566656112670898, 0.004844192028045654, 0.004613120079040528, 0.004580063819885254, 0.004595168113708496, 0.004581920146942139, 0.004599775791168213, 0.004585087776184082, 0.00460649585723877, 0.0046217598915100095, 0.00467958402633667, 0.0046039681434631346, 0.004590047836303711, 0.004583680152893067, 0.0047707200050354, 0.004643360137939453, 0.004601344108581543, 0.00459769582748413, 0.006679423809051514, 0.005082816123962402, 0.006442592144012451, 0.004638815879821777, 0.004675583839416504, 0.004700128078460693, 0.004628511905670166, 0.004622335910797119, 0.004624383926391602, 0.0046284480094909665, 0.0046059517860412595, 0.004618271827697754, 0.004639840126037598, 0.004613311767578125, 0.004584767818450928, 0.004597631931304932, 0.004608640193939209, 0.004600959777832031, 0.00459446382522583, 0.00459980821609497, 0.004616096019744873, 0.004567232131958008, 0.004624288082122802, 0.004603487968444824, 0.004589439868927002, 0.004588064193725586, 0.004585311889648437, 0.004591775894165039, 0.004599743843078613, 0.004662623882293701, 0.0045937919616699215, 0.00460041618347168, 0.004581376075744629, 
0.004591616153717041, 0.0045649919509887695, 0.004683775901794434, 0.004587039947509765, 0.004583903789520264, 0.004585472106933594, 0.004603903770446777, 0.004580416202545166, 0.004587903976440429, 0.00460591983795166, 0.004594272136688233, 0.0045829439163208, 0.004581215858459472, 0.0045797119140625, 0.004589824199676513, 0.0045649919509887695, 0.004655104160308838, 0.004574624061584473, 0.004600192070007324, 0.004586719989776612, 0.004584447860717773, 0.00456444787979126, 0.00465180778503418, 0.004587264060974121, 0.004567039966583252, 0.004575232028961182, 0.004572512149810791, 0.004579999923706055, 0.004582784175872803, 0.004708992004394531, 0.004627744197845459, 0.00460259199142456, 0.004615615844726562, 0.004595615863800049, 0.00432528018951416, 0.004624735832214355, 0.004959424018859863, 0.004651264190673828, 0.0052228479385375975, 0.0049994239807128905, 0.005240928173065185, 0.004770944118499756, 0.004631328105926514, 0.004630335807800293, 0.004612256050109864, 0.004624192237854004, 0.004642943859100342, 0.004636960029602051, 0.004611904144287109, 0.004644800186157227, 0.0046408319473266605, 0.004610047817230224, 0.004589568138122559, 0.004618239879608154, 0.00460319995880127, 0.004586175918579101, 0.004593664169311523, 0.004599775791168213, 0.004591263771057129, 0.00460752010345459, 0.004616576194763184, 0.004579584121704102, 0.0045734081268310545, 0.004790272235870361, 0.0045710082054138184, 0.004583487987518311, 0.004639904022216797, 0.004620672225952149, 0.004614975929260254, 0.004603551864624023, 0.004595776081085205, 0.004588799953460693, 0.00461900806427002, 0.004707712173461914, 0.004604544162750244, 0.004610047817230224, 0.00460591983795166, 0.004597792148590088, 0.004640768051147461, 0.004703231811523437, 0.004654143810272217, 0.004651167869567871, 0.004628255844116211, 0.004708223819732666, 0.004631999969482422, 0.00476639986038208, 0.0046705279350280765, 0.004635583877563476, 0.00466534423828125, 0.004623807907104492, 0.004636672019958496, 0.004779679775238037, 0.004625472068786621, 0.004637983798980713, 0.0046143999099731445, 0.004603807926177979, 0.004589407920837402, 0.004326879978179932, 0.0046003518104553226, 0.004757503986358643, 0.004601856231689453, 0.004605696201324463, 0.004690176010131836, 0.0046284799575805665, 0.00478169584274292, 0.00463424015045166, 0.004596479892730713, 0.004593632221221924, 0.0046011838912963865, 0.004614848136901856, 0.0046179518699646, 0.004614272117614746, 0.0045979199409484865, 0.0045866560935974125, 0.004586336135864258, 0.004583231925964356, 0.004587711811065674, 0.004603903770446777, 0.004611487865447998, 0.004592000007629394, 0.0046061439514160155, 0.005052224159240722, 0.004757440090179444, 0.004609663963317871, 0.004624800205230713, 0.004598015785217285, 0.004603903770446777, 0.004612095832824707, 0.004587423801422119, 0.004587615966796875, 0.004636415958404541, 0.004595967769622802, 0.004595200061798096, 0.004702720165252685, 0.004593887805938721, 0.004579391956329346, 0.004618080139160157, 0.0047041277885437015, 0.004617280006408692, 0.0046212477684021, 0.004585472106933594, 0.004607456207275391, 0.004770431995391846, 0.00470630407333374, 0.004577439785003662, 0.004599199771881103, 0.004618591785430908, 0.00459980821609497, 0.004608320236206055, 0.004613823890686035, 0.004611743927001953, 0.004593056201934814, 0.004616799831390381, 0.004599391937255859, 0.004596704006195068, 0.0046014080047607426, 0.004616640090942383, 0.004618015766143799, 0.004595039844512939, 0.00460214376449585, 0.004347936153411865, 
0.004610015869140625, 0.004580448150634766, 0.004600736141204834, 0.004597760200500488, 0.0045763840675354, 0.004592639923095703, 0.004603744029998779, 0.004653120040893554, 0.0045931520462036135, 0.00461407995223999, 0.00457916784286499, 0.004573472023010254, 0.004587423801422119, 0.004594175815582275, 0.004581376075744629, 0.004601856231689453, 0.004623424053192138, 0.004606912136077881, 0.004779808044433594, 0.0046137280464172365, 0.00540940809249878, 0.0054620480537414555, 0.0065433278083801266, 0.006535200119018555, 0.0046878399848937985, 0.00468342399597168, 0.005933631896972656, 0.006356800079345703, 0.004677599906921387, 0.00466476821899414, 0.004656735897064209, 0.0046286721229553225, 0.0046210880279541015, 0.004716703891754151, 0.004661087989807129, 0.004665120124816895, 0.004700319766998291, 0.0046367359161376956, 0.004976640224456787, 0.005001215934753418, 0.00465715217590332, 0.004821023941040039, 0.004636640071868896, 0.004644864082336426, 0.004653056144714355, 0.004630527973175049, 0.004610047817230224, 0.004640255928039551, 0.00460646390914917, 0.004601856231689453, 0.004700096130371094, 0.0047350401878356934, 0.00459980821609497, 0.0046100797653198244, 0.004617248058319092, 0.004607168197631836, 0.004607423782348633, 0.004636223793029785, 0.004602079868316651, 0.004602496147155762, 0.004590688228607177, 0.00458739185333252, 0.004319295883178711, 0.004614912033081055, 0.004620192050933838, 0.004638207912445068, 0.004608255863189697, 0.00459987211227417, 0.00471289587020874, 0.004590784072875977, 0.004579999923706055, 0.004607999801635742, 0.004603903770446777, 0.00458351993560791, 0.004589151859283447, 0.004689727783203125, 0.0045914239883422855, 0.004596415996551514, 0.004640031814575196, 0.004583871841430664, 0.004634912014007568, 0.004571135997772217, 0.004609344005584717, 0.004580031871795654, 0.004585472106933594, 0.004794367790222168, 0.004616191864013672, 0.004622335910797119, 0.004593664169311523, 0.0045948481559753415, 0.004605984210968018, 0.0046180481910705565, 0.004637695789337158, 0.004629631996154785, 0.0045966081619262696, 0.004590816020965576, 0.004594719886779785, 0.004619328022003174, 0.004634719848632812, 0.004627039909362793, 0.004617311954498291, 0.0046150717735290525, 0.004629983901977539, 0.004627999782562256, 0.0046657280921936035, 0.004594048023223877, 0.004634047985076904, 0.004602687835693359, 0.004609920024871826, 0.004608128070831299, 0.0046423358917236325, 0.004808159828186035, 0.004653920173645019, 0.004689919948577881, 0.004616352081298828, 0.00461027193069458, 0.004648736000061035, 0.004617631912231445, 0.004714335918426514, 0.004615104198455811, 0.0046221442222595215, 0.004601088047027588, 0.004624735832214355, 0.004700575828552246, 0.004616191864013672, 0.0043541440963745116, 0.004629087924957275, 0.004640351772308349, 0.004605728149414063, 0.004649600028991699, 0.004761248111724853, 0.0046444802284240725, 0.004642816066741944, 0.004613887786865234, 0.004609151840209961, 0.004586336135864258, 0.004639872074127197, 0.0046202239990234375, 0.004677567958831787, 0.004597760200500488, 0.004593664169311523, 0.004587200164794922, 0.004647232055664062, 0.0046059517860412595, 0.0045957121849060055, 0.0045957121849060055, 0.0045792322158813474, 0.004636767864227295, 0.004585247993469238, 0.004606080055236816, 0.004587615966796875, 0.004597760200500488, 0.00459980821609497, 0.004577280044555664, 0.004580671787261963, 0.0045779838562011715, 0.004597760200500488, 0.004588960170745849, 0.004577888011932373, 0.004572896003723145, 0.004565279960632324, 
0.0045875201225280765, 0.004599711894989014, 0.0046244797706604, 0.004577280044555664, 0.004597760200500488, 0.004583360195159912, 0.004597824096679687, 0.004584767818450928, 0.004596415996551514, 0.004615327835083008, 0.004731743812561035, 0.00459980821609497, 0.004616191864013672, 0.004642784118652344, 0.004615968227386475, 0.00464086389541626, 0.0045871038436889645, 0.004585631847381592, 0.004581791877746582, 0.004590752124786377, 0.0047298879623413085, 0.00471020793914795, 0.0046059517860412595, 0.004583712100982666, 0.004593376159667969, 0.004595232009887695, 0.00466326379776001, 0.004343808174133301, 0.004583424091339112, 0.004771840095520019, 0.0046059517860412595, 0.004624383926391602, 0.004594783782958984, 0.004575424194335938, 0.004559840202331543, 0.004585216045379639, 0.004615551948547363, 0.004670015811920166, 0.00457040023803711, 0.004578080177307129, 0.004610047817230224, 0.0045649919509887695, 0.004632415771484375, 0.00462886381149292, 0.004575007915496826, 0.004593183994293213, 0.0046691842079162596, 0.004606527805328369, 0.004603903770446777, 0.004636127948760987, 0.004674240112304688, 0.004586912155151367, 0.004590208053588867, 0.004581247806549072, 0.004605440139770508, 0.0046208958625793455, 0.004716544151306152, 0.004591616153717041, 0.004597216129302978, 0.004583807945251464, 0.004662975788116455, 0.004626976013183594, 0.00464195203781128, 0.004604320049285889, 0.004612480163574219, 0.004597760200500488, 0.00459932804107666, 0.004587999820709229, 0.004638720035552979, 0.0046254081726074215, 0.0046191678047180175, 0.004616288185119629, 0.004593664169311523, 0.004687871932983399, 0.004679168224334717, 0.004647424221038818, 0.004605728149414063, 0.006467807769775391, 0.005068031787872315, 0.006166528224945069, 0.004623199939727783, 0.00464031982421875, 0.004624000072479248, 0.004622367858886719, 0.0046386241912841795, 0.004620543956756592, 0.004669504165649414, 0.004657631874084473, 0.0046284799575805665, 0.004611743927001953, 0.004335616111755371, 0.00461737585067749, 0.004578144073486328, 0.004581376075744629, 0.004583583831787109, 0.004622111797332764, 0.004595967769622802, 0.00459552001953125, 0.004586944103240967, 0.004581791877746582, 0.004591775894165039, 0.004584671974182129, 0.004719391822814941, 0.004597760200500488, 0.004596831798553466, 0.00460211181640625, 0.004593696117401123, 0.004591904163360596, 0.004570784091949463, 0.004602560043334961, 0.004591616153717041, 0.004585472106933594, 0.004632575988769531, 0.004607999801635742, 0.004592832088470459, 0.004596543788909912, 0.004607999801635742, 0.004572671890258789, 0.004665855884552002, 0.004621632099151611, 0.0045779838562011715, 0.004590911865234375, 0.004637216091156006, 0.004776095867156983, 0.004607999801635742, 0.004679679870605469, 0.004577280044555664, 0.004571135997772217, 0.004616191864013672, 0.004632575988769531, 0.004593664169311523, 0.004581183910369873, 0.004589759826660157, 0.004589183807373047, 0.004598144054412842, 0.004607391834259033, 0.004583456039428711, 0.004591584205627441, 0.00459552001953125, 0.004598559856414795, 0.00459980821609497, 0.004630527973175049, 0.004593664169311523, 0.004575232028961182, 0.004578591823577881, 0.004573919773101807, 0.004569087982177734, 0.004585472106933594, 0.0045957121849060055, 0.004614143848419189, 0.004616064071655273, 0.004570879936218262, 0.004589951992034912, 0.004333471775054932, 0.004606048107147217, 0.0045957121849060055, 0.004624063968658447, 0.004603424072265625, 0.004582176208496094, 0.004612095832824707, 0.004837247848510742, 
0.0046035838127136235, 0.004604095935821533, 0.004608255863189697, 0.0045957121849060055, 0.004593664169311523, 0.004600959777832031, 0.004598432064056397, 0.004611648082733154, 0.004631199836730957, 0.004614143848419189, 0.0046139202117919925, 0.004608223915100098, 0.004683263778686523, 0.004640992164611816, 0.004661439895629883, 0.004616096019744873, 0.004581567764282227, 0.004581376075744629, 0.004578464031219483, 0.004601823806762695, 0.0046183037757873534, 0.004651391983032226, 0.004642879962921143, 0.0046096000671386715, 0.004608831882476807, 0.004601664066314697, 0.004612287998199463, 0.004724736213684082, 0.004687039852142334, 0.004619200229644776, 0.004617152214050293, 0.004622719764709473, 0.004606527805328369, 0.004625919818878174, 0.004810368061065674, 0.004590464115142822, 0.004610047817230224, 0.004625792026519775, 0.0046022400856018065, 0.004593632221221924, 0.0046144318580627446, 0.004642047882080078, 0.004596479892730713, 0.004601856231689453, 0.004610112190246582, 0.004588543891906738, 0.004594719886779785, 0.004632480144500732, 0.004607999801635742, 0.004762976169586181, 0.0046109437942504885, 0.004593311786651611, 0.00464899206161499, 0.0046244797706604, 0.004590879917144775, 0.004362239837646485, 0.004601151943206787, 0.0046146559715271, 0.004597951889038086, 0.004605055809020996, 0.004615200042724609, 0.004634047985076904, 0.004607423782348633, 0.0046171841621398925, 0.004604159832000732, 0.004597504138946534, 0.004589568138122559, 0.004623456001281738, 0.0046150717735290525, 0.004712448120117187, 0.004605792045593262, 0.00461030387878418, 0.004582687854766846, 0.004606656074523926, 0.004646848201751709, 0.004615359783172608, 0.0045985918045043945, 0.0045957121849060055, 0.004611551761627197, 0.004725279808044433, 0.004626431941986084, 0.004616288185119629, 0.004605535984039307, 0.004688191890716553, 0.004600863933563232, 0.004574175834655762, 0.0046013760566711425, 0.0046228160858154295, 0.004605120182037354, 0.004591519832611084, 0.0046100797653198244, 0.004586368083953857, 0.004577280044555664, 0.004694015979766846, 0.004638112068176269, 0.004581183910369873, 0.004753632068634033, 0.004606527805328369, 0.004595039844512939, 0.004636352062225342, 0.004630911827087403, 0.004599743843078613, 0.00463481616973877, 0.004602176189422607, 0.004612480163574219, 0.004591040134429932, 0.004710752010345459, 0.004589568138122559, 0.004587615966796875, 0.004603744029998779, 0.004581439971923828, 0.004599071979522705, 0.004616928100585937, 0.004697824001312256, 0.004583615779876709, 0.0045857601165771485, 0.004600831985473633, 0.004574016094207763, 0.00433955192565918, 0.00461081600189209, 0.004615808010101318, 0.004618752002716065, 0.004597760200500488, 0.004589407920837402, 0.0045994877815246586, 0.0055485119819641115, 0.0061151041984558105, 0.005529888153076172, 0.005916607856750488, 0.00466534423828125, 0.004630496025085449, 0.004617536067962646, 0.004713183879852295, 0.004671103954315185, 0.0046533761024475095, 0.0046319360733032226, 0.004616831779479981, 0.004609663963317871, 0.004654560089111328, 0.004832096099853515, 0.004634751796722412, 0.004658815860748291, 0.004607456207275391, 0.004625311851501465, 0.004696063995361328, 0.004610047817230224, 0.00459980821609497, 0.004589568138122559, 0.004593664169311523, 0.004573184013366699, 0.004640768051147461, 0.004622335910797119, 0.004610047817230224, 0.004677631855010986, 0.00458128023147583, 0.004593376159667969, 0.00458892822265625, 0.00461078405380249, 0.0046302080154418945, 0.004614719867706299, 0.004597311973571778, 
0.0045892162322998045, 0.004602079868316651, 0.004604032039642334, 0.0046228160858154295, 0.004628736019134522, 0.004595456123352051, 0.004592832088470459, 0.0046885118484497074, 0.004581120014190674, 0.0046098241806030275, 0.004618815898895264, 0.004587615966796875, 0.004593183994293213, 0.004587935924530029, 0.00468998384475708, 0.00551526403427124, 0.004667295932769776, 0.0045979199409484865, 0.00459769582748413, 0.004591839790344238, 0.004322463989257812, 0.004604767799377441, 0.0048213119506835935, 0.004602655887603759, 0.0046089282035827635, 0.004575232028961182, 0.004599552154541015, 0.004598015785217285, 0.004618368148803711, 0.004627744197845459, 0.0045840320587158204, 0.004610047817230224, 0.004607999801635742, 0.00459606409072876, 0.004587168216705322, 0.004622079849243164, 0.004618495941162109, 0.004599167823791504, 0.0045922560691833494, 0.004620287895202637, 0.004583263874053955, 0.0046245441436767575, 0.004677631855010986, 0.004622335910797119, 0.004601151943206787, 0.004682432174682618, 0.004597760200500488, 0.004680704116821289, 0.0047226881980895995, 0.004737728118896485, 0.004630623817443847, 0.004618463993072509, 0.004618239879608154, 0.004615359783172608, 0.004641600131988525, 0.004622079849243164, 0.00460211181640625, 0.00459980821609497, 0.004619584083557129, 0.004620512008666992, 0.004612576007843017, 0.0046382398605346676, 0.004610112190246582, 0.004649343967437744, 0.004656383991241455, 0.004603839874267578, 0.0046293439865112305, 0.004637951850891113, 0.004614912033081055, 0.004600927829742431, 0.004639039993286133, 0.004639039993286133, 0.004639008045196533, 0.0046685757637023925, 0.004663424015045166, 0.004647647857666016, 0.004679008007049561, 0.004700160026550293, 0.0046211199760437015, 0.004652895927429199, 0.004663392066955567, 0.004629439830780029, 0.004662240028381347, 0.004368703842163086, 0.004634272098541259, 0.004606239795684815, 0.004616511821746826, 0.004605055809020996, 0.004585696220397949, 0.004583104133605957, 0.0045895037651062015, 0.004595903873443603, 0.004602015972137451, 0.004603616237640381, 0.00460697603225708, 0.004595200061798096, 0.004610400199890136, 0.004594816207885742, 0.004625088214874268, 0.004606080055236816, 0.004616064071655273, 0.004617919921875, 0.004616384029388428, 0.0045853757858276365, 0.004593887805938721, 0.0046096320152282716, 0.0046119999885559084, 0.004624896049499512, 0.00459116792678833, 0.0045895037651062015, 0.004605855941772461, 0.004602464199066162, 0.004599199771881103, 0.004647520065307617, 0.004630527973175049, 0.004603231906890869, 0.004612703800201416, 0.004610112190246582, 0.0045912318229675295, 0.004688255786895752, 0.004618239879608154, 0.004656544208526611, 0.004632448196411133, 0.004575551986694336, 0.004598176002502442, 0.0045872960090637205, 0.004624608039855957, 0.004622335910797119, 0.0046159358024597166, 0.004607615947723388, 0.004609920024871826, 0.004623104095458985, 0.004618336200714111, 0.004625696182250977, 0.004696800231933594, 0.004601247787475586, 0.004579967975616455, 0.004594592094421387, 0.004585663795471192, 0.004618944168090821, 0.004614208221435547, 0.004612127780914307, 0.00459769582748413, 0.004597824096679687, 0.00458784008026123, 0.004581056118011475, 0.004335968017578125, 0.0046386561393737796, 0.00460422420501709, 0.00459558391571045, 0.004603807926177979, 0.004577184200286865, 0.004573279857635498, 0.004614143848419189, 0.0046694397926330565, 0.004587200164794922, 0.004587776184082031, 0.004589536190032959, 0.004601632118225098, 0.004595647811889649, 0.0046022400856018065, 
0.0046113600730896, 0.004598048210144043, 0.004639167785644532, 0.004583424091339112, 0.004570176124572754, 0.004604864120483399, 0.004612095832824707, 0.004587200164794922, 0.00458784008026123, 0.004595104217529297, 0.0045799360275268555, 0.004581376075744629, 0.004632575988769531, 0.004626207828521729, 0.004574848175048828, 0.004583168029785156, 0.004576416015625, 0.004591296195983887, 0.004566847801208496, 0.00461023998260498, 0.004601856231689453, 0.004579328060150147, 0.0045957121849060055, 0.004582399845123291, 0.004600224018096924, 0.004616511821746826, 0.004641056060791016, 0.004596767902374267, 0.004590496063232422, 0.004646656036376953, 0.004606272220611572, 0.004617343902587891, 0.004639616012573242, 0.004638720035552979, 0.004661119937896728, 0.004620480060577393, 0.004595071792602539, 0.004581952095031738, 0.00458515214920044, 0.004628767967224121, 0.004589600086212158, 0.004591616153717041, 0.004611584186553955, 0.004579840183258056, 0.004582752227783203, 0.004690591812133789, 0.004607327938079834, 0.004588064193725586]",tokens/s,215.7895708829342,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.20416,14281.474048,0.0,13878.951936,13865.632768,s,1,7.4677412109375,7.4677412109375,0.0,7.4677412109375,7.4677412109375,7.4677412109375,7.4677412109375,[7.4677412109375],,kWh,9.391485658314498e-06,1.0284966197199963e-06,5.081115176011619e-06,1.5501097454046114e-05,,MB,1220.173824,14703.0016,0.0,14289.993728,14237.628416,s,10,12.32615283203125,1.232615283203125,0.0043079941697083245,1.2344929809570313,1.2362048828124999,1.2364078002929688,1.2365701342773439,"[1.2225899658203125, 1.2289388427734376, 1.22941064453125, 1.2314349365234376, 1.233703857421875, 1.2361016845703126, 1.2352821044921876, 1.2359202880859375, 1.2366107177734376, 1.2361597900390624]",tokens/s,207.68848438642414,kWh,3.5947611201252885e-05,3.964536364065857e-06,2.3785546806200098e-05,6.369769437151884e-05,tokens/kWh,4018983.7721106797,MB,1245.474816,14870.77376,0.0,14457.765888,14416.301056,s,10,38.59487036132812,3.859487036132813,0.005665137170797998,3.857146484375,3.8680602783203124,3.8688775512695313,3.8695313696289064,"[3.86969482421875, 3.867878662109375, 3.86386083984375, 3.861939453125, 3.857606689453125, 3.85405810546875, 3.853245361328125, 3.855262451171875, 3.8546376953125, 3.856686279296875]",tokens/s,16.323412777446634,kWh,0.00011283630283916256,1.2446725052571238e-05,7.50471433710001e-05,0.00020033017126273392,tokens/kWh,314480.8373241753,,s,630,38.591319522857724,0.06125606273469471,0.00034725985711133844,0.061271600723266595,0.06170152359008789,0.0618001766204834,0.061970125427246094,"[0.06170624160766602, 0.06108287811279297, 0.06095743942260742, 0.06086790466308594, 0.06075369644165039, 0.0608590087890625, 0.06089920043945313, 0.06106768035888672, 0.06098720169067383, 0.06104288101196289, 0.061067264556884764, 0.061039745330810545, 0.060908416748046874, 
0.06109299087524414, 0.061031295776367185, 0.06124687957763672, 0.06132358551025391, 0.06135014343261719, 0.061295841217041014, 0.06123129653930664, 0.061048896789550784, 0.06117023849487305, 0.0611860466003418, 0.061257728576660155, 0.06128025436401367, 0.06155059051513672, 0.06141132736206055, 0.06144409561157226, 0.061282302856445314, 0.061408382415771484, 0.06138284683227539, 0.061477409362792966, 0.06147292709350586, 0.06143385696411133, 0.061391998291015625, 0.06139363098144531, 0.061313182830810546, 0.061343456268310545, 0.06128464126586914, 0.06140518569946289, 0.061710334777832034, 0.06164611053466797, 0.06153903961181641, 0.06174886322021484, 0.06176396942138672, 0.061774848937988285, 0.06158950424194336, 0.06162063980102539, 0.06176419067382812, 0.06174908828735352, 0.06173712158203125, 0.06184304046630859, 0.06165340805053711, 0.06162636947631836, 0.061642078399658205, 0.06163727951049805, 0.061797889709472656, 0.061782527923583984, 0.06185555267333984, 0.0619747200012207, 0.062046142578125, 0.06196867370605469, 0.06201116943359375, 0.061970718383789064, 0.06116352081298828, 0.060824703216552735, 0.060875648498535155, 0.060819454193115234, 0.06084563064575195, 0.060898910522460936, 0.061069793701171875, 0.06105449676513672, 0.0610865592956543, 0.0613001594543457, 0.061241920471191404, 0.06105497741699219, 0.06112870407104492, 0.06103039932250977, 0.061102046966552734, 0.06128211212158203, 0.061147327423095706, 0.06101375961303711, 0.06104912185668945, 0.06079804611206055, 0.060944511413574216, 0.060959007263183596, 0.061092353820800784, 0.0612880973815918, 0.06134614562988281, 0.061259777069091796, 0.06135539245605469, 0.06131692886352539, 0.06140192031860352, 0.06142156982421875, 0.06145843124389649, 0.06147398376464844, 0.06156745529174805, 0.06137276840209961, 0.06138032150268555, 0.061346080780029295, 0.061399040222167967, 0.06140313720703125, 0.06132876968383789, 0.06133814239501953, 0.061631649017333985, 0.06146115112304688, 0.06148479843139648, 0.061564544677734374, 0.061795230865478515, 0.06169804763793945, 0.06162227249145508, 0.061906494140625, 0.061847137451171874, 0.06171657562255859, 0.06170256042480469, 0.061720928192138674, 0.06168166351318359, 0.061644001007080076, 0.061835968017578125, 0.06182495880126953, 0.061915073394775394, 0.06163817596435547, 0.06173766326904297, 0.061908992767333984, 0.061908992767333984, 0.06198476791381836, 0.06194467163085938, 0.06112982559204101, 0.060934688568115236, 0.06082598495483398, 0.06072870254516602, 0.06065011215209961, 0.06086835098266601, 0.06074934387207031, 0.06080374526977539, 0.06091987228393555, 0.060720832824707034, 0.06078428649902344, 0.060811424255371095, 0.060805633544921876, 0.06083993530273438, 0.06119334411621094, 0.06143065643310547, 0.06137855911254883, 0.061140064239501954, 0.06122383880615234, 0.061110271453857425, 0.06116742324829102, 0.06095481491088867, 0.06110003280639648, 0.061267967224121096, 0.06117155075073242, 0.06103055953979492, 0.06123929595947265, 0.06131836700439453, 0.061309726715087894, 0.061668800354003905, 0.06167987060546875, 0.06163228988647461, 0.061649345397949216, 0.061666431427001955, 0.06176047897338867, 0.06161379241943359, 0.061585697174072265, 0.06138675308227539, 0.061300735473632816, 0.06145024108886719, 0.06140313720703125, 0.06136374282836914, 0.06154230499267578, 0.06141804885864258, 0.06158335876464844, 0.06170742416381836, 0.06166409683227539, 0.061777568817138674, 0.06180489730834961, 0.06164684677124024, 0.06176748657226563, 0.06161427307128906, 0.06153609466552734, 
0.061437599182128905, 0.061427200317382816, 0.06142873764038086, 0.061582592010498045, 0.061507774353027345, 0.06151839828491211, 0.061470718383789064, 0.06170624160766602, 0.061663230895996096, 0.061755359649658205, 0.060821537017822266, 0.06057494354248047, 0.060511009216308594, 0.060467201232910155, 0.06065318298339844, 0.06061913681030273, 0.06061056137084961, 0.06086860656738281, 0.06076211166381836, 0.06072934341430664, 0.06069657516479492, 0.06089113616943359, 0.06094643020629883, 0.06099148941040039, 0.06116515350341797, 0.06119635009765625, 0.06138710403442383, 0.061107681274414065, 0.061194782257080076, 0.060837886810302735, 0.06085222244262695, 0.0607907829284668, 0.06089311981201172, 0.060897342681884764, 0.06115327835083008, 0.06098124694824219, 0.06098067092895508, 0.06095724868774414, 0.06113276672363281, 0.06115654373168945, 0.06123606491088867, 0.06144384002685547, 0.06169830322265625, 0.06161967849731445, 0.06154438400268555, 0.06134640121459961, 0.06131302261352539, 0.06137382507324219, 0.061421504974365236, 0.06150368118286133, 0.06149990463256836, 0.061609088897705076, 0.06162931060791016, 0.061519809722900394, 0.0616589126586914, 0.06160617446899414, 0.06177382278442383, 0.06180204772949219, 0.06192351913452149, 0.06190105438232422, 0.061886207580566406, 0.061611328125, 0.061722686767578125, 0.061444992065429686, 0.061685760498046874, 0.061852737426757814, 0.06181907272338867, 0.06198303985595703, 0.0618741455078125, 0.061843936920166015, 0.061868030548095705, 0.06189449691772461, 0.06173462295532227, 0.061055423736572266, 0.06076006317138672, 0.06074982452392578, 0.060806785583496094, 0.06077478408813477, 0.06084719848632812, 0.06080195236206055, 0.060809024810791014, 0.0609343376159668, 0.06104624176025391, 0.061038654327392576, 0.06118361663818359, 0.061364479064941406, 0.061252193450927736, 0.06137241744995117, 0.06146662521362305, 0.06147891235351562, 0.06125568008422851, 0.06113251113891602, 0.060907806396484375, 0.060878849029541014, 0.06076006317138672, 0.06085836791992188, 0.061042015075683596, 0.06100444793701172, 0.06100099182128906, 0.061135551452636716, 0.06116694259643555, 0.06128441619873047, 0.06119283294677735, 0.06128620910644531, 0.061373855590820314, 0.06129129409790039, 0.0611409912109375, 0.06122086334228516, 0.061171646118164065, 0.06125164794921875, 0.06099257659912109, 0.060897247314453125, 0.061010303497314455, 0.06112681579589844, 0.061268383026123044, 0.061289920806884765, 0.06142832183837891, 0.061517822265625, 0.06153955078125, 0.06148175811767578, 0.06161612701416016, 0.06163062286376953, 0.06153148651123047, 0.06147129440307617, 0.061507423400878905, 0.06144009780883789, 0.06155878448486328, 0.061458400726318356, 0.06143593597412109, 0.06140694427490234, 0.06136041641235351, 0.061465888977050784, 0.0616168327331543, 0.061636768341064456, 0.06179008102416992, 0.061830718994140624, 0.06086291122436523, 0.060633087158203126, 0.060639232635498044, 0.06052249526977539, 0.06060377502441406, 0.06068492889404297, 0.06060441589355469, 0.06076572799682617, 0.06082812881469726, 0.0607454719543457, 0.06076646423339844, 0.06074982452392578, 0.06090956878662109, 0.06089116668701172, 0.06107542419433594, 0.061101505279541016, 0.06108038330078125, 0.06091084671020508, 0.06093056106567383, 0.06077030563354492, 0.060811073303222656, 0.060870849609375, 0.06104678344726563, 0.061020160675048826, 0.06107318496704101, 0.06104691314697266, 0.06107961654663086, 0.06100486373901367, 0.0611475830078125, 0.06121846389770508, 0.06133152008056641, 
0.06129913711547851, 0.06132364654541016, 0.061282302856445314, 0.06141251373291016, 0.061241504669189456, 0.06125433731079102, 0.06116352081298828, 0.06128966522216797, 0.061283134460449216, 0.061298686981201174, 0.06116726303100586, 0.06122326278686523, 0.06129459381103516, 0.06141132736206055, 0.06139289474487305, 0.06140313720703125, 0.061456382751464846, 0.0615013427734375, 0.06148649597167969, 0.06146732711791992, 0.06138265609741211, 0.06171862411499023, 0.06157916641235352, 0.061456256866455075, 0.061669502258300785, 0.0616589126586914, 0.06153033447265625, 0.0615362548828125, 0.06157235336303711, 0.06163942337036133, 0.06171798324584961, 0.061693950653076174, 0.060789886474609374, 0.06056844711303711, 0.060552799224853515, 0.06051472091674805, 0.06054419326782227, 0.06066668701171875, 0.06058803176879883, 0.06078015899658203, 0.06080732727050781, 0.06072956848144531, 0.06077030563354492, 0.06081475067138672, 0.06088320159912109, 0.06093830490112305, 0.06103887939453125, 0.06111753463745117, 0.06111734390258789, 0.06104383850097656, 0.061116542816162106, 0.06103315353393555, 0.0611082878112793, 0.06091775894165039, 0.06086041641235351, 0.06096438217163086, 0.06110214233398437, 0.060999488830566405, 0.06114080047607422, 0.061093921661376956, 0.061123329162597655, 0.06110822296142578, 0.06131280136108398, 0.061236480712890624, 0.06128534317016601, 0.061125728607177736, 0.06116239929199219, 0.06113206481933594, 0.06121945571899414, 0.06117180633544922, 0.06114912033081055, 0.061204544067382814, 0.061179359436035155, 0.0611453742980957, 0.06135363388061523, 0.06132796859741211, 0.06134374237060547, 0.061480480194091795, 0.06134627151489258, 0.061456382751464846, 0.06149465560913086, 0.06157171249389649, 0.06165222549438477, 0.06143052673339844, 0.06146867370605469, 0.061431808471679686, 0.06158950424194336, 0.061491199493408207, 0.06161203384399414, 0.06152601623535156, 0.061587455749511716, 0.06151107025146484, 0.06164691162109375, 0.06175388717651367, 0.06195510482788086, 0.06088393783569336, 0.06076620864868164, 0.060590080261230465, 0.060518142700195315, 0.060727584838867185, 0.06069859313964844, 0.060655391693115235, 0.06068041610717773, 0.060733375549316404, 0.060725311279296874, 0.06091561508178711, 0.06077804946899414, 0.061000225067138675, 0.06091705703735351, 0.061063873291015626, 0.061142078399658205, 0.061182910919189454, 0.06100092697143555, 0.06104348754882812, 0.06082355117797852, 0.06088294219970703, 0.06090956878662109, 0.061044864654541016, 0.06099065780639648, 0.06105971145629883, 0.06105299377441406, 0.06112623977661133, 0.06108729553222656, 0.06117052841186523, 0.06135110473632813, 0.06121964645385742, 0.06117375946044922, 0.061432865142822264, 0.06136726379394531, 0.06127001571655273, 0.06112870407104492, 0.06122700881958008, 0.06112217712402344, 0.06110809707641601, 0.06113040161132813, 0.061440608978271485, 0.061413631439208985, 0.061505313873291015, 0.061333728790283204, 0.06140879821777344, 0.06151625442504883, 0.06132883071899414, 0.06148540878295899, 0.06156924819946289, 0.06140719985961914, 0.06148099136352539, 0.06153420639038086, 0.06153811264038086, 0.06148934555053711, 0.06150249481201172, 0.06167036819458008, 0.06168310546875, 0.06150547027587891, 0.061604225158691406, 0.06156876754760742, 0.061657630920410156, 0.061679615020751956, 0.06177648162841797, 0.060897377014160155, 0.060634368896484374, 0.060563583374023434, 0.06056409454345703, 0.060649471282958986, 0.06067814254760742, 0.06053478240966797, 0.06070649719238281, 0.060766529083251954, 
0.0607191047668457, 0.060763294219970704, 0.06078518295288086, 0.06090742492675781, 0.061081886291503906, 0.0612262077331543, 0.06116854476928711, 0.061093887329101565, 0.06088268661499024, 0.060862720489501955, 0.06080070495605469, 0.0610491828918457, 0.06089519882202148, 0.06098739242553711, 0.06098108673095703, 0.06108483123779297, 0.06103551864624023, 0.06114883041381836, 0.06117001724243164, 0.0611143684387207, 0.06118195343017578, 0.061376319885253904, 0.061413406372070316, 0.06136201477050781, 0.06127030563354492, 0.06119414520263672, 0.061165470123291016, 0.06113302230834961, 0.061155296325683596, 0.06114096069335938, 0.06127212905883789, 0.061298686981201174, 0.06116726303100586, 0.06122726440429688, 0.06138268661499023, 0.06142982482910156, 0.06148012924194336, 0.061414207458496094, 0.061593246459960935, 0.061639007568359376, 0.06152396774291992, 0.06153206253051758, 0.061430976867675784, 0.06145731353759765, 0.061609886169433595, 0.061474910736083986, 0.06155878448486328, 0.06167526245117187, 0.06155904006958008, 0.06173491287231445, 0.06165094375610351, 0.06170140838623047, 0.061602527618408204, 0.061986656188964845, 0.0609329605102539, 0.06062681579589844, 0.060622848510742185, 0.06058598327636719, 0.060646976470947266, 0.0607072639465332, 0.06069247817993164, 0.060780544281005856, 0.060747776031494144, 0.06078249740600586, 0.06076787185668946, 0.060821983337402345, 0.06112361526489258, 0.06108019256591797, 0.06121507263183594, 0.06133708953857422, 0.06116198348999023, 0.06099353790283203, 0.06094438552856445, 0.060859710693359374, 0.06099359893798828, 0.060844673156738284, 0.06090137481689453, 0.060985343933105465, 0.0611959342956543, 0.06108195114135742, 0.06100787353515625, 0.061001728057861325, 0.06123724746704102, 0.061233150482177735, 0.06127107238769531, 0.06141356658935547, 0.06138719940185547, 0.061435455322265624, 0.06135068893432617, 0.06118195343017578, 0.06121881484985352, 0.061177505493164065, 0.061088096618652346, 0.06127376174926758, 0.06133929443359375, 0.06140182495117188, 0.061384033203125, 0.061415809631347656, 0.06141567993164063, 0.06149884796142578, 0.06144464111328125, 0.061489151000976565, 0.061669376373291014, 0.061505535125732425, 0.06160732650756836, 0.061438369750976565, 0.06150912094116211, 0.06138079833984375, 0.061579776763916017, 0.061538303375244144, 0.061688865661621094, 0.061610977172851564, 0.06161612701416016, 0.06157683181762695, 0.061817214965820315, 0.061767326354980466]",tokens/s,16.32491471629651,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,809.51296,11806.834688,0.0,11404.312576,11388.314624,s,1,7.21101025390625,7.21101025390625,0.0,7.21101025390625,7.21101025390625,7.21101025390625,7.21101025390625,[7.21101025390625],,kWh,5.88875670834265e-06,6.423591562389423e-07,2.4741686459953116e-06,9.005284510576905e-06,,MB,1168.338944,11819.4176,0.0,11406.409728,11107.923968,s,10,3.693287750244141,0.3693287750244141,0.007025141483879123,0.3713990173339844,0.3734230133056641,0.3734380233764648,0.37345003143310546,"[0.348709228515625, 0.371537353515625, 0.3704242858886719, 0.37126068115234373, 0.3718695068359375, 0.36868795776367186, 0.373387939453125, 0.3734530334472656, 0.3705380859375, 0.373419677734375]",tokens/s,693.149349067311,kWh,1.0716025983333566e-05,1.1817992409252324e-06,7.139093012857094e-06,1.9036918237115893e-05,tokens/kWh,13447554.736085488,MB,1174.421504,11821.514752,0.0,11408.50688,11305.03168,s,10,33.18926049804688,3.3189260498046878,0.002399903474918135,3.3185374755859374,3.3217452880859377,3.3222173706054687,3.322595036621094,"[3.315338134765625, 3.316279052734375, 3.321640380859375, 3.319322509765625, 3.316801513671875, 3.320297119140625, 3.317642822265625, 3.322689453125, 3.3214970703125, 3.31775244140625]",tokens/s,18.98204390655448,kWh,9.693563090624973e-05,1.0691403011497888e-05,6.428554746014304e-05,0.00017191258137789066,tokens/kWh,366465.32496371615,,s,630,33.186391433715805,0.05267681179954892,0.0003013257014760566,0.05265347099304199,0.05296169052124024,0.053116100120544435,0.05395898113250733,"[0.05364499282836914, 0.05264006423950195, 0.05306345748901367, 0.052467937469482424, 0.05261056137084961, 0.05221017456054688, 0.05224652862548828, 0.05217030334472656, 0.05306003189086914, 0.052209663391113284, 0.052482048034667966, 0.05235686492919922, 0.05250652694702149, 0.05228569412231445, 0.05245552062988281, 0.05262886428833008, 0.052329086303710935, 0.05247180938720703, 0.05241856002807617, 0.052563968658447265, 0.05245302581787109, 0.05334460830688476, 0.05263324737548828, 0.052667873382568356, 0.052525951385498044, 0.05231951904296875, 0.05269347381591797, 0.05264764785766601, 0.05249283218383789, 0.052598785400390625, 0.05251382446289062, 0.05258544158935547, 0.05237760162353516, 0.052383136749267575, 0.05245398330688476, 0.0526561279296875, 0.05266636657714844, 0.05246892929077149, 0.05283216094970703, 0.05272873687744141, 0.05273161697387695, 0.052463905334472656, 0.052600639343261715, 0.052805824279785155, 0.052762622833251956, 0.05254499053955078, 0.052590721130371096, 0.052585918426513674, 0.052908031463623044, 0.052644607543945315, 0.05265395355224609, 0.0525458869934082, 0.0527400016784668, 0.0526275520324707, 0.0526827507019043, 0.05267865753173828, 0.05273190307617188, 0.052961280822753906, 0.05277407836914062, 0.05269177627563477, 0.05285478210449219, 0.05279305648803711, 0.052819393157958985, 0.05437849426269531, 0.05275775909423828, 0.05218790435791015, 0.05226291275024414, 0.05220761489868164, 0.05214310455322266, 0.05229379272460938, 0.052251487731933596, 0.05218294525146484, 0.05255715179443359, 0.05247257614135742, 0.05240627288818359, 0.05235452651977539, 0.05241427230834961, 0.052367935180664064, 0.052428958892822265, 0.052391937255859375, 0.052485889434814456, 0.05268016052246094, 0.05266614532470703, 0.05247283172607422, 0.052639041900634766, 0.052402881622314455, 0.05248966217041016, 0.05241708755493164, 0.05246566390991211, 0.05242060852050781, 0.052428512573242186, 0.05244073486328125, 0.05250502395629883, 
0.052555423736572265, 0.052544033050537106, 0.052674560546875, 0.05259468841552734, 0.05263359832763672, 0.052764511108398436, 0.05258419036865234, 0.052726177215576174, 0.052901630401611326, 0.05268915176391602, 0.05268070220947266, 0.05262745666503906, 0.05261721420288086, 0.05268070220947266, 0.05301248168945313, 0.052717151641845705, 0.05259920120239258, 0.05271551895141602, 0.05278515243530273, 0.052733665466308595, 0.052685089111328125, 0.052676414489746096, 0.05282015991210937, 0.05276646423339844, 0.05277679824829101, 0.052642208099365234, 0.05303910446166992, 0.053043201446533204, 0.05325126266479492, 0.05287187194824219, 0.05299417495727539, 0.05288140869140625, 0.05312102508544922, 0.054056831359863285, 0.052881248474121095, 0.05235929489135742, 0.052459583282470704, 0.05236483383178711, 0.05241084671020508, 0.052316158294677735, 0.052252544403076175, 0.05239616012573242, 0.05245132827758789, 0.052365310668945314, 0.05227225494384766, 0.05231849670410156, 0.052302433013916017, 0.052400127410888675, 0.05233663940429688, 0.05229308700561523, 0.05231875228881836, 0.052590591430664066, 0.05313740921020508, 0.05293056106567383, 0.0525926399230957, 0.052768768310546874, 0.05261283111572266, 0.05265343856811523, 0.0528076171875, 0.05246051025390625, 0.052483104705810545, 0.052513248443603514, 0.05251692962646484, 0.052463039398193356, 0.052765697479248044, 0.05268035125732422, 0.052681056976318356, 0.05252096176147461, 0.052959232330322265, 0.052736000061035154, 0.052838401794433595, 0.05281587219238281, 0.05281792068481445, 0.053960159301757814, 0.0526890869140625, 0.052732223510742186, 0.05283638381958008, 0.05272371292114258, 0.05262540817260742, 0.05259468841552734, 0.05265203094482422, 0.052686847686767575, 0.05291212844848633, 0.05287843322753906, 0.05267679977416992, 0.053471454620361326, 0.05276927947998047, 0.052729854583740236, 0.052739456176757814, 0.05292268753051758, 0.05278752136230469, 0.053544960021972655, 0.05339136123657227, 0.053305343627929686, 0.052780063629150394, 0.05306467056274414, 0.053938369750976566, 0.053542911529541014, 0.0523746223449707, 0.05248092651367187, 0.05254963302612305, 0.052359169006347656, 0.05244313430786133, 0.052432384490966794, 0.05234534454345703, 0.05320265579223633, 0.05228572845458984, 0.05222768020629883, 0.05232579040527344, 0.052558849334716794, 0.052485729217529295, 0.052550048828125, 0.05229568099975586, 0.05239603042602539, 0.05248614501953125, 0.052596736907958984, 0.0525926399230957, 0.05242265701293945, 0.05245337677001953, 0.052467041015625, 0.052662654876708986, 0.05249817657470703, 0.05257654571533203, 0.052514591217041016, 0.05282454299926758, 0.05359001541137695, 0.052647937774658204, 0.05251686477661133, 0.05269504165649414, 0.05274566268920899, 0.0527295036315918, 0.05286124801635742, 0.05269673538208008, 0.05297043228149414, 0.052790401458740234, 0.05286528015136719, 0.05271980667114258, 0.05289823913574219, 0.05265350341796875, 0.05266614532470703, 0.05287945556640625, 0.052658111572265624, 0.052625919342041014, 0.05271577453613281, 0.05273299026489258, 0.05292031860351563, 0.052628414154052734, 0.0526110725402832, 0.052838401794433595, 0.05269094467163086, 0.05272364807128906, 0.052576320648193356, 0.0528238410949707, 0.05272188949584961, 0.05280886459350586, 0.05279836654663086, 0.05294688034057617, 0.05262540817260742, 0.05279129409790039, 0.0539469108581543, 0.052819358825683595, 0.0524683837890625, 0.05235539245605469, 0.05234688186645508, 0.0524920654296875, 0.052213790893554685, 0.05220780944824219, 
0.052375072479248046, 0.052589023590087894, 0.05246515274047851, 0.05238771057128906, 0.05226099014282227, 0.05233689498901367, 0.0522523193359375, 0.05259324645996094, 0.05224179077148437, 0.05250838470458984, 0.052687488555908206, 0.052770687103271485, 0.05253299331665039, 0.05258636856079101, 0.052533344268798826, 0.052445823669433594, 0.052510784149169924, 0.05243280029296875, 0.052492385864257814, 0.052441089630126954, 0.05237760162353516, 0.05238579177856445, 0.052553375244140624, 0.05245497512817383, 0.05250332641601563, 0.05247590255737305, 0.05253529739379883, 0.05258844757080078, 0.052682849884033205, 0.05270057678222656, 0.05315366363525391, 0.05268332672119141, 0.05277097702026367, 0.05270732879638672, 0.05280080032348633, 0.05282889556884766, 0.05284659194946289, 0.05264998245239258, 0.052697311401367186, 0.05276649475097656, 0.052752384185791014, 0.05298777770996094, 0.05262963104248047, 0.052924415588378904, 0.05280767822265625, 0.05290950393676758, 0.052760478973388675, 0.05274281692504883, 0.05283964920043945, 0.052978462219238284, 0.05292851257324219, 0.053043201446533204, 0.052983806610107424, 0.05279471969604492, 0.052984321594238284, 0.054452224731445314, 0.0525814094543457, 0.05219356918334961, 0.05242486572265625, 0.05237404632568359, 0.052375553131103515, 0.052432064056396485, 0.05235385513305664, 0.05234268951416016, 0.052392032623291014, 0.052410369873046876, 0.05249187088012695, 0.05226444625854492, 0.05230684661865234, 0.05235411071777344, 0.05256492614746094, 0.05254288101196289, 0.05255215835571289, 0.052542945861816404, 0.053039134979248045, 0.05250073623657227, 0.05269295883178711, 0.052490238189697266, 0.05251881790161133, 0.05259110260009765, 0.05253529739379883, 0.052621311187744144, 0.05263359832763672, 0.05245951843261719, 0.05240627288818359, 0.05254348754882812, 0.05250867080688477, 0.05331478500366211, 0.05250128173828125, 0.05262454223632813, 0.05257088088989258, 0.05260835266113281, 0.05268755340576172, 0.053594207763671874, 0.05269500732421875, 0.052664321899414064, 0.052729854583740236, 0.05281695938110351, 0.052689857482910156, 0.05289350509643555, 0.05264812850952148, 0.05279443359375, 0.05275743865966797, 0.052654079437255856, 0.052807041168212894, 0.05268953704833984, 0.05319270324707031, 0.052872928619384765, 0.05299433517456055, 0.053083423614501954, 0.05287305450439453, 0.05295711898803711, 0.05305440139770508, 0.052795391082763675, 0.05300806427001953, 0.05299846267700195, 0.052864959716796875, 0.053055553436279296, 0.05395609664916992, 0.05293247985839844, 0.0523823356628418, 0.05233452987670899, 0.052340576171875, 0.052246753692626956, 0.05236326217651367, 0.05270044708251953, 0.0523455696105957, 0.05230713653564453, 0.05247264099121094, 0.052448833465576175, 0.05247545623779297, 0.05229657745361328, 0.05242995071411133, 0.05262150573730469, 0.05254217529296875, 0.05261836624145508, 0.05268975830078125, 0.05268191909790039, 0.05277779388427734, 0.052539390563964845, 0.05254553604125976, 0.05244211196899414, 0.052437152862548825, 0.052464481353759765, 0.05261056137084961, 0.0525296630859375, 0.052520286560058596, 0.052484638214111326, 0.05250060653686523, 0.052572158813476565, 0.052529151916503904, 0.05252096176147461, 0.052664321899414064, 0.052727550506591794, 0.05271088027954102, 0.05265488052368164, 0.0529194221496582, 0.052865921020507814, 0.052819969177246094, 0.05278441619873047, 0.052841182708740234, 0.05274176025390625, 0.05284441757202148, 0.05270534515380859, 0.05269241714477539, 0.05265510559082031, 0.05277667236328125, 
0.05263999938964844, 0.05268278503417969, 0.052612895965576174, 0.05276185607910156, 0.05277151870727539, 0.052830177307128905, 0.05278752136230469, 0.05286297607421875, 0.05281382369995117, 0.05295513534545898, 0.05294694519042969, 0.0528955192565918, 0.052764896392822266, 0.05296537780761719, 0.05419779205322266, 0.052918590545654294, 0.0522446403503418, 0.052334590911865236, 0.05253529739379883, 0.05231340789794922, 0.05240492630004883, 0.05244623947143555, 0.05240630340576172, 0.05240518569946289, 0.05239603042602539, 0.05246361541748047, 0.052440513610839845, 0.05250822448730469, 0.05249126434326172, 0.0525926399230957, 0.05248409652709961, 0.05307913589477539, 0.05283513641357422, 0.05313337707519531, 0.052717601776123044, 0.05270032119750977, 0.05265903854370117, 0.05271868896484375, 0.05259151840209961, 0.052496383666992184, 0.05260697555541992, 0.052552928924560545, 0.0525975341796875, 0.052724769592285156, 0.052531326293945316, 0.05254025650024414, 0.05265203094482422, 0.05248614501953125, 0.05260697555541992, 0.052759777069091796, 0.05284124755859375, 0.05276803207397461, 0.05277155303955078, 0.052789249420166016, 0.0527657585144043, 0.05288032150268555, 0.05287116622924805, 0.05271347045898438, 0.052819969177246094, 0.052671550750732425, 0.0525997428894043, 0.05265919876098633, 0.05292544174194336, 0.05282406234741211, 0.05318656158447266, 0.052736000061035154, 0.0531861457824707, 0.0529285774230957, 0.05290758514404297, 0.05279004669189453, 0.052923934936523434, 0.052894176483154295, 0.052944896697998046, 0.05298495864868164, 0.05323660659790039, 0.05311008071899414, 0.05303366470336914, 0.053989376068115234, 0.052838401794433595, 0.05245337677001953, 0.05245289611816406, 0.05256854248046875, 0.05236940765380859, 0.05248556900024414, 0.052507198333740235, 0.05233868789672851, 0.05262540817260742, 0.05238579177856445, 0.0529879035949707, 0.0524920654296875, 0.052690750122070314, 0.05237936019897461, 0.052416351318359374, 0.05238460922241211, 0.053585918426513675, 0.05269852828979492, 0.05273392105102539, 0.05266495895385742, 0.05266409683227539, 0.05272582244873047, 0.05249244689941406, 0.05247590255737305, 0.05255347061157226, 0.05256627273559571, 0.05259468841552734, 0.05265151977539063, 0.052635936737060546, 0.05346518325805664, 0.05295935821533203, 0.052744190216064454, 0.0526561279296875, 0.052674560546875, 0.052523006439208986, 0.052654079437255856, 0.0526778564453125, 0.05265817642211914, 0.05279619216918945, 0.05286297607421875, 0.052727550506591794, 0.05274035263061523, 0.05266636657714844, 0.052854400634765625, 0.05301692962646484, 0.05270233535766602, 0.05263561630249024, 0.05270214462280273, 0.05267251205444336, 0.052604927062988284, 0.05272908782958984, 0.05275315093994141, 0.05285638427734375, 0.05269343948364258, 0.052664321899414064, 0.05269708633422852, 0.053008384704589843, 0.05287321472167969, 0.05298483276367188, 0.05287212753295899, 0.052803489685058595, 0.052901664733886716, 0.05428009414672851, 0.052928607940673826, 0.05240627288818359, 0.05242265701293945, 0.052217857360839844, 0.052182144165039065, 0.05221260833740234, 0.05229568099975586, 0.052360481262207034, 0.052345024108886716, 0.052443679809570314, 0.052415679931640625, 0.05252703857421875, 0.05262195205688477, 0.05259494400024414, 0.0525384635925293, 0.05242950439453125, 0.05247721481323242, 0.05249529647827148, 0.05274524688720703, 0.052617313385009766, 0.05273689651489258, 0.052621311187744144, 0.05250643157958984, 0.05257849502563477, 0.05261888122558594, 0.05256780624389648, 
0.05249497604370117, 0.05257011032104492, 0.05262684631347656, 0.05272550582885742, 0.052587360382080076, 0.052676063537597656, 0.05271327972412109, 0.05259452819824219, 0.05266032028198242, 0.052575008392333984, 0.052776958465576174, 0.052843616485595706, 0.052585376739501956, 0.05284864044189453, 0.05289299011230469, 0.05286064147949219, 0.052656223297119144, 0.052935264587402345, 0.05258268737792969, 0.05260396957397461, 0.052620223999023434, 0.05255987167358398, 0.05271347045898438, 0.052686847686767575, 0.052829952239990235, 0.052716896057128905, 0.05270166397094726, 0.05284294509887695, 0.05260403060913086, 0.052792160034179685, 0.05285833740234375, 0.053077983856201175, 0.05275247955322265, 0.052902111053466795, 0.05298204803466797, 0.05283404922485352]",tokens/s,18.983684961901268,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,891.33056,6230.50752,0.0,5827.985408,5712.718848,s,1,7.233669921875,7.233669921875,0.0,7.233669921875,7.233669921875,7.233669921875,7.233669921875,[7.233669921875],,kWh,6.562171000041416e-06,7.162912528881505e-07,2.2836129379977255e-06,9.562075190927292e-06,,MB,1310.224384,6440.22272,0.0,6025.117696,5988.31104,s,10,0.6042631340026855,0.06042631340026855,0.0011322622670656955,0.06057470512390137,0.06188753166198731,0.06190342922210693,0.061916147270202636,"[0.061919326782226565, 0.05931193542480469, 0.06079171371459961, 0.06188399887084961, 0.05890342330932617, 0.06035769653320312, 0.06165731048583984, 0.060909984588623046, 0.058866912841796876, 0.05966083145141601]",tokens/s,4236.5649266774935,kWh,2.0622147117075055e-06,2.274247157577636e-07,1.36871588370426e-06,3.6583553111695288e-06,tokens/kWh,69976800.56346416,MB,1342.849024,6524.1088,0.0,6109.003776,6092.144128,s,10,18.22747326660156,1.8227473266601564,0.004333706049756072,1.8225454711914062,1.8280695922851562,1.828096563720703,1.8281181408691405,"[1.8211199951171875, 1.818463134765625, 1.823970947265625, 1.8278548583984375, 1.8189525146484375, 1.8150384521484375, 1.824962158203125, 1.82812353515625, 1.8280635986328124, 1.820924072265625]",tokens/s,34.563210752548855,kWh,5.2837018581626e-05,5.827104082985441e-06,3.498917431369561e-05,9.365329697830706e-05,tokens/kWh,672693.8829990439,,s,630,18.22422921180724,0.028927347955249608,0.0006782341227295325,0.028856143951416016,0.02907007350921631,0.029497062110900873,0.03303063617706299,"[0.03130979156494141, 0.02997056007385254, 0.02922700881958008, 0.028694528579711914, 0.028628511428833006, 0.02865760040283203, 0.02856105613708496, 0.028558208465576173, 0.028919807434082033, 0.02876006317138672, 0.028807167053222657, 0.028753919601440428, 0.028669952392578125, 0.028806751251220702, 0.02868627166748047, 0.028844512939453126, 0.028851551055908205, 0.0289532470703125, 0.028874496459960937, 0.028757600784301757, 0.028676767349243164, 0.028659711837768553, 0.028816736221313477, 0.02877507209777832, 0.02895030403137207, 
0.028850400924682617, 0.028796672821044922, 0.028813535690307618, 0.028829727172851562, 0.028827327728271485, 0.028856639862060548, 0.0288602237701416, 0.0289814395904541, 0.028868608474731446, 0.02913689613342285, 0.029089279174804687, 0.02895078468322754, 0.028962528228759766, 0.029147680282592774, 0.028921407699584963, 0.028818880081176758, 0.028873311996459962, 0.028767776489257813, 0.028894079208374022, 0.02888003158569336, 0.029079456329345704, 0.02886956787109375, 0.028822975158691408, 0.028883264541625975, 0.02893622398376465, 0.02896713638305664, 0.028855615615844727, 0.02879136085510254, 0.028731264114379883, 0.028782848358154298, 0.028804895401000976, 0.0287926082611084, 0.0288853759765625, 0.028876096725463866, 0.028866912841796873, 0.028862016677856445, 0.02888175964355469, 0.028868608474731446, 0.03312883377075195, 0.03092086410522461, 0.029757631301879882, 0.028945472717285155, 0.028492511749267577, 0.028649696350097655, 0.028693504333496093, 0.02851091194152832, 0.02815007972717285, 0.02815590476989746, 0.028469247817993162, 0.02876416015625, 0.028657760620117188, 0.02832784080505371, 0.0281661434173584, 0.028325887680053712, 0.028495264053344727, 0.028578399658203125, 0.02843231964111328, 0.02829484748840332, 0.028384735107421875, 0.028550239562988283, 0.02847724723815918, 0.028354272842407227, 0.028254592895507812, 0.028595903396606445, 0.028601823806762697, 0.02843724822998047, 0.028516351699829103, 0.028663007736206055, 0.02893401527404785, 0.028924192428588867, 0.028932735443115233, 0.028874752044677734, 0.028821504592895508, 0.0289300479888916, 0.02911027145385742, 0.02913260841369629, 0.02908297538757324, 0.029053152084350584, 0.028961408615112306, 0.028901376724243165, 0.028890304565429688, 0.028878847122192384, 0.02917398452758789, 0.029010528564453124, 0.02882697677612305, 0.02892406463623047, 0.028779008865356445, 0.02895257568359375, 0.02890547180175781, 0.028925952911376954, 0.028839935302734376, 0.02904640007019043, 0.029028736114501952, 0.028917728424072267, 0.02893379211425781, 0.028957056045532226, 0.028846080780029298, 0.02892185592651367, 0.02893414306640625, 0.029042688369750977, 0.028966880798339843, 0.033396736145019534, 0.030953216552734374, 0.029540607452392578, 0.028993535995483398, 0.028821504592895508, 0.02860851287841797, 0.028590080261230468, 0.0295731201171875, 0.028655616760253907, 0.028469247817993162, 0.028361888885498048, 0.028445375442504882, 0.028415199279785155, 0.028593088150024416, 0.028823040008544923, 0.028946943283081054, 0.0287825927734375, 0.02867558479309082, 0.028590591430664062, 0.02875187110900879, 0.028862464904785157, 0.02877225685119629, 0.028817792892456055, 0.028692031860351564, 0.028698848724365233, 0.02883884811401367, 0.028627935409545897, 0.02889081573486328, 0.02876860809326172, 0.028719104766845704, 0.02879897689819336, 0.028782047271728516, 0.028766752243041992, 0.028692480087280273, 0.028944320678710937, 0.029040447235107424, 0.029034591674804686, 0.028962528228759766, 0.028842432022094727, 0.028962528228759766, 0.028809503555297853, 0.02896406364440918, 0.028875551223754882, 0.028765663146972657, 0.02883772850036621, 0.028879552841186523, 0.02883168029785156, 0.02878060722351074, 0.028894527435302735, 0.029016767501831055, 0.0289234561920166, 0.02883628845214844, 0.02894156837463379, 0.028926591873168945, 0.02890755271911621, 0.029048927307128908, 0.028912895202636717, 0.028954559326171875, 0.029006656646728517, 0.028948352813720702, 0.029023391723632812, 0.02902729606628418, 0.028996864318847657, 
0.03385366439819336, 0.03116851234436035, 0.02974028778076172, 0.029133567810058592, 0.028821504592895508, 0.028516159057617188, 0.0286680965423584, 0.028769760131835936, 0.02864387130737305, 0.028686336517333984, 0.02876416015625, 0.028657663345336915, 0.028643552780151366, 0.028837503433227538, 0.02872319984436035, 0.028788896560668947, 0.028680192947387696, 0.02851785659790039, 0.028727840423583985, 0.028800575256347657, 0.02869481658935547, 0.028790912628173827, 0.029163551330566407, 0.02879635238647461, 0.028651456832885742, 0.02886911964416504, 0.02872457695007324, 0.028938623428344728, 0.028908992767333986, 0.028983999252319335, 0.028855808258056642, 0.028834592819213866, 0.02867977523803711, 0.028939775466918945, 0.028768415451049804, 0.02900864028930664, 0.028980287551879882, 0.029031360626220703, 0.029083648681640626, 0.028964864730834962, 0.028862464904785157, 0.02904617691040039, 0.029008480072021486, 0.028821504592895508, 0.028911359786987306, 0.02885219192504883, 0.028946207046508788, 0.02889132881164551, 0.02896847915649414, 0.028896032333374025, 0.028827487945556642, 0.028995744705200194, 0.02877235221862793, 0.02890547180175781, 0.028728927612304687, 0.0288505916595459, 0.02886396789550781, 0.029966880798339843, 0.030109535217285155, 0.028878303527832033, 0.02892393684387207, 0.028912288665771484, 0.028823360443115235, 0.03373049545288086, 0.031203359603881837, 0.029601791381835937, 0.028870399475097657, 0.028497888565063478, 0.028545312881469727, 0.028712671279907228, 0.02869478416442871, 0.028411231994628906, 0.02820310401916504, 0.028160608291625977, 0.028481056213378906, 0.02871753692626953, 0.028689952850341798, 0.02850217628479004, 0.028692800521850585, 0.028825599670410155, 0.028605663299560546, 0.028640031814575195, 0.02869808006286621, 0.028717023849487305, 0.02856812858581543, 0.028744863510131835, 0.028709728240966795, 0.028436384201049804, 0.028473440170288085, 0.02850201606750488, 0.02890131187438965, 0.02881952095031738, 0.02874313545227051, 0.028735584259033203, 0.02858812713623047, 0.028724735260009765, 0.02888175964355469, 0.028778495788574218, 0.02897849655151367, 0.028845888137817383, 0.028905344009399415, 0.028847103118896485, 0.028925888061523436, 0.028898591995239257, 0.028840736389160158, 0.028802560806274413, 0.028684799194335937, 0.02876963233947754, 0.02893257522583008, 0.02882784080505371, 0.028804319381713867, 0.028723455429077147, 0.02887286376953125, 0.02872563171386719, 0.028956672668457032, 0.02873958396911621, 0.02891484832763672, 0.02882441520690918, 0.029003103256225585, 0.028921760559082032, 0.028860448837280273, 0.028863040924072266, 0.02885343933105469, 0.02886502456665039, 0.02881113624572754, 0.02887126350402832, 0.032997920989990236, 0.03088739204406738, 0.029572832107543946, 0.028871456146240235, 0.028553216934204102, 0.02828463935852051, 0.0281147518157959, 0.02823139190673828, 0.028617376327514647, 0.02874991989135742, 0.02857369613647461, 0.028337791442871095, 0.028168512344360352, 0.028280767440795898, 0.028480640411376955, 0.02867737579345703, 0.02858367919921875, 0.02839481544494629, 0.02832896041870117, 0.028253408432006837, 0.02832598304748535, 0.028551103591918946, 0.02855571174621582, 0.028383232116699218, 0.028219648361206055, 0.028434175491333008, 0.028594175338745118, 0.028516351699829103, 0.02838118362426758, 0.02832774353027344, 0.028283327102661134, 0.028517951965332033, 0.02857721519470215, 0.028555328369140626, 0.028664512634277342, 0.029251232147216796, 0.029196640014648438, 0.029154943466186523, 
0.02906937599182129, 0.028761823654174803, 0.028949087142944335, 0.02895257568359375, 0.02905721664428711, 0.028915519714355468, 0.028935264587402344, 0.028913759231567384, 0.028777280807495118, 0.028948480606079102, 0.028999744415283205, 0.028936128616333007, 0.02895795249938965, 0.02881612777709961, 0.028910591125488282, 0.02885478401184082, 0.02901375961303711, 0.028928768157958983, 0.028907520294189453, 0.028932096481323243, 0.028893184661865235, 0.029033632278442384, 0.028990304946899415, 0.02894438362121582, 0.028861888885498045, 0.03286166381835937, 0.03071232032775879, 0.029261823654174804, 0.028841983795166014, 0.028461055755615236, 0.028588031768798827, 0.0287457275390625, 0.028652896881103517, 0.02841801643371582, 0.028450624465942383, 0.02843712043762207, 0.0285347843170166, 0.028689760208129883, 0.02890025520324707, 0.028720224380493164, 0.02871980857849121, 0.028692703247070312, 0.02855936050415039, 0.028882944107055664, 0.028833152770996094, 0.02887539291381836, 0.02887654495239258, 0.02874166488647461, 0.028856544494628905, 0.02880102348327637, 0.02898454475402832, 0.028910367965698243, 0.028757024765014648, 0.028854528427124024, 0.028787424087524414, 0.028940288543701172, 0.02889727973937988, 0.02893619155883789, 0.02891366386413574, 0.029037696838378906, 0.029037439346313476, 0.028911615371704103, 0.02905292892456055, 0.029114368438720704, 0.029048831939697265, 0.028882944107055664, 0.02895257568359375, 0.028956672668457032, 0.028862464904785157, 0.028867967605590822, 0.028934783935546875, 0.029038528442382812, 0.02892959976196289, 0.028900928497314453, 0.02904742431640625, 0.028931680679321288, 0.029004512786865236, 0.02899740791320801, 0.028905696868896484, 0.028980415344238283, 0.029145919799804687, 0.029001216888427734, 0.028912128448486327, 0.02900716781616211, 0.028981952667236327, 0.028944063186645507, 0.029032768249511717, 0.029038591384887694, 0.03304399871826172, 0.031019872665405273, 0.02956287956237793, 0.02905904006958008, 0.028715040206909178, 0.028612607955932616, 0.02839344024658203, 0.028329280853271483, 0.028254751205444337, 0.028458240509033204, 0.028575712203979493, 0.028742624282836915, 0.028528640747070313, 0.028807167053222657, 0.028856319427490236, 0.02877644729614258, 0.028941471099853514, 0.028791648864746094, 0.028819456100463867, 0.0287042236328125, 0.028932640075683594, 0.02875596809387207, 0.028931135177612304, 0.028721824645996093, 0.028727392196655273, 0.02873948860168457, 0.028725536346435546, 0.028991487503051756, 0.030219871520996092, 0.028794784545898438, 0.02887641525268555, 0.02884288024902344, 0.028880895614624022, 0.029042720794677734, 0.028975072860717772, 0.02913804817199707, 0.029365119934082032, 0.029443840026855468, 0.02931020736694336, 0.029199424743652343, 0.028934080123901366, 0.029087743759155273, 0.028961887359619142, 0.028732032775878907, 0.02888118362426758, 0.028915712356567383, 0.02889423942565918, 0.028742048263549806, 0.02896908760070801, 0.028895679473876952, 0.02906835174560547, 0.02904153633117676, 0.028959936141967773, 0.029076351165771484, 0.029022207260131837, 0.029006912231445314, 0.028931072235107422, 0.02903647994995117, 0.02901798439025879, 0.02891788864135742, 0.028999103546142577, 0.029067840576171875, 0.028935840606689453, 0.03422793579101562, 0.03133881568908691, 0.02992278480529785, 0.02923161506652832, 0.028854272842407228, 0.028725248336791992, 0.028559167861938475, 0.028485824584960937, 0.02870681571960449, 0.02875372886657715, 0.02865580749511719, 0.028661760330200195, 0.028817407608032225, 
0.02874163246154785, 0.028846080780029298, 0.0287825927734375, 0.028692480087280273, 0.028806495666503906, 0.028699296951293946, 0.02887868881225586, 0.028770463943481445, 0.028909568786621095, 0.028878847122192384, 0.02879692840576172, 0.028753759384155274, 0.028874656677246095, 0.028774656295776368, 0.02874777603149414, 0.028985343933105468, 0.028855968475341796, 0.02876041603088379, 0.02871641540527344, 0.028789375305175783, 0.028856319427490236, 0.02899504089355469, 0.028993888854980467, 0.0290993595123291, 0.029151424407958985, 0.029034975051879883, 0.02889257621765137, 0.029031200408935545, 0.029023935317993164, 0.02893212890625, 0.02887654495239258, 0.028846303939819337, 0.0289465274810791, 0.02897273635864258, 0.028897823333740233, 0.028915712356567383, 0.029042688369750977, 0.02896895980834961, 0.028894847869873046, 0.028932031631469728, 0.029012416839599608, 0.029128576278686525, 0.02894428825378418, 0.028814687728881835, 0.028904319763183594, 0.028819456100463867, 0.028994815826416016, 0.028939008712768555, 0.028962175369262697, 0.02893187141418457, 0.0344409294128418, 0.032059646606445315, 0.02977359962463379, 0.029014240264892577, 0.028685888290405272, 0.02851433563232422, 0.02879120063781738, 0.028778495788574218, 0.028481536865234375, 0.028641279220581056, 0.028667903900146483, 0.028661727905273438, 0.028517631530761717, 0.0287127685546875, 0.02880303955078125, 0.02854550361633301, 0.02840835189819336, 0.028425472259521484, 0.028356447219848632, 0.028332223892211916, 0.028230367660522462, 0.028391424179077147, 0.02864508819580078, 0.028627231597900392, 0.028411903381347657, 0.028452863693237306, 0.02854867172241211, 0.02884614372253418, 0.028856704711914063, 0.028757280349731445, 0.028647327423095705, 0.02853356742858887, 0.028448768615722656, 0.028708864212036132, 0.029124607086181642, 0.029165023803710936, 0.029003551483154297, 0.028951263427734374, 0.028989471435546876, 0.02895871925354004, 0.028889087677001952, 0.028802431106567383, 0.0287521915435791, 0.028772512435913087, 0.028842144012451172, 0.02876416015625, 0.028903423309326173, 0.02879692840576172, 0.02886150360107422, 0.02876076889038086, 0.028987648010253907, 0.028858367919921874, 0.028809215545654295, 0.02875775909423828, 0.02893552017211914, 0.028842912673950196, 0.028925952911376954, 0.028891136169433593, 0.02886150360107422, 0.02886751937866211, 0.02890547180175781, 0.02895580863952637, 0.029002304077148437]",tokens/s,34.56936327336307,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,891.101184,6182.273024,0.0,5779.750912,5773.960192,s,1,7.059513671875,7.059513671875,0.0,7.059513671875,7.059513671875,7.059513671875,7.059513671875,[7.059513671875],,kWh,5.918845945840682e-06,6.379662212087591e-07,2.109168353997548e-06,8.66598052104699e-06,,MB,1234.259968,6498.942976,0.0,6085.935104,6038.345728,s,10,2.1504409484863283,0.21504409484863282,0.009960307513119647,0.21905220794677735,0.2213324249267578,0.2220226272583008,0.22257478912353518,"[0.1867261047363281, 0.21322866821289063, 0.21253321838378905, 0.22044931030273437, 0.22117904663085938, 0.2184662780761719, 0.22271282958984376, 0.2196381378173828, 0.21582952880859374, 0.21967782592773438]",tokens/s,1190.4535215449446,kWh,5.695962256250088e-06,6.280494393691151e-07,3.7735767368078014e-06,1.0097588432427005e-05,tokens/kWh,25352588.0672549,MB,1239.71584,6519.914496,0.0,6106.906624,6086.544896,s,10,17.959033935546874,1.7959033935546873,0.0017538222568509985,1.7953265380859373,1.7978223754882814,1.798744073486328,1.7994814318847656,"[1.793896728515625, 1.7936160888671875, 1.79518408203125, 1.7976175537109376, 1.795468994140625, 1.799665771484375, 1.794830078125, 1.7951627197265625, 1.7963450927734375, 1.797246826171875]",tokens/s,35.0798379390008,kWh,5.304545940166292e-05,5.850842006211375e-06,3.514887106779211e-05,9.404517247566642e-05,tokens/kWh,669890.8443843924,,s,630,17.95668284606935,0.02850267118423704,0.0002760017601332727,0.028449007987976074,0.028654674720764158,0.028775333309173583,0.03012207580566406,"[0.029691871643066407, 0.028944416046142576, 0.028491775512695314, 0.028288320541381837, 0.028244672775268556, 0.028282880783081055, 0.028175807952880858, 0.028307680130004884, 0.028377439498901365, 0.028802528381347656, 0.02859062385559082, 0.02833350372314453, 0.028213823318481445, 0.028226816177368164, 0.028242687225341796, 0.0282457275390625, 0.028298879623413088, 0.02834089660644531, 0.028340192794799806, 0.028436511993408204, 0.028464448928833007, 0.029037248611450194, 0.0282926082611084, 0.028299135208129884, 0.028394048690795898, 0.028395584106445312, 0.028379135131835938, 0.028387327194213868, 0.028382816314697266, 0.028348352432250975, 0.028338655471801758, 0.02864240074157715, 0.0285316162109375, 0.028423488616943358, 0.02855766487121582, 0.028532991409301756, 0.028566911697387697, 0.02853321647644043, 0.028469343185424805, 0.028510143280029297, 0.028364704132080077, 0.02838764762878418, 0.028354528427124024, 0.028405792236328126, 0.02855526351928711, 0.029220767974853516, 0.028704864501953125, 0.028403360366821288, 0.028419647216796875, 0.028416799545288085, 0.028474624633789063, 0.028502784729003906, 0.028440288543701172, 0.028485919952392576, 0.028505632400512695, 0.02842412757873535, 0.02862272071838379, 0.028454944610595702, 0.028344959259033204, 0.02839756774902344, 0.02850201606750488, 0.028538047790527345, 0.028377920150756835, 0.03006732749938965, 0.029239295959472656, 0.02874297523498535, 0.02832863998413086, 0.02828646469116211, 0.028264192581176757, 0.028272735595703126, 0.028390047073364257, 0.02837708854675293, 0.02834432029724121, 0.028286975860595705, 0.028227584838867188, 0.028301311492919923, 0.0282391357421875, 0.028193504333496093, 0.028270336151123048, 0.02828227233886719, 0.028253023147583007, 0.028250112533569335, 0.02826576042175293, 0.028328672409057617, 0.028366847991943358, 0.028453952789306642, 0.028312511444091797, 0.028396768569946287, 0.02858678436279297, 0.02828044891357422, 0.02827916717529297, 
0.02839481544494629, 0.028387903213500976, 0.028358655929565428, 0.02833420753479004, 0.028456960678100586, 0.028493152618408204, 0.028619232177734374, 0.028673343658447266, 0.02845955276489258, 0.028617055892944335, 0.02850105667114258, 0.028588991165161132, 0.028479072570800783, 0.028504255294799805, 0.02844623947143555, 0.028398271560668945, 0.02844611167907715, 0.028501855850219728, 0.028448543548583984, 0.02846614456176758, 0.028585216522216798, 0.028463872909545898, 0.028508159637451173, 0.028530464172363282, 0.028448991775512696, 0.028471296310424804, 0.028534528732299804, 0.02847974395751953, 0.028594175338745118, 0.028495872497558594, 0.02860851287841797, 0.028473344802856446, 0.028657503128051758, 0.02852060890197754, 0.028546367645263672, 0.030122976303100586, 0.02917513656616211, 0.028735103607177733, 0.028435455322265626, 0.028379135131835938, 0.028334079742431642, 0.02821855926513672, 0.02827961540222168, 0.028370943069458008, 0.028342271804809572, 0.02833568000793457, 0.0283789119720459, 0.028439199447631836, 0.028309440612792967, 0.028380704879760743, 0.028330528259277343, 0.028339839935302733, 0.028368928909301757, 0.02832419204711914, 0.02835456085205078, 0.02838937568664551, 0.028358655929565428, 0.028438528060913085, 0.028390399932861327, 0.028431360244750976, 0.02837299156188965, 0.028337440490722655, 0.028357343673706056, 0.028374208450317382, 0.028377920150756835, 0.02837299156188965, 0.0283790397644043, 0.028394687652587892, 0.028570528030395507, 0.02851430320739746, 0.028621856689453124, 0.02856585693359375, 0.02863484764099121, 0.02855743980407715, 0.02852534484863281, 0.02855526351928711, 0.02848739242553711, 0.02838492774963379, 0.028400192260742186, 0.028518463134765627, 0.028536832809448243, 0.0284751033782959, 0.028638975143432617, 0.028698879241943358, 0.028539167404174805, 0.028606464385986328, 0.02850796890258789, 0.028508352279663085, 0.028473344802856446, 0.028498943328857423, 0.028627967834472655, 0.028571264266967773, 0.028501728057861327, 0.028539007186889648, 0.028493471145629883, 0.02862505531311035, 0.02844540786743164, 0.028405088424682617, 0.030119871139526366, 0.029222816467285157, 0.0286844482421875, 0.02845280075073242, 0.028415103912353516, 0.028353471755981446, 0.028305408477783203, 0.028431936264038084, 0.02839187240600586, 0.028386688232421874, 0.028391616821289063, 0.028345983505249025, 0.02857766342163086, 0.028330591201782225, 0.028335744857788087, 0.028344575881958007, 0.02828745651245117, 0.028380800247192382, 0.028320127487182618, 0.02841926383972168, 0.02834105682373047, 0.028333311080932618, 0.028531455993652345, 0.028434368133544923, 0.028470848083496095, 0.028419584274291993, 0.028400640487670898, 0.02835625648498535, 0.028406112670898438, 0.02840278434753418, 0.028455839157104493, 0.028459007263183594, 0.02847862434387207, 0.02875379180908203, 0.028817472457885743, 0.028653631210327147, 0.02863395118713379, 0.028497119903564454, 0.0285765438079834, 0.028607776641845703, 0.02852118492126465, 0.028550880432128906, 0.02846748733520508, 0.028432384490966797, 0.028391424179077147, 0.028421152114868165, 0.028525535583496093, 0.0286167049407959, 0.02857574462890625, 0.028618719100952147, 0.028524511337280272, 0.028555328369140626, 0.028528640747070313, 0.028571584701538085, 0.028610624313354493, 0.02859552001953125, 0.02863920021057129, 0.02858982467651367, 0.02859926414489746, 0.028598079681396483, 0.028559167861938475, 0.028719488143920897, 0.028631040573120117, 0.030119136810302736, 0.029244192123413087, 0.028731231689453126, 
0.028516511917114257, 0.028407808303833007, 0.0283504638671875, 0.02830745506286621, 0.028339231491088867, 0.02833024024963379, 0.028365535736083983, 0.028425344467163084, 0.028326784133911133, 0.028339807510375976, 0.028293535232543944, 0.028266496658325195, 0.02836275291442871, 0.02839091110229492, 0.028376703262329103, 0.028329856872558595, 0.02842428779602051, 0.02833091163635254, 0.028360671997070312, 0.028336160659790038, 0.02836479949951172, 0.02841766357421875, 0.028399999618530274, 0.028384416580200196, 0.028432992935180663, 0.028440576553344726, 0.028449024200439453, 0.02838528060913086, 0.028438528060913085, 0.02840166473388672, 0.028521760940551758, 0.028559328079223633, 0.02865184020996094, 0.02877609634399414, 0.0285949764251709, 0.028614656448364258, 0.028648479461669922, 0.028332319259643555, 0.028523199081420897, 0.028507200241088868, 0.028453535079956054, 0.02843267250061035, 0.02852979278564453, 0.028489984512329102, 0.02854140853881836, 0.028499807357788086, 0.02843065643310547, 0.028610559463500978, 0.028526464462280274, 0.028509952545166015, 0.02844710350036621, 0.028499839782714843, 0.028548799514770507, 0.028442623138427735, 0.028494272232055664, 0.028520448684692383, 0.02852454376220703, 0.028542015075683595, 0.028555456161499022, 0.028516767501831054, 0.03014441680908203, 0.029197471618652344, 0.028654367446899413, 0.02846281623840332, 0.02834886360168457, 0.028230911254882814, 0.028190528869628906, 0.028570335388183595, 0.028408000946044922, 0.02842560005187988, 0.028441247940063478, 0.02838096046447754, 0.02834454345703125, 0.02834022331237793, 0.02844825553894043, 0.02831171226501465, 0.028264799118041993, 0.02830335998535156, 0.02827644729614258, 0.028649696350097655, 0.028407871246337892, 0.028405439376831054, 0.028331520080566407, 0.028334911346435548, 0.028395519256591797, 0.02834432029724121, 0.028309503555297853, 0.02835456085205078, 0.02841983985900879, 0.028422208786010743, 0.02899168014526367, 0.02941542434692383, 0.02852454376220703, 0.028601919174194336, 0.028584384918212892, 0.028667903900146483, 0.028706560134887694, 0.02867180824279785, 0.028461183547973633, 0.028501504898071288, 0.02846531105041504, 0.02840959930419922, 0.02852547264099121, 0.030291967391967774, 0.028765472412109375, 0.028580575942993163, 0.02862076759338379, 0.028644384384155272, 0.028543296813964843, 0.028518463134765627, 0.028496511459350587, 0.028508159637451173, 0.0284932804107666, 0.02852681541442871, 0.028573055267333985, 0.028477951049804686, 0.02862291145324707, 0.028595840454101563, 0.028491872787475586, 0.028432767868041994, 0.028520736694335937, 0.02854297637939453, 0.0285118408203125, 0.030223424911499024, 0.029143999099731446, 0.028718591690063477, 0.028392959594726562, 0.028435455322265626, 0.028329984664916992, 0.028223487854003908, 0.028358432769775392, 0.0283275203704834, 0.028312192916870118, 0.028271808624267578, 0.02831612777709961, 0.028300928115844726, 0.028326623916625975, 0.028358655929565428, 0.028334079742431642, 0.02835369682312012, 0.028378976821899413, 0.028378080368041993, 0.028354591369628906, 0.02840166473388672, 0.028325887680053712, 0.02836627197265625, 0.028293472290039062, 0.028438112258911134, 0.028390016555786133, 0.02835990333557129, 0.028375295639038085, 0.02839401626586914, 0.02831715202331543, 0.028381343841552734, 0.028391807556152344, 0.028395519256591797, 0.028591712951660155, 0.028574111938476563, 0.028694528579711914, 0.028657440185546876, 0.028509632110595703, 0.028594207763671876, 0.028433151245117187, 0.028491584777832032, 
0.02849510383605957, 0.028368831634521485, 0.028820480346679687, 0.028605663299560546, 0.028614591598510743, 0.028496736526489257, 0.02838684844970703, 0.028457151412963868, 0.028501663208007812, 0.028518655776977538, 0.028694431304931642, 0.028463584899902344, 0.02870204734802246, 0.028516288757324218, 0.028506528854370116, 0.028418367385864257, 0.028545024871826172, 0.028495840072631836, 0.028368928909301757, 0.02851430320739746, 0.028515520095825194, 0.028379423141479492, 0.030144512176513674, 0.0293253116607666, 0.02877440071105957, 0.028475391387939454, 0.02838937568664551, 0.028243967056274414, 0.028307136535644532, 0.02829539108276367, 0.02833008003234863, 0.028335487365722656, 0.02829580879211426, 0.028378591537475586, 0.028340768814086915, 0.02836479949951172, 0.028379072189331056, 0.028344383239746095, 0.02829516792297363, 0.028323583602905274, 0.028336320877075195, 0.028267999649047852, 0.02834217643737793, 0.02831430435180664, 0.02838118362426758, 0.028313119888305663, 0.02840547180175781, 0.02838809585571289, 0.028383232116699218, 0.02834841537475586, 0.028378496170043944, 0.028375680923461915, 0.02838118362426758, 0.02852012825012207, 0.02855311965942383, 0.02861916732788086, 0.028595840454101563, 0.02872563171386719, 0.028659711837768553, 0.028674047470092775, 0.02851840019226074, 0.02858304023742676, 0.02839193534851074, 0.028410175323486327, 0.028362335205078124, 0.02837881660461426, 0.028355039596557618, 0.028678047180175782, 0.028477855682373047, 0.02857164764404297, 0.028495616912841797, 0.02850432014465332, 0.028456640243530274, 0.028401439666748046, 0.028604896545410156, 0.02847648048400879, 0.02883612823486328, 0.028571744918823243, 0.0284466552734375, 0.02849452781677246, 0.028528640747070313, 0.0284421443939209, 0.02854550361633301, 0.028520448684692383, 0.02856345558166504, 0.030326271057128908, 0.029401632308959962, 0.02882761573791504, 0.028489599227905272, 0.02837673568725586, 0.028292959213256835, 0.028301952362060546, 0.028239456176757813, 0.028247615814208985, 0.028347232818603515, 0.028386783599853516, 0.028346656799316406, 0.028377344131469726, 0.028297216415405273, 0.028407808303833007, 0.028299264907836914, 0.028284927368164063, 0.028425504684448242, 0.02836499214172363, 0.02842825508117676, 0.028426015853881836, 0.028401695251464843, 0.028316255569458007, 0.028328096389770508, 0.028435871124267577, 0.028359264373779298, 0.028308479309082032, 0.02835660743713379, 0.028390399932861327, 0.028379135131835938, 0.02836479949951172, 0.02878054428100586, 0.028556991577148437, 0.028504383087158202, 0.028567007064819336, 0.028697120666503907, 0.028651519775390624, 0.028704767227172853, 0.02864918327331543, 0.02865385627746582, 0.02844460868835449, 0.028442176818847656, 0.028459455490112303, 0.0285020809173584, 0.028545024871826172, 0.028665824890136717, 0.028559392929077148, 0.028495872497558594, 0.028536319732666016, 0.02851433563232422, 0.028455392837524414, 0.02852659225463867, 0.02848476791381836, 0.028574560165405275, 0.02852249526977539, 0.02852659225463867, 0.02851840019226074, 0.0284498233795166, 0.028462047576904296, 0.02857779121398926, 0.028536159515380858, 0.028569343566894532, 0.028440799713134766, 0.030342079162597655, 0.029490591049194336, 0.02874019241333008, 0.028433759689331053, 0.02835456085205078, 0.028314271926879884, 0.02833955192565918, 0.02834883117675781, 0.02847769546508789, 0.028389312744140624, 0.02833964729309082, 0.028340320587158203, 0.02839401626586914, 0.02832918357849121, 0.02832259178161621, 0.028313600540161132, 
0.028296863555908203, 0.02853059196472168, 0.02833990478515625, 0.02839219284057617, 0.02840985679626465, 0.028364639282226562, 0.028430496215820313, 0.02831974411010742, 0.02840768051147461, 0.02839347267150879, 0.028426368713378905, 0.028434431076049805, 0.028484928131103517, 0.028368608474731445, 0.028525535583496093, 0.028487680435180664, 0.02850598335266113, 0.02854710388183594, 0.02859017562866211, 0.028673919677734375, 0.028747743606567382, 0.028743839263916014, 0.028614656448364258, 0.028639232635498047, 0.028462688446044923, 0.028445087432861327, 0.02836275291442871, 0.02847884750366211, 0.02859449577331543, 0.028622495651245118, 0.028471967697143555, 0.028524511337280272, 0.028545055389404297, 0.028489503860473633, 0.028502143859863282, 0.028550399780273437, 0.02863190460205078, 0.02858598327636719, 0.028545024871826172, 0.028542751312255858, 0.02851043128967285, 0.028475391387939454, 0.028577472686767576, 0.028532608032226563, 0.02857209587097168, 0.028527967453002928, 0.028521024703979492]",tokens/s,35.08443098319271,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,888.6272,813.563904,0.0,411.041792,391.374848,s,1,7.434431640625,7.434431640625,0.0,7.434431640625,7.434431640625,7.434431640625,7.434431640625,[7.434431640625],,kWh,4.59192244584301e-06,4.991675271076693e-07,9.080562819868687e-07,5.999146254937548e-06,,MB,1331.154944,891.158528,0.0,473.956352,454.832128,s,14,0.18492969512939453,0.013209263937813895,0.00042342614826914633,0.013151984214782714,0.01344154577255249,0.013843049716949463,0.014420756855010985,"[0.012909536361694335, 0.013454208374023438, 0.013163680076599122, 0.013140288352966309, 0.01319484806060791, 0.013061663627624513, 0.01287615966796875, 0.012943807601928712, 0.013411999702453613, 0.013260704040527344, 0.013257247924804687, 0.012857215881347656, 0.012833151817321777, 0.014565183639526367]",tokens/s,19380.338011655134,kWh,3.7239050256692686e-07,4.106789813704789e-08,2.197117687398041e-07,6.331701694437788e-07,tokens/kWh,404314688.774565,MB,1370.43968,918.421504,0.0,501.219328,454.834688,s,14,10.329214599609374,0.737801042829241,0.010965657993545389,0.7356738891601562,0.7557709045410157,0.7607003814697265,0.7607912481689453,"[0.7341726684570312, 0.7444114990234375, 0.7404459838867188, 0.7388132934570313, 0.7342659301757812, 0.7313853149414062, 0.7383519287109375, 0.7370818481445313, 0.76081396484375, 0.7606392211914063, 0.7336757202148437, 0.7257778930664063, 0.7227520751953125, 0.7266272583007812]",tokens/s,85.3888736161368,kWh,2.0830976734934495e-05,2.2973009438155192e-06,8.093092500544703e-06,3.122137017929472e-05,tokens/kWh,2017848.6606516752,,s,882,10.321465736389168,0.011702342104749616,0.00027808413158231156,0.011612400054931641,0.012051110363006592,0.012110157203674315,0.0124839127922058,"[0.011292511940002442, 0.011499423980712891, 0.011542752265930175, 0.011489600181579589, 0.011478655815124512, 0.011446944236755371, 
0.01207040023803711, 0.011597855567932129, 0.012155072212219238, 0.011593728065490723, 0.011544863700866699, 0.011691647529602051, 0.011529472351074219, 0.011649888038635254, 0.01145366382598877, 0.011457183837890624, 0.01139724826812744, 0.01140940761566162, 0.011407199859619141, 0.011421055793762206, 0.011478079795837402, 0.01149129581451416, 0.011507455825805663, 0.011431903839111328, 0.011414719581604003, 0.011613023757934571, 0.013220128059387206, 0.012351519584655761, 0.011664480209350585, 0.011663007736206054, 0.011624735832214355, 0.01163702392578125, 0.011570879936218262, 0.011632415771484375, 0.011577983856201172, 0.011562527656555176, 0.011469599723815917, 0.011435520172119141, 0.01142627239227295, 0.011436256408691406, 0.011474335670471191, 0.011688608169555664, 0.011480768203735352, 0.01148521614074707, 0.01153983974456787, 0.011522687911987305, 0.011476192474365234, 0.011510560035705567, 0.011491104125976563, 0.011560447692871094, 0.011504384040832519, 0.011618144035339355, 0.01173465633392334, 0.011698559761047363, 0.011857343673706054, 0.011842240333557129, 0.011905023574829102, 0.01184563159942627, 0.011862239837646484, 0.011861920356750488, 0.012173184394836426, 0.012089344024658203, 0.01204259204864502, 0.011657440185546875, 0.01310688018798828, 0.012635616302490235, 0.012101984024047852, 0.013027615547180176, 0.011786047935485839, 0.011804863929748536, 0.011884384155273438, 0.011820223808288574, 0.011785183906555176, 0.012052160263061523, 0.011575327873229981, 0.011594016075134278, 0.011519776344299317, 0.011524319648742676, 0.011476351737976074, 0.011678336143493652, 0.011537952423095703, 0.011455231666564942, 0.011395999908447266, 0.011625280380249023, 0.011438400268554687, 0.011575296401977539, 0.011470784187316894, 0.011627424240112304, 0.011570240020751953, 0.011636704444885253, 0.011577088356018067, 0.012046655654907227, 0.011740544319152831, 0.011712896347045898, 0.011646656036376954, 0.011708736419677734, 0.011880448341369629, 0.01171455955505371, 0.011871359825134277, 0.011668352127075196, 0.01177337646484375, 0.011858495712280273, 0.011904735565185547, 0.011915552139282226, 0.011831295967102052, 0.011909088134765626, 0.011919391632080079, 0.012013343811035157, 0.011865376472473144, 0.011923551559448242, 0.011892767906188964, 0.011756447792053223, 0.01170867156982422, 0.011818304061889648, 0.011748831748962402, 0.011784064292907714, 0.011750240325927735, 0.011749055862426758, 0.01184329605102539, 0.011784000396728516, 0.011800928115844727, 0.011842207908630371, 0.011841504096984864, 0.011934847831726074, 0.011832287788391113, 0.011911040306091308, 0.011841888427734376, 0.011927552223205566, 0.011838656425476074, 0.01201039981842041, 0.011855263710021973, 0.011928288459777833, 0.011874208450317383, 0.011868127822875977, 0.011836864471435548, 0.011799103736877441, 0.011705408096313477, 0.01186019229888916, 0.011837504386901855, 0.012079327583312989, 0.011742688179016114, 0.01171132755279541, 0.011634816169738769, 0.011589632034301758, 0.011576640129089355, 0.011547327995300293, 0.011687295913696289, 0.011747967720031739, 0.0117391357421875, 0.011575072288513184, 0.011614432334899902, 0.011667455673217773, 0.01153171157836914, 0.011553631782531739, 0.011603487968444823, 0.011654751777648926, 0.011655072212219238, 0.011614912033081055, 0.011604191780090332, 0.011517727851867676, 0.011485247611999512, 0.01150496006011963, 0.011568927764892578, 0.011575488090515137, 0.011514528274536133, 0.011521535873413086, 0.011463168144226075, 0.011900927543640137, 
0.011692031860351563, 0.011772192001342774, 0.011749343872070312, 0.011762880325317382, 0.01195680046081543, 0.011873855590820312, 0.012071680068969727, 0.01183516788482666, 0.011774016380310058, 0.011804832458496093, 0.011804351806640625, 0.011812895774841308, 0.011866432189941406, 0.012051199913024902, 0.011992032051086426, 0.011959712028503417, 0.011810784339904784, 0.011747903823852539, 0.011692031860351563, 0.011788288116455077, 0.011717887878417968, 0.011403264045715332, 0.01172480010986328, 0.011675456047058105, 0.011548864364624023, 0.011562911987304688, 0.01158672046661377, 0.01149619197845459, 0.011606207847595215, 0.011463935852050782, 0.011512576103210449, 0.011489279747009277, 0.0114803524017334, 0.011432671546936035, 0.011491168022155761, 0.011488448143005371, 0.011418208122253418, 0.01158182430267334, 0.01142579174041748, 0.011458368301391601, 0.011454208374023438, 0.011430624008178712, 0.011466367721557617, 0.011517215728759765, 0.011429951667785645, 0.011533056259155273, 0.011575296401977539, 0.01150972843170166, 0.011496607780456542, 0.011452672004699706, 0.011522687911987305, 0.011624095916748046, 0.011675999641418458, 0.011571200370788574, 0.01159609603881836, 0.011612128257751465, 0.011636447906494141, 0.011737024307250977, 0.01174556827545166, 0.011628288269042969, 0.011676735877990722, 0.011872735977172852, 0.011894783973693847, 0.011934207916259766, 0.012019712448120118, 0.011970527648925781, 0.012081184387207032, 0.012191007614135742, 0.012106464385986329, 0.01201471996307373, 0.011961215972900391, 0.012031999588012696, 0.011929599761962891, 0.012856767654418946, 0.01196294403076172, 0.011878399848937989, 0.01183465576171875, 0.011715295791625977, 0.011830400466918946, 0.011942527770996093, 0.011927264213562012, 0.01200387191772461, 0.011988991737365723, 0.012426719665527344, 0.012014368057250976, 0.012140543937683105, 0.012293503761291505, 0.01206716823577881, 0.011991328239440917, 0.012209280014038086, 0.01204911994934082, 0.011992511749267578, 0.012026271820068359, 0.011935232162475586, 0.011918144226074219, 0.011909119606018067, 0.011927455902099609, 0.011995231628417969, 0.011892640113830566, 0.011927647590637207, 0.01186012840270996, 0.011714336395263671, 0.011700608253479005, 0.011581151962280274, 0.011614336013793945, 0.011618144035339355, 0.01154047966003418, 0.01149884796142578, 0.01147152042388916, 0.011445440292358399, 0.011434559822082519, 0.011391231536865234, 0.011546239852905274, 0.011450752258300782, 0.011419679641723633, 0.011452383995056152, 0.011464896202087403, 0.011433792114257813, 0.01145030403137207, 0.01143609619140625, 0.011408800125122071, 0.011397727966308594, 0.011460800170898438, 0.01143331241607666, 0.011463168144226075, 0.01141756820678711, 0.01149728012084961, 0.011475232124328614, 0.011454367637634277, 0.011474464416503906, 0.01148361587524414, 0.011549983978271484, 0.011551775932312013, 0.011511263847351074, 0.011476415634155274, 0.011434368133544922, 0.01140777587890625, 0.011458656311035157, 0.011473152160644531, 0.011626144409179688, 0.011567104339599609, 0.011675647735595703, 0.011576736450195312, 0.011686495780944824, 0.011595775604248047, 0.011576704025268555, 0.01176841640472412, 0.011558912277221679, 0.011785247802734374, 0.011795424461364746, 0.011755328178405761, 0.011766176223754882, 0.011757247924804688, 0.011819104194641113, 0.011808128356933593, 0.011803263664245606, 0.011726816177368165, 0.01175759983062744, 0.011712544441223145, 0.012619008064270019, 0.011811552047729492, 0.01174118423461914, 0.011746432304382324, 
0.011701120376586913, 0.011624223709106445, 0.011577343940734864, 0.011546624183654786, 0.0118438081741333, 0.011614208221435546, 0.011621888160705567, 0.01146444797515869, 0.011400159835815429, 0.011554911613464355, 0.011660991668701172, 0.01148851203918457, 0.011458368301391601, 0.011417823791503906, 0.011406047821044922, 0.011415871620178222, 0.011456192016601563, 0.011386207580566406, 0.011416319847106934, 0.011430975914001464, 0.011426655769348145, 0.011407551765441894, 0.011463711738586426, 0.011416352272033692, 0.011449983596801758, 0.011439807891845703, 0.01146771240234375, 0.011501312255859375, 0.01140121555328369, 0.011533439636230469, 0.011510784149169923, 0.011532159805297851, 0.011542079925537109, 0.011538751602172851, 0.0115098876953125, 0.011628704071044922, 0.011747488021850586, 0.011724032402038575, 0.01164355182647705, 0.011587072372436523, 0.011526432037353516, 0.011533984184265137, 0.011585887908935546, 0.01157759952545166, 0.011708064079284668, 0.011548768043518067, 0.011483391761779786, 0.011309439659118653, 0.011533120155334472, 0.011521408081054688, 0.011687999725341796, 0.01170467185974121, 0.011630080223083495, 0.01155123233795166, 0.012186719894409179, 0.013980575561523437, 0.011622400283813476, 0.011569279670715332, 0.011571071624755859, 0.011623744010925294, 0.012031744003295899, 0.011829855918884278, 0.011890368461608887, 0.011661343574523926, 0.01170905590057373, 0.011722687721252442, 0.011636704444885253, 0.01167369556427002, 0.011866111755371094, 0.011743391990661621, 0.011677568435668945, 0.011633983612060547, 0.011567839622497559, 0.011628288269042969, 0.011854240417480469, 0.011863200187683106, 0.011680383682250977, 0.011904671669006347, 0.012091744422912597, 0.011937791824340821, 0.011812512397766112, 0.01173087978363037, 0.011665823936462403, 0.011660703659057617, 0.011682271957397461, 0.011666624069213868, 0.011635680198669434, 0.0116943998336792, 0.01163257598876953, 0.011740896224975586, 0.011702272415161133, 0.01153007984161377, 0.01153654384613037, 0.01148300838470459, 0.011576576232910156, 0.011660160064697266, 0.012152928352355957, 0.0115480318069458, 0.011558815956115723, 0.01146233558654785, 0.011637824058532715, 0.011513728141784668, 0.01154867172241211, 0.011563232421875, 0.011569952011108399, 0.011486080169677735, 0.011534560203552245, 0.011558688163757325, 0.011569279670715332, 0.011612159729003906, 0.01136467170715332, 0.011537728309631347, 0.011629247665405273, 0.011614208221435546, 0.011610112190246581, 0.011685983657836914, 0.011664992332458496, 0.011716959953308105, 0.011737088203430175, 0.011673184394836425, 0.011637120246887208, 0.011620256423950195, 0.011589728355407714, 0.011573087692260743, 0.011632800102233887, 0.011564255714416504, 0.011551520347595216, 0.011501567840576172, 0.011604063987731934, 0.011462559700012207, 0.011525664329528808, 0.011471232414245606, 0.01151308822631836, 0.01150425624847412, 0.011518176078796388, 0.011667167663574219, 0.011649312019348145, 0.011538432121276856, 0.011540096282958984, 0.011534848213195802, 0.011469023704528809, 0.011529952049255372, 0.01148089599609375, 0.011461055755615235, 0.011463616371154785, 0.011590399742126466, 0.011483296394348145, 0.011528032302856445, 0.011448320388793945, 0.011531488418579101, 0.011554783821105957, 0.011831199645996094, 0.011941920280456543, 0.011860768318176269, 0.011794207572937011, 0.011788607597351074, 0.01184496021270752, 0.01191708755493164, 0.011905664443969727, 0.011897088050842285, 0.0120381441116333, 0.011959296226501465, 0.011966464042663574, 
0.012009663581848145, 0.011991711616516113, 0.012064288139343262, 0.012060319900512696, 0.011995583534240723, 0.011944479942321777, 0.011913215637207031, 0.01189475154876709, 0.012001312255859375, 0.011913215637207031, 0.011853568077087403, 0.011957951545715332, 0.011982560157775879, 0.011960576057434082, 0.011980511665344239, 0.012121024131774903, 0.012098560333251953, 0.012091967582702636, 0.012095871925354005, 0.012061984062194823, 0.012015935897827148, 0.012015647888183593, 0.012003168106079102, 0.01200387191772461, 0.012005375862121583, 0.012029120445251465, 0.012011903762817383, 0.01205686378479004, 0.012011103630065918, 0.011890432357788087, 0.012032511711120606, 0.012028415679931641, 0.011966272354125977, 0.011974656105041503, 0.012083295822143555, 0.012040096282958984, 0.011953503608703613, 0.01194598388671875, 0.012008416175842284, 0.011894463539123535, 0.012003616333007812, 0.012025568008422851, 0.012074175834655762, 0.012056672096252441, 0.012036895751953125, 0.012320704460144043, 0.01227564811706543, 0.01213856029510498, 0.01207091236114502, 0.012016799926757813, 0.012110272407531738, 0.012050304412841797, 0.012067359924316406, 0.01198316764831543, 0.01201734447479248, 0.012019040107727051, 0.01204700756072998, 0.012040032386779784, 0.012042400360107421, 0.012086527824401855, 0.011955039978027344, 0.012002495765686036, 0.011995871543884278, 0.013282496452331543, 0.01218233585357666, 0.012142592430114747, 0.011929696083068847, 0.011999456405639649, 0.01210540771484375, 0.011925408363342285, 0.011954272270202636, 0.011995136260986328, 0.013051424026489258, 0.012403807640075683, 0.012016544342041016, 0.011984895706176758, 0.011950079917907714, 0.011923680305480957, 0.011886015892028808, 0.012452223777770995, 0.0123985595703125, 0.012089568138122559, 0.012029727935791016, 0.01200704002380371, 0.011925888061523438, 0.012101344108581543, 0.012136159896850586, 0.012116543769836426, 0.01211353588104248, 0.012014176368713379, 0.012039392471313477, 0.012113727569580079, 0.012077823638916015, 0.012132320404052734, 0.011964768409729003, 0.011962047576904297, 0.011978816032409668, 0.012027199745178224, 0.01206275177001953, 0.012042783737182617, 0.012060256004333495, 0.01208687973022461, 0.012005279541015625, 0.011991935729980469, 0.012128479957580566, 0.01211961555480957, 0.012194111824035644, 0.012193792343139649, 0.012059935569763184, 0.01195081615447998, 0.012070655822753906, 0.012058400154113769, 0.012024224281311035, 0.01208140754699707, 0.012074463844299317, 0.01211580753326416, 0.01210531234741211, 0.012084128379821778, 0.011994463920593262, 0.012020256042480469, 0.012041407585144043, 0.012039039611816406, 0.011992223739624023, 0.012000224113464356, 0.011943872451782227, 0.011952128410339356, 0.011917344093322754, 0.012005151748657227, 0.012103872299194336, 0.012041567802429199, 0.0121079683303833, 0.01202995204925537, 0.01208566379547119, 0.012143808364868164, 0.01214515209197998, 0.012114303588867188, 0.011885184288024903, 0.012030207633972168, 0.012165184020996095, 0.012056575775146485, 0.012039775848388673, 0.012052127838134766, 0.011996031761169434, 0.012092800140380859, 0.012017248153686523, 0.01203225612640381, 0.011977375984191895, 0.011931648254394531, 0.01194803237915039, 0.011919520378112792, 0.011777888298034668, 0.011702272415161133, 0.011700448036193847, 0.011675423622131347, 0.01154867172241211, 0.011558976173400879, 0.01165510368347168, 0.011526144027709961, 0.01147475242614746, 0.011436223983764648, 0.011499072074890137, 0.011405119895935059, 0.0114901762008667, 
0.011519968032836914, 0.01149510383605957, 0.011475040435791015, 0.011435456275939941, 0.011434304237365722, 0.011497344017028809, 0.01150102424621582, 0.011631104469299316, 0.011604384422302246, 0.011486495971679687, 0.011497695922851563, 0.011635199546813964, 0.011445280075073241, 0.011547295570373536, 0.011481696128845215, 0.011578751564025878, 0.011585887908935546, 0.011555999755859374, 0.01153110408782959, 0.011829312324523927, 0.011529696464538575, 0.011413984298706055, 0.011556863784790039, 0.011439583778381347, 0.011536160469055175, 0.011467007637023926, 0.011469311714172363, 0.011507712364196777, 0.011508735656738281, 0.01148806381225586, 0.011606080055236816, 0.011493247985839844, 0.011507967948913574, 0.011412896156311036, 0.011413279533386231, 0.01147494411468506, 0.011206815719604492, 0.011478912353515626, 0.011476991653442382, 0.011379936218261718, 0.011441984176635743, 0.011467552185058594, 0.011456800460815429, 0.011495039939880372, 0.011518239974975586, 0.011525664329528808, 0.011538911819458007, 0.011488287925720216, 0.011481375694274902, 0.011552576065063477, 0.01158454418182373, 0.011492416381835938, 0.011571999549865722, 0.011601920127868653, 0.011528223991394042, 0.011451807975769043, 0.011483712196350098, 0.011488927841186523, 0.011493344306945801, 0.011473535537719726, 0.011519743919372558, 0.011532256126403809, 0.011511296272277831, 0.011448863983154298, 0.011493375778198242, 0.011488832473754883, 0.011507424354553222, 0.011553024291992188, 0.01150614356994629, 0.011527392387390136, 0.011516703605651855, 0.01145577621459961, 0.011571935653686524, 0.011507391929626465, 0.011542143821716309, 0.011582143783569336, 0.011589792251586914, 0.011493375778198242, 0.011558464050292969, 0.011526623725891113, 0.011556672096252442, 0.011501888275146484, 0.011491007804870605, 0.011517951965332032, 0.011542688369750977, 0.011714591979980468, 0.011580320358276367, 0.011549599647521972, 0.011572319984436035, 0.011494303703308105, 0.011462080001831054, 0.011486047744750977, 0.011476863861083984, 0.01154032039642334, 0.011515904426574707, 0.011517536163330079, 0.011565471649169922, 0.011560959815979004, 0.011494848251342773, 0.011168928146362304, 0.011428640365600587, 0.01156726360321045, 0.0118089599609375, 0.011650719642639161, 0.011436384201049804, 0.01161622428894043, 0.011421440124511718, 0.0113920316696167, 0.011656160354614259, 0.011461919784545898, 0.01145958423614502, 0.011418463706970215, 0.011514752388000489, 0.011447296142578126, 0.011450847625732423, 0.011392671585083008, 0.011459456443786622, 0.011525664329528808, 0.011487327575683593, 0.011460320472717285, 0.011455136299133301, 0.011454463958740235, 0.01146457576751709, 0.011388959884643555, 0.011414624214172364, 0.011413567543029786, 0.011410367965698243, 0.011392448425292969, 0.011468768119812012, 0.011470911979675293, 0.011386528015136718, 0.011418496131896973, 0.01142579174041748, 0.011469887733459474, 0.011430624008178712, 0.011380224227905274, 0.011389727592468262, 0.01136633586883545, 0.011401311874389648, 0.011484255790710449, 0.011409760475158692, 0.011403743743896484, 0.011388128280639648, 0.011418399810791015, 0.011419168472290039, 0.011411935806274413, 0.011422783851623534, 0.011453632354736329, 0.011415616035461425, 0.01141478443145752, 0.011393440246582032, 0.01140329647064209, 0.011455967903137207, 0.011577887535095215, 0.011472352027893066, 0.011473695755004882, 0.0115830078125, 0.011479264259338378, 0.01142518424987793, 0.01153494358062744, 0.011597824096679688, 0.012004799842834472, 
0.011612640380859376, 0.011579392433166504, 0.011499520301818847, 0.011564800262451172, 0.011438112258911132, 0.011479583740234375, 0.011622079849243164, 0.011483136177062989, 0.011487232208251954, 0.011439743995666505, 0.011476927757263184, 0.011454560279846192, 0.01144662380218506, 0.011481087684631347, 0.011533439636230469, 0.011512127876281739, 0.011447135925292968, 0.01142959976196289, 0.011481023788452149, 0.01144428825378418, 0.011560959815979004, 0.01149673557281494, 0.01153321647644043, 0.011672960281372071, 0.011692288398742676, 0.0115098237991333, 0.012149056434631348, 0.012320544242858887, 0.011816991806030273, 0.011599295616149903, 0.011577887535095215, 0.011544608116149902, 0.011522047996520996, 0.01151734447479248, 0.011481599807739258, 0.011449567794799805, 0.011565952301025391, 0.011493535995483398, 0.011452256202697754, 0.01145241641998291, 0.011452159881591796, 0.011531935691833497, 0.011444352149963378, 0.011444000244140625, 0.011550432205200195, 0.011459551811218261, 0.011425087928771973, 0.011448351860046387, 0.011428511619567872, 0.011441311836242675, 0.01146332836151123, 0.01143008041381836, 0.01177558422088623, 0.011551456451416016, 0.011443455696105958, 0.011430335998535155, 0.011470848083496094, 0.011413536071777343, 0.011453632354736329, 0.011472000122070312, 0.01146025562286377, 0.011368800163269042, 0.011455424308776855]",tokens/s,85.4529795017812,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 240877 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.277888,2134.769664,0.0,1732.247552,1728.316416,s,1,7.1752685546875,7.1752685546875,0.0,7.1752685546875,7.1752685546875,7.1752685546875,7.1752685546875,[7.1752685546875],,kWh,5.641033679133519e-06,6.126123403563451e-07,2.132223927989174e-06,8.38586994747904e-06,,MB,1216.466944,2336.096256,0.0,1923.088384,1891.2,s,11,0.4598833656311035,0.041807578693736684,0.0024665971482922522,0.04118921661376953,0.041549697875976566,0.045530641555786136,0.048715396499633794,"[0.049511585235595706, 0.040306144714355466, 0.04039846420288086, 0.04103945541381836, 0.040976383209228515, 0.041549697875976566, 0.04078704071044922, 0.041377281188964846, 0.04134988784790039, 0.04118921661376953, 0.04139820861816406]",tokens/s,6123.291709269738,kWh,1.6124362434296904e-06,1.7772977314398336e-07,1.0653244175824217e-06,2.8554904341560953e-06,tokens/kWh,89651849.97219492,MB,1228.029952,2336.096256,0.0,1923.088384,1895.80032,s,11,10.496321838378906,0.9542110762162642,0.005846273630776672,0.952310546875,0.961578857421875,0.9635577392578125,0.9651408447265626,"[0.947490234375, 0.96553662109375, 0.9507423095703125, 0.961578857421875, 0.9545164184570313, 0.952310546875, 0.9494661865234375, 0.9466533203125, 0.957639892578125, 0.9507672729492187, 0.9596201782226562]",tokens/s,66.02312797480205,kWh,2.7646173964903117e-05,3.048985154862754e-06,1.4985521756418675e-05,4.5680680876184545e-05,tokens/kWh,1379138.8129865818,,s,693,10.493796959876995,0.015142564155666689,0.0002963120602918356,0.015057696342468261,0.015441324806213378,0.01569280605316162,0.016201974029541016,"[0.0150600004196167, 0.015004608154296875, 0.015104864120483398, 0.014944255828857422, 0.01490873622894287, 0.015006624221801757, 0.015027999877929688, 0.014886431694030762, 0.014989791870117187, 0.015125696182250976, 0.014942079544067384, 0.014993439674377442, 0.014864383697509765, 0.015614879608154298, 0.014989312171936036, 0.015009792327880859, 0.014989503860473633, 0.014935839653015136, 0.01495043182373047, 0.015087615966796876, 0.0149749755859375, 0.015118335723876953, 0.015029855728149414, 0.014989888191223145, 0.014915424346923828, 0.014849535942077637, 0.0148603515625, 0.01489145565032959, 0.015267840385437012, 0.015484959602355956, 0.015054719924926758, 0.015035903930664063, 0.01490595245361328, 0.01501814365386963, 0.01503215980529785, 0.015153152465820312, 0.015013152122497559, 0.015012288093566894, 0.015068863868713379, 0.015059552192687987, 0.014954079627990722, 0.015143296241760255, 0.015130656242370605, 0.015020031929016114, 0.014964735984802247, 0.014997183799743652, 0.014997823715209961, 0.015044608116149903, 0.014999551773071289, 0.015251456260681152, 0.015001600265502929, 
0.015035679817199707, 0.015100640296936035, 0.014977024078369141, 0.014970144271850587, 0.015243647575378418, 0.015018336296081543, 0.015006719589233398, 0.015020352363586426, 0.0150731201171875, 0.015059807777404784, 0.015121664047241211, 0.014942975997924804, 0.014975199699401856, 0.015050592422485352, 0.015036767959594726, 0.015087167739868164, 0.01490924835205078, 0.015094623565673829, 0.015126303672790527, 0.015008031845092773, 0.014859999656677246, 0.015046079635620118, 0.015143327713012696, 0.015163840293884278, 0.014880191802978516, 0.015179552078247071, 0.015220416069030763, 0.014987456321716308, 0.01501206398010254, 0.015167167663574219, 0.015057632446289063, 0.015001279830932617, 0.015044608116149903, 0.014900671958923339, 0.014993632316589355, 0.014977696418762208, 0.014935935974121094, 0.015018112182617187, 0.015009792327880859, 0.015050751686096191, 0.015239168167114257, 0.015160799980163574, 0.015606304168701172, 0.01516652774810791, 0.015109056472778321, 0.015048576354980468, 0.015134847640991211, 0.015252991676330567, 0.01550592041015625, 0.0156364803314209, 0.015783935546875, 0.015699968338012696, 0.01574668788909912, 0.015745408058166505, 0.0158023681640625, 0.015638208389282225, 0.015706432342529296, 0.015686655998229982, 0.015867136001586915, 0.015724127769470214, 0.015691935539245607, 0.0158756160736084, 0.015939040184020994, 0.015737855911254883, 0.01572380828857422, 0.015622912406921386, 0.015613471984863281, 0.015684096336364747, 0.015793215751647948, 0.015362848281860351, 0.015476320266723633, 0.015536319732666016, 0.015413567543029786, 0.015427040100097657, 0.015198752403259278, 0.015200799942016601, 0.014989215850830078, 0.015062496185302735, 0.015041312217712403, 0.014988639831542968, 0.014949024200439453, 0.015052607536315918, 0.014981311798095703, 0.015017984390258789, 0.015040512084960938, 0.015114239692687988, 0.015003423690795898, 0.015028448104858398, 0.01489305591583252, 0.015373408317565918, 0.015057696342468261, 0.015063167572021485, 0.016199680328369142, 0.015140864372253418, 0.015060992240905761, 0.014936063766479492, 0.014976032257080078, 0.015104096412658691, 0.015082207679748535, 0.014870623588562011, 0.015068896293640137, 0.015119711875915527, 0.014994400024414063, 0.015390751838684082, 0.015060799598693848, 0.015474880218505859, 0.014927200317382813, 0.014891263961791992, 0.0149651517868042, 0.014964735984802247, 0.01497056007385254, 0.01491385555267334, 0.014925824165344239, 0.01658470344543457, 0.015107135772705078, 0.015145088195800781, 0.0152359037399292, 0.0150316162109375, 0.015047327995300293, 0.014979104042053222, 0.015005696296691894, 0.01504428768157959, 0.015014559745788573, 0.014956192016601562, 0.015273983955383302, 0.015054847717285156, 0.015124480247497558, 0.015114239692687988, 0.014948351860046387, 0.015045856475830078, 0.015168064117431641, 0.014891231536865234, 0.014938112258911132, 0.014964735984802247, 0.014952608108520508, 0.014945856094360352, 0.015026144027709961, 0.015025952339172362, 0.014973119735717773, 0.015023455619812012, 0.014919424057006836, 0.015923359870910644, 0.01804960060119629, 0.015079615592956543, 0.015005696296691894, 0.015099616050720214, 0.015050687789916993, 0.015053152084350586, 0.015007743835449219, 0.015161312103271485, 0.015153183937072753, 0.015493151664733886, 0.015961119651794434, 0.015518655776977539, 0.015531583786010742, 0.01567910385131836, 0.01585801601409912, 0.015563488006591797, 0.015572704315185548, 0.015534144401550293, 0.015531583786010742, 0.015624608039855957, 0.015665151596069335, 
0.015602975845336915, 0.015472736358642578, 0.01540351963043213, 0.015368320465087891, 0.015306015968322755, 0.01518665599822998, 0.014987263679504394, 0.014942208290100097, 0.015060992240905761, 0.014911487579345703, 0.01493769645690918, 0.015078911781311035, 0.015016223907470703, 0.01515993595123291, 0.01500879955291748, 0.014906240463256835, 0.01502012825012207, 0.015125823974609374, 0.014998208045959473, 0.015095711708068848, 0.01506287956237793, 0.01515340805053711, 0.015033632278442384, 0.01500211238861084, 0.014967007637023926, 0.015063039779663086, 0.015286272048950195, 0.014906944274902344, 0.014989760398864746, 0.015060640335083008, 0.015028063774108886, 0.01582108783721924, 0.015069600105285644, 0.01503212833404541, 0.015181695938110352, 0.014973055839538575, 0.015040512084960938, 0.015058943748474121, 0.015040927886962891, 0.015114784240722656, 0.015054047584533692, 0.015108256340026856, 0.014987872123718262, 0.015039775848388672, 0.015089471817016602, 0.015039392471313476, 0.014938112258911132, 0.015003647804260254, 0.01499350357055664, 0.015087519645690918, 0.015011008262634278, 0.015020031929016114, 0.015337920188903809, 0.01513100814819336, 0.015130368232727051, 0.015085087776184083, 0.01512063980102539, 0.01517750358581543, 0.015100159645080567, 0.01517407989501953, 0.015038463592529297, 0.015125727653503418, 0.015046560287475585, 0.015018048286437989, 0.01498793601989746, 0.015073344230651856, 0.015151200294494628, 0.015126527786254883, 0.015009792327880859, 0.015052127838134765, 0.015065759658813476, 0.015106047630310059, 0.014998944282531738, 0.015049311637878418, 0.015079423904418946, 0.015081695556640625, 0.014999327659606933, 0.015101951599121094, 0.015060992240905761, 0.01515724754333496, 0.015374336242675781, 0.015806464195251464, 0.01508351993560791, 0.015204352378845215, 0.01510524845123291, 0.017842559814453124, 0.015310815811157227, 0.01519865608215332, 0.015224063873291015, 0.015182592391967774, 0.01505065631866455, 0.01521065616607666, 0.01507472038269043, 0.015077919960021972, 0.015248831748962402, 0.015018400192260742, 0.015091520309448242, 0.015042911529541016, 0.015003647804260254, 0.015043935775756835, 0.01498316764831543, 0.014930848121643067, 0.015083552360534668, 0.014994912147521973, 0.015033920288085938, 0.014969632148742676, 0.015044095993041993, 0.01510262393951416, 0.015000896453857422, 0.014940032005310059, 0.014938431739807129, 0.015065600395202636, 0.01497929573059082, 0.01498908805847168, 0.01501699161529541, 0.014980064392089844, 0.015261216163635254, 0.014893343925476074, 0.015007264137268066, 0.014979743957519531, 0.014962688446044922, 0.014984352111816406, 0.015046496391296387, 0.014937088012695313, 0.014976096153259278, 0.014932543754577636, 0.014936415672302247, 0.015037631988525391, 0.014889792442321777, 0.015056896209716796, 0.015093119621276855, 0.01501859188079834, 0.014988736152648926, 0.014979680061340333, 0.015015935897827149, 0.016146175384521483, 0.015307007789611816, 0.015996928215026854, 0.015207424163818359, 0.015133888244628906, 0.015084896087646484, 0.015045087814331054, 0.015142911911010743, 0.0149749755859375, 0.015345664024353027, 0.015003647804260254, 0.015267135620117188, 0.01525011157989502, 0.015011808395385743, 0.015166815757751464, 0.015071935653686523, 0.015165216445922851, 0.015028448104858398, 0.015089088439941407, 0.014981696128845215, 0.015042112350463868, 0.015122879981994629, 0.015900671958923338, 0.016228351593017578, 0.01508351993560791, 0.015063039779663086, 0.015107263565063476, 0.015041407585144042, 
0.01500972843170166, 0.015026432037353515, 0.01493558406829834, 0.014944735527038575, 0.015154656410217284, 0.014879263877868653, 0.014954751968383789, 0.015040448188781738, 0.014996607780456543, 0.015051456451416015, 0.015022111892700194, 0.015407072067260743, 0.015102208137512207, 0.014994400024414063, 0.014957344055175781, 0.014968832015991211, 0.015179264068603515, 0.015001791954040528, 0.015085696220397949, 0.014972384452819825, 0.015162079811096192, 0.015278079986572265, 0.01501910400390625, 0.015049632072448731, 0.015054847717285156, 0.014926976203918457, 0.014969152450561523, 0.015382368087768555, 0.01496451187133789, 0.015059904098510742, 0.015108351707458495, 0.01493990421295166, 0.014942527770996094, 0.01497056007385254, 0.015056896209716796, 0.014907391548156738, 0.014942208290100097, 0.01530470371246338, 0.015204352378845215, 0.015966208457946777, 0.015076543807983398, 0.014963520050048829, 0.015073023796081542, 0.015048959732055663, 0.015369728088378906, 0.015038975715637207, 0.015113823890686036, 0.015098527908325195, 0.014962592124938966, 0.014921279907226562, 0.015044192314147949, 0.01512889575958252, 0.015194496154785157, 0.01507737636566162, 0.014985216140747071, 0.014968832015991211, 0.01501696014404297, 0.015021056175231933, 0.015120512008666992, 0.015077088356018066, 0.015153311729431152, 0.01497804832458496, 0.014919839859008788, 0.014998368263244629, 0.015081600189208984, 0.015042559623718262, 0.015007488250732422, 0.01507148838043213, 0.015054719924926758, 0.015019904136657715, 0.015008000373840332, 0.015208064079284668, 0.01512281608581543, 0.015052800178527831, 0.01513584041595459, 0.014990079879760742, 0.014950559616088868, 0.015038080215454101, 0.015194496154785157, 0.015132448196411134, 0.015439583778381347, 0.015055551528930664, 0.01512838363647461, 0.015136768341064453, 0.015042559623718262, 0.015087360382080079, 0.014946208000183106, 0.015044960021972656, 0.014975263595581054, 0.014952159881591797, 0.014874624252319337, 0.014945280075073243, 0.014957568168640138, 0.015110143661499023, 0.014951999664306641, 0.015022656440734864, 0.014970335960388183, 0.014968480110168457, 0.014899968147277832, 0.015000703811645508, 0.014969792366027832, 0.014921600341796876, 0.014958656311035157, 0.01503382396697998, 0.015095840454101563, 0.014894623756408692, 0.0148603515625, 0.014962656021118164, 0.014983391761779786, 0.015024864196777343, 0.014910847663879394, 0.014965375900268555, 0.014919679641723632, 0.014929920196533204, 0.015041664123535156, 0.015220895767211914, 0.015010527610778809, 0.014999551773071289, 0.015011903762817382, 0.0149967041015625, 0.014975711822509766, 0.015040512084960938, 0.015058015823364258, 0.015162272453308106, 0.0149618558883667, 0.015012736320495606, 0.014882752418518067, 0.014872575759887695, 0.015788031578063966, 0.015104000091552734, 0.014985216140747071, 0.015044384002685547, 0.015056351661682128, 0.014987296104431152, 0.014927935600280762, 0.015032992362976075, 0.014944160461425781, 0.015011584281921387, 0.01513916778564453, 0.014940159797668457, 0.015005824089050292, 0.015015392303466797, 0.015274399757385254, 0.015259008407592773, 0.01588278388977051, 0.015454048156738282, 0.015694111824035645, 0.015411168098449708, 0.015458304405212403, 0.01547878360748291, 0.015370368003845215, 0.015331199645996094, 0.015333375930786132, 0.015308799743652344, 0.015359999656677246, 0.015427231788635254, 0.015294431686401367, 0.01582646369934082, 0.015358431816101074, 0.015284607887268067, 0.015106047630310059, 0.01508556842803955, 0.01516051197052002, 
0.015094559669494629, 0.015042176246643066, 0.01499561595916748, 0.015016192436218262, 0.015074496269226073, 0.01502905559539795, 0.015047776222229003, 0.015032383918762207, 0.015346272468566895, 0.015413344383239746, 0.015044639587402344, 0.015057024002075195, 0.015180895805358887, 0.015059840202331544, 0.015106080055236816, 0.01525062370300293, 0.015242048263549805, 0.015103551864624023, 0.015165887832641601, 0.01519587230682373, 0.015137056350708007, 0.015156319618225098, 0.015313823699951172, 0.015140704154968262, 0.014993247985839843, 0.015071680068969726, 0.0150895357131958, 0.01507532787322998, 0.015142911911010743, 0.01504032039642334, 0.015075296401977538, 0.015081695556640625, 0.015258912086486817, 0.015075936317443847, 0.015036352157592773, 0.01505401611328125, 0.01500707244873047, 0.015050399780273437, 0.015032320022583008, 0.015007743835449219, 0.015036416053771973, 0.015128576278686523, 0.015052000045776366, 0.015160096168518067, 0.015437824249267578, 0.01514684772491455, 0.015271360397338868, 0.015147744178771972, 0.015175104141235352, 0.015028800010681152, 0.01507532787322998, 0.01509171199798584, 0.01497481632232666, 0.015052960395812989, 0.015089664459228515, 0.015058943748474121, 0.015019871711730958, 0.015314208030700684, 0.015145952224731446, 0.015144864082336425, 0.015098112106323242, 0.015027135848999023, 0.014984000205993652, 0.015068767547607421, 0.015114656448364258, 0.015058943748474121, 0.014958687782287598, 0.014974240303039552, 0.015054719924926758, 0.014936832427978516, 0.015005215644836426, 0.015020352363586426, 0.01500380802154541, 0.014999551773071289, 0.014913536071777344, 0.015113920211791991, 0.015055168151855468, 0.01507532787322998, 0.015031999588012695, 0.015441760063171387, 0.015077856063842774, 0.015109567642211915, 0.015078175544738769, 0.015068960189819337, 0.01507737636566162, 0.015220543861389161, 0.015165087699890136, 0.015155743598937989, 0.015063039779663086, 0.015087103843688965, 0.015056896209716796, 0.01498316764831543, 0.01505065631866455, 0.015056063652038574, 0.014945183753967285, 0.015200511932373047, 0.014968576431274415, 0.014985568046569824, 0.014991007804870605, 0.015120351791381836, 0.015036479949951172, 0.014958559989929199, 0.015000864028930664, 0.015008480072021484, 0.015009056091308594, 0.014949312210083008, 0.01496656036376953, 0.015486304283142089, 0.0156146240234375, 0.015194144248962402, 0.015132160186767578, 0.01504099178314209, 0.015123968124389648, 0.015106559753417969, 0.015591423988342285, 0.015032320022583008, 0.014939488410949707, 0.014985471725463868, 0.015501728057861328, 0.015273056030273437, 0.015048768043518066, 0.015135552406311035, 0.015120415687561036, 0.015773792266845704, 0.015105952262878418, 0.015122367858886718, 0.015015999794006347, 0.015162464141845702, 0.015157535552978515, 0.015103967666625976, 0.015114336013793946, 0.015159423828125, 0.01514742374420166, 0.015101280212402344, 0.015458911895751952, 0.015237376213073731, 0.01520796775817871, 0.015214847564697266, 0.015259712219238282, 0.015253503799438477, 0.01507737636566162, 0.015106047630310059, 0.015218688011169433, 0.015177727699279785, 0.015118335723876953, 0.01507027244567871, 0.01509062385559082, 0.015179776191711425, 0.016291519165039063, 0.015423808097839355, 0.017822751998901366, 0.01630905532836914, 0.015286432266235352]",tokens/s,66.03901358580525,, 
float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,807.391232,6322.782208,0.0,5920.260096,5695.433728,s,1,7.12855517578125,7.12855517578125,0.0,7.12855517578125,7.12855517578125,7.12855517578125,7.12855517578125,[7.12855517578125],,kWh,5.135933949964055e-06,5.477968852319573e-07,1.0894453159951412e-06,6.7731761511911535e-06,,MB,1261.375488,6337.462272,0.0,5922.357248,5577.222144,s,10,0.8698075790405273,0.08698075790405274,0.0027856695391058396,0.0877873764038086,0.08874007415771484,0.08881962203979492,0.08888326034545897,"[0.07887007904052734, 0.08762115478515625, 0.08761644744873047, 0.0865711669921875, 0.08872239685058594, 0.088899169921875, 0.08795359802246094, 0.08838272094726562, 0.0870967025756836, 0.08807414245605469]",tokens/s,2943.1796890340966,kWh,2.473927541137145e-06,2.7282960274386404e-07,1.6399000878135056e-06,4.386657231694515e-06,tokens/kWh,58358788.13378591,MB,1299.247104,6337.462272,0.0,5922.357248,5663.963648,s,10,20.89849194335937,2.0898491943359376,0.0028199697651873586,2.0892509765625,2.0933873291015623,2.094445861816406,2.095292687988281,"[2.091013916015625, 2.0879228515625, 2.086322021484375, 2.08748828125, 2.09055224609375, 2.08794970703125, 2.087192138671875, 2.091394287109375, 2.093152099609375, 2.09550439453125]",tokens/s,30.145715858707515,kWh,6.0927853701777545e-05,6.72003014200529e-06,4.0353744618187474e-05,0.0001080016284619703,tokens/kWh,583324.5377608696,,s,630,20.89573957061767,0.03316784058828203,0.00045336523228546214,0.03307014465332031,0.03341920700073243,0.03369798259735108,0.03597649814605713,"[0.03491753768920899, 0.034726398468017575, 0.03339708709716797, 0.03321241760253906, 0.03306515121459961, 0.03302726364135742, 0.03298278427124023, 0.032840576171875, 0.03283772659301758, 0.033023647308349606, 0.032890209197998045, 0.034493343353271484, 0.032943870544433596, 0.03279692840576172, 0.032866302490234374, 0.03286207962036133, 0.032934017181396484, 0.0329543685913086, 0.032841632843017575, 0.03403545761108399, 0.03296905517578125, 0.03295641708374023, 0.03301363372802734, 0.033462398529052736, 0.03327590560913086, 0.03303535842895508, 0.03298191833496094, 0.0329903678894043, 0.033061664581298826, 0.033033279418945315, 0.0332210578918457, 0.03317756652832031, 0.03369359970092774, 0.03308819198608398, 0.03309564971923828, 0.03300969696044922, 0.03311942291259766, 0.03313724899291992, 0.03290985488891601, 0.032923328399658204, 0.033078880310058595, 0.03307734298706055, 0.033118526458740236, 0.03297657775878906, 0.03311014556884766, 0.033011390686035154, 0.033106433868408204, 0.03309936141967774, 0.033021663665771486, 0.03305292892456055, 0.033186241149902346, 0.03315708923339844, 0.033210399627685544, 0.03319807815551758, 0.033173503875732424, 0.033099777221679685, 0.03314697647094727, 0.0332468147277832, 0.03332275390625, 0.0334252815246582, 0.03360019302368164, 0.03328732681274414, 0.03324335861206055, 0.03610521697998047, 
0.033908798217773435, 0.033570751190185544, 0.0330417594909668, 0.0329343376159668, 0.03289424133300781, 0.032973758697509764, 0.03299679946899414, 0.033061664581298826, 0.03300294494628906, 0.03309199905395508, 0.03296006393432617, 0.032917888641357425, 0.032817089080810546, 0.03294559860229492, 0.032762336730957034, 0.03295657730102539, 0.03293929672241211, 0.03299401473999024, 0.03276003265380859, 0.03296438217163086, 0.032923679351806644, 0.03310588836669922, 0.03300521469116211, 0.03311862564086914, 0.03298297500610352, 0.033089534759521484, 0.03295868682861328, 0.03307702255249023, 0.03314438247680664, 0.03337200164794922, 0.03332147216796875, 0.033157215118408204, 0.032997535705566405, 0.033180606842041015, 0.03295084762573242, 0.0331124153137207, 0.03307110214233398, 0.03299532699584961, 0.03297654342651367, 0.033024513244628906, 0.032971904754638674, 0.03311280059814453, 0.03326806259155273, 0.033086944580078125, 0.032995521545410154, 0.03307212829589844, 0.033049182891845705, 0.0331165771484375, 0.03304579162597656, 0.033039070129394534, 0.033050624847412106, 0.03319555282592773, 0.03335625457763672, 0.03314912033081055, 0.033210174560546875, 0.03305472183227539, 0.033117343902587894, 0.033411937713623045, 0.03324313735961914, 0.033311840057373046, 0.03324979019165039, 0.03338460922241211, 0.036218112945556644, 0.03383369445800781, 0.0332042236328125, 0.03279779052734375, 0.032991966247558596, 0.03287200164794922, 0.03290995025634766, 0.03281919860839844, 0.03290521621704102, 0.032729087829589845, 0.03290726470947265, 0.0328908805847168, 0.033058815002441407, 0.03294822311401367, 0.033035263061523434, 0.0330035514831543, 0.03299590301513672, 0.03301007843017578, 0.03302809524536133, 0.03287039947509766, 0.03307110214233398, 0.033056766510009765, 0.03301375961303711, 0.032959678649902346, 0.03314566421508789, 0.03296460723876953, 0.03298303985595703, 0.03291952133178711, 0.033081375122070315, 0.03301990509033203, 0.03310956954956055, 0.03303427124023438, 0.03304079818725586, 0.033023998260498046, 0.0330582389831543, 0.03305660629272461, 0.0332336311340332, 0.03301171112060547, 0.033111297607421875, 0.0331060791015625, 0.03306931304931641, 0.032993633270263674, 0.03302195358276367, 0.03309270477294922, 0.033076126098632815, 0.032933887481689454, 0.03298303985595703, 0.03302342224121094, 0.033032703399658206, 0.03300972747802734, 0.033058559417724606, 0.03306505584716797, 0.03312246322631836, 0.03318118286132812, 0.03321680068969726, 0.0332022705078125, 0.03330031967163086, 0.03312860870361328, 0.03332723236083984, 0.03329228973388672, 0.033207935333251955, 0.033234912872314455, 0.03345443344116211, 0.035977054595947265, 0.033603294372558594, 0.03332947158813476, 0.032860862731933595, 0.03311993789672851, 0.033081729888916014, 0.03290924835205078, 0.03270655822753906, 0.032911712646484376, 0.0327632942199707, 0.03293824005126953, 0.032909473419189456, 0.033011070251464845, 0.032850399017333984, 0.032935935974121096, 0.03286969757080078, 0.032949249267578126, 0.03290079879760742, 0.032896385192871094, 0.03302873611450195, 0.03290035247802734, 0.0328342399597168, 0.032960575103759764, 0.03289023971557617, 0.03293862533569336, 0.033040382385253905, 0.03299532699584961, 0.03293183898925781, 0.03304857635498047, 0.03303014373779297, 0.03306662368774414, 0.0339859504699707, 0.03330147171020508, 0.033230846405029296, 0.033103584289550785, 0.03313862228393555, 0.03302844619750977, 0.03300966262817383, 0.03313177490234375, 0.0330082893371582, 0.033220703125, 0.03312025451660156, 
0.03309356689453125, 0.03326163101196289, 0.033215774536132815, 0.03309552001953125, 0.033082080841064454, 0.03297091293334961, 0.033105918884277344, 0.03314457702636719, 0.03323497772216797, 0.03337875366210938, 0.03319891357421875, 0.03308438491821289, 0.03319807815551758, 0.033137855529785154, 0.033413951873779296, 0.033116161346435545, 0.03314396667480469, 0.033197120666503904, 0.03320614242553711, 0.03320003128051758, 0.033279998779296875, 0.0360618896484375, 0.0346357421875, 0.033316352844238284, 0.03289548873901367, 0.03283529663085937, 0.03275600051879883, 0.03288896179199219, 0.03293376159667969, 0.03286022567749024, 0.03278128051757812, 0.032854335784912106, 0.03284239959716797, 0.03284787368774414, 0.03284515380859375, 0.03305094528198242, 0.03279257583618164, 0.03281100845336914, 0.03278678512573242, 0.033003456115722654, 0.03273529434204102, 0.03293990325927734, 0.03282710266113281, 0.03296614456176758, 0.03289385604858398, 0.03294204711914062, 0.033071136474609374, 0.03298086547851563, 0.03287823867797852, 0.0328831672668457, 0.03305267333984375, 0.03300268936157227, 0.0329997444152832, 0.033057281494140625, 0.03305001449584961, 0.033001472473144534, 0.03297750473022461, 0.03303843307495117, 0.033053695678710936, 0.03305564880371094, 0.033019168853759766, 0.034114273071289065, 0.033511199951171876, 0.03312252807617187, 0.03302809524536133, 0.033165313720703124, 0.03312588882446289, 0.033376766204833985, 0.03305599975585938, 0.03316403198242188, 0.033079105377197264, 0.03370156860351563, 0.033417728424072264, 0.033314815521240236, 0.03343564987182617, 0.0333776969909668, 0.033186302185058594, 0.03352179336547852, 0.03333526229858398, 0.0336097297668457, 0.033568161010742184, 0.03359955215454102, 0.03382838439941406, 0.033393665313720705, 0.036306911468505856, 0.03383894348144531, 0.03341324615478516, 0.03301788711547852, 0.032984928131103514, 0.03317164611816406, 0.033271774291992184, 0.03297510528564453, 0.03317561721801758, 0.032951713562011715, 0.03295484924316406, 0.03278351974487305, 0.032897697448730466, 0.032771198272705075, 0.032979839324951174, 0.03297689437866211, 0.03292281723022461, 0.03280572891235352, 0.03291459274291992, 0.032852798461914065, 0.03298099136352539, 0.03305654525756836, 0.03296278381347656, 0.032933887481689454, 0.03288848114013672, 0.03285404968261719, 0.03299055862426758, 0.032962623596191405, 0.03303696060180664, 0.03297657775878906, 0.03298287963867187, 0.03317327880859375, 0.033027008056640626, 0.033112350463867186, 0.03324476623535156, 0.033079456329345704, 0.033216480255126954, 0.033159168243408206, 0.03311315155029297, 0.03308230209350586, 0.033212448120117186, 0.033306591033935545, 0.03338358306884766, 0.03313750457763672, 0.033148929595947264, 0.03309568023681641, 0.03313868713378906, 0.03299932861328125, 0.033105567932128904, 0.03330502319335937, 0.03301516723632812, 0.033036350250244144, 0.03310444641113281, 0.03314668655395508, 0.03311577606201172, 0.03315564727783203, 0.03323036956787109, 0.03312278366088867, 0.03322048187255859, 0.03316511917114258, 0.033237537384033206, 0.03319599914550781, 0.033307937622070315, 0.03629081726074219, 0.03374860763549805, 0.03343500900268555, 0.03309628677368164, 0.032852352142333986, 0.033056766510009765, 0.03287449645996094, 0.03283148956298828, 0.032804832458496094, 0.03302812957763672, 0.03288063812255859, 0.03271257781982422, 0.03315110397338867, 0.03283967971801758, 0.032888767242431644, 0.03283257675170898, 0.0329284782409668, 0.03274576187133789, 0.03286220932006836, 0.03297587203979492, 
0.032881664276123046, 0.03286544036865234, 0.03299580764770508, 0.03294451141357422, 0.03293955230712891, 0.033057247161865234, 0.033017024993896485, 0.03311289596557617, 0.03304243087768555, 0.03300252914428711, 0.03312534332275391, 0.0331201286315918, 0.03311014556884766, 0.03307929611206055, 0.03304243087768555, 0.03299942398071289, 0.03305267333984375, 0.03299654388427734, 0.033180030822753906, 0.033049022674560544, 0.03303359985351562, 0.03301232147216797, 0.03313667297363281, 0.033037311553955076, 0.03299356842041016, 0.03294063949584961, 0.03314495849609375, 0.03321651077270508, 0.033329151153564454, 0.033263614654541016, 0.033255424499511715, 0.033187614440917966, 0.03319830322265625, 0.03326566314697266, 0.033124351501464845, 0.0331278076171875, 0.03307379150390625, 0.033099552154541016, 0.03333552169799805, 0.033554431915283206, 0.03340902328491211, 0.03341516876220703, 0.03328550338745117, 0.03615859222412109, 0.03400716781616211, 0.03316604614257813, 0.03397836685180664, 0.033436767578125, 0.032887454986572265, 0.03292390441894531, 0.033056575775146486, 0.033296287536621096, 0.03288092803955078, 0.0329760971069336, 0.03302070236206055, 0.032833534240722655, 0.032794624328613284, 0.03421401596069336, 0.03307097625732422, 0.033285633087158206, 0.03295008087158203, 0.03291993713378906, 0.032788799285888674, 0.03296255874633789, 0.032950271606445314, 0.03290726470947265, 0.03292364883422851, 0.03300966262817383, 0.03306195068359375, 0.03296710586547852, 0.032963199615478514, 0.03319516754150391, 0.033151424407958985, 0.03316691207885742, 0.03305129623413086, 0.033003009796142575, 0.033027969360351565, 0.033041088104248044, 0.03291078567504883, 0.03296518325805664, 0.03298918533325195, 0.03303180694580078, 0.03302358245849609, 0.03308009719848633, 0.03303654479980469, 0.03299097442626953, 0.03292364883422851, 0.03362015914916992, 0.033004638671875, 0.03307593536376953, 0.033099777221679685, 0.03305267333984375, 0.03303014373779297, 0.03312995147705078, 0.033410625457763674, 0.03360457611083984, 0.03324710464477539, 0.03322246551513672, 0.033420703887939454, 0.03321129608154297, 0.03310780715942383, 0.03365903854370117, 0.03324627304077148, 0.03317855834960937, 0.033512607574462894, 0.03329520034790039, 0.03569158554077148, 0.03368431854248047, 0.033140735626220705, 0.032917503356933595, 0.03292364883422851, 0.032780223846435544, 0.03317318344116211, 0.03277657699584961, 0.0328458251953125, 0.03272409439086914, 0.03277721786499024, 0.032836990356445314, 0.032920127868652345, 0.032763839721679684, 0.03286774444580078, 0.03274812698364258, 0.03301990509033203, 0.03280281448364258, 0.033050624847412106, 0.033111263275146484, 0.03302915191650391, 0.032949951171875, 0.03286431884765625, 0.03296857452392578, 0.03313868713378906, 0.03298316955566406, 0.033301952362060544, 0.033220638275146486, 0.03341904067993164, 0.03320499038696289, 0.03348438262939453, 0.03325689697265625, 0.03317449569702149, 0.033183521270751956, 0.03307747268676758, 0.03320217514038086, 0.03331804656982422, 0.033221473693847654, 0.03313459014892578, 0.03307929611206055, 0.03306496047973633, 0.03515801620483398, 0.03314271926879883, 0.033112129211425784, 0.03311328125, 0.0329920654296875, 0.03405564880371094, 0.03384783935546875, 0.033181697845458984, 0.03323699188232422, 0.03307110214233398, 0.033235969543457033, 0.03346720123291016, 0.033224449157714844, 0.03310550308227539, 0.03319075012207031, 0.03341660690307617, 0.033385055541992184, 0.033662174224853514, 0.033325855255126956, 0.03345817565917969, 
0.03337366485595703, 0.033274398803710935, 0.03597513580322265, 0.033834049224853516, 0.03336697769165039, 0.032997440338134766, 0.03294815826416016, 0.03286220932006836, 0.03297484970092773, 0.03295743942260742, 0.03300044631958008, 0.03302358245849609, 0.03304054260253906, 0.032885311126708984, 0.03291279983520508, 0.03361590576171875, 0.033476608276367184, 0.033067230224609376, 0.03304880142211914, 0.03298035049438477, 0.033023998260498046, 0.03297635269165039, 0.033106048583984374, 0.03301241683959961, 0.03300982284545898, 0.033764446258544925, 0.03321129608154297, 0.03313049697875976, 0.03299737548828125, 0.032952030181884764, 0.03308931350708008, 0.03307571029663086, 0.033134910583496095, 0.03304211044311523, 0.033236190795898436, 0.033288833618164065, 0.033140480041503904, 0.03305923080444336, 0.03313167953491211, 0.03314329528808594, 0.03302640151977539, 0.033003360748291015, 0.03343196868896484, 0.033060577392578124, 0.033065216064453125, 0.033115070343017576, 0.03550908660888672, 0.03388809585571289, 0.0332918701171875, 0.03323868942260742, 0.03311094284057617, 0.03303433609008789, 0.03307702255249023, 0.03318540954589844, 0.03359008026123047, 0.03347014236450195, 0.03323494338989258, 0.03323427200317383, 0.03320694351196289, 0.033244640350341796, 0.033276447296142576, 0.033290241241455076, 0.03341516876220703, 0.033361087799072264, 0.03336438369750976]",tokens/s,30.14968663209546,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context 
return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in 
__init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 500.12 MiB is free. Process 215090 has 14.25 GiB memory in use. Of the allocated memory 14.13 GiB is allocated by PyTorch, and 8.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in 
_check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,802.492416,811.466752,0.0,408.94464,387.119104,s,1,7.13020458984375,7.13020458984375,0.0,7.13020458984375,7.13020458984375,7.13020458984375,7.13020458984375,[7.13020458984375],,kWh,2.71068195419654e-06,2.897794648110072e-07,1.0125008099992616e-06,4.012962229006809e-06,,MB,1259.241472,836.632576,0.0,421.527552,354.085376,s,16,0.19470534324645994,0.012169083952903746,0.0003762004797081261,0.01206440019607544,0.012193999767303466,0.0125551438331604,0.013394894123077393,"[0.013604831695556641, 0.012124671936035156, 0.01220524787902832, 0.012027392387390137, 0.011985247611999511, 0.012164192199707031, 0.012005472183227539, 0.01203104019165039, 0.012097727775573731, 0.012055232048034667, 0.012182751655578613, 0.012065312385559083, 0.012063488006591797, 0.011998911857604981, 0.012023615837097167, 0.012070207595825196]",tokens/s,21036.916253578325,kWh,3.504307763655268e-07,3.864624437608533e-08,2.229386870876288e-07,6.12015707829241e-07,tokens/kWh,418289917.6035965,MB,1298.194432,853.409792,0.0,436.207616,354.087936,s,16,10.243882263183593,0.6402426414489746,0.003015659841567892,0.6395029602050781,0.644943115234375,0.6458429107666016,0.6468378692626954,"[0.6470866088867188, 0.6444578857421875, 0.6376323852539062, 0.638298095703125, 0.64079150390625, 0.6400365600585938, 0.6372914428710937, 0.638103515625, 0.6382666625976563, 0.6395967407226563, 0.6394091796875, 0.6454283447265625, 0.640323486328125, 0.638524658203125, 0.6361475830078125, 0.6424876098632812]",tokens/s,98.40019380374387,kWh,1.8476635206445552e-05,2.0376642046846294e-06,7.377588226662384e-06,2.789188763779257e-05,tokens/kWh,2258721.274734992,,s,1008,10.2360394592285,0.010154801050821938,0.00020850638961136984,0.010117151737213136,0.0102662748336792,0.010387179231643676,0.010891270427703846,"[0.009934335708618165, 0.010283519744873047, 0.010160127639770507, 0.010596351623535156, 0.010090496063232422, 0.010112704277038575, 0.010066368103027344, 0.0100731201171875, 0.010492287635803222, 0.010101152420043944, 0.010217215538024902, 0.01010854434967041, 0.010177215576171876, 0.010122336387634278, 0.010217535972595214, 0.010175328254699707, 0.010176511764526367, 0.010176511764526367, 0.010131456375122071, 0.01031942367553711, 0.010168767929077149, 0.010156096458435059, 0.010069952011108399, 0.010277119636535645, 0.010110719680786133, 0.010143168449401855, 0.010146592140197754, 0.010095999717712403, 0.010157600402832031, 0.011455679893493652, 0.012046015739440918, 0.01194598388671875, 0.010299391746520996, 0.010260031700134278, 0.010392255783081054, 0.01034227180480957, 0.010239423751831055, 0.010433216094970704, 0.010403583526611328, 0.010209600448608398, 0.0102008638381958, 0.010253503799438476, 0.010215231895446777, 0.01016105556488037, 0.010164223670959472, 0.010131775856018066, 
0.010147520065307617, 0.01039798355102539, 0.010117952346801757, 0.010088512420654297, 0.010113311767578124, 0.010049440383911134, 0.010140031814575194, 0.010090880393981934, 0.01018051242828369, 0.010305536270141602, 0.010106880187988282, 0.010216832160949707, 0.010097279548645019, 0.010092800140380859, 0.010066880226135253, 0.01009337615966797, 0.010084351539611817, 0.009830719947814941, 0.01020035171508789, 0.010107359886169433, 0.010182784080505372, 0.010368000030517579, 0.010154080390930176, 0.010146592140197754, 0.01021951961517334, 0.01012940788269043, 0.010105183601379394, 0.010122912406921387, 0.010076191902160645, 0.01006716823577881, 0.010049759864807129, 0.010085984230041504, 0.010310591697692871, 0.010151935577392577, 0.01005561637878418, 0.010128959655761719, 0.010087967872619629, 0.010167263984680175, 0.010102784156799317, 0.010314047813415527, 0.010192383766174316, 0.010184351921081542, 0.010244864463806153, 0.010436127662658692, 0.010327520370483399, 0.010156479835510254, 0.0101463041305542, 0.010190688133239745, 0.010168000221252442, 0.011316896438598633, 0.012147359848022461, 0.010164223670959472, 0.010587712287902833, 0.010605024337768556, 0.010440671920776367, 0.010166208267211914, 0.010072128295898437, 0.010404959678649902, 0.010606816291809082, 0.010386367797851563, 0.010106623649597168, 0.010072064399719239, 0.010243488311767577, 0.010062335968017578, 0.010141728401184082, 0.010031167984008788, 0.010145888328552247, 0.010039199829101562, 0.01011302375793457, 0.010114175796508789, 0.010088831901550292, 0.01002342414855957, 0.010090239524841308, 0.010124959945678712, 0.010054143905639648, 0.0100414400100708, 0.010192511558532714, 0.010025343894958495, 0.010081567764282227, 0.010041952133178711, 0.009768959999084472, 0.0101561279296875, 0.010073663711547852, 0.010121536254882813, 0.010132896423339843, 0.01011673641204834, 0.010101759910583496, 0.010122336387634278, 0.010187680244445801, 0.010086400032043457, 0.010106975555419923, 0.010095552444458008, 0.01016316795349121, 0.010108927726745605, 0.010117119789123535, 0.010105119705200196, 0.010157279968261719, 0.010092864036560059, 0.010156224250793457, 0.01011731243133545, 0.010100543975830078, 0.01011302375793457, 0.010083456039428711, 0.01002995204925537, 0.010147007942199707, 0.010127776145935059, 0.010269087791442871, 0.010063488006591797, 0.010081855773925782, 0.010080991744995117, 0.010100831985473633, 0.01013759994506836, 0.010137215614318847, 0.010078592300415039, 0.010059167861938476, 0.010168928146362305, 0.010091872215270995, 0.010233759880065918, 0.010154911994934082, 0.010270336151123046, 0.010092767715454101, 0.01019923210144043, 0.010113951683044433, 0.01011190414428711, 0.010161855697631835, 0.010086591720581056, 0.010254464149475097, 0.010049728393554688, 0.010173791885375977, 0.010051808357238769, 0.010094847679138184, 0.010018495559692383, 0.010066240310668946, 0.010096063613891601, 0.010076671600341798, 0.009984224319458009, 0.010231904029846191, 0.010114815711975098, 0.010254336357116698, 0.010086624145507813, 0.010054495811462403, 0.01004640007019043, 0.010110976219177246, 0.009853343963623046, 0.010105888366699218, 0.010085280418395997, 0.010065919876098632, 0.010073439598083496, 0.010093215942382813, 0.010201215744018555, 0.010237471580505371, 0.010092991828918457, 0.010121248245239257, 0.010066880226135253, 0.010060735702514648, 0.010172575950622558, 0.010075296401977539, 0.01008739185333252, 0.010088255882263183, 0.010237855911254883, 0.010080256462097169, 0.010199040412902831, 
0.010135552406311036, 0.010109215736389161, 0.010175519943237305, 0.010139583587646484, 0.01013856029510498, 0.010264384269714355, 0.01017187213897705, 0.010195520401000977, 0.010242015838623046, 0.01013158416748047, 0.01008140754699707, 0.010053440093994141, 0.01003548812866211, 0.010076831817626954, 0.010067456245422364, 0.01020364761352539, 0.010041343688964843, 0.01009222412109375, 0.010052000045776367, 0.010176416397094726, 0.01003929615020752, 0.010143744468688964, 0.010126751899719238, 0.010123264312744141, 0.010185312271118165, 0.010108927726745605, 0.010043392181396485, 0.010192895889282226, 0.01021951961517334, 0.010121503829956054, 0.01011683177947998, 0.010063936233520508, 0.010096096038818359, 0.010139103889465331, 0.010281984329223632, 0.010268959999084473, 0.010153823852539063, 0.010071935653686524, 0.0101396484375, 0.010080256462097169, 0.010057279586791992, 0.010226112365722656, 0.010145792007446289, 0.010074111938476562, 0.009785599708557129, 0.010210335731506348, 0.010061887741088868, 0.010116000175476075, 0.010082143783569336, 0.010098527908325196, 0.010144063949584961, 0.01014521598815918, 0.010063520431518555, 0.010137632369995117, 0.010065855979919434, 0.010162464141845703, 0.010242048263549805, 0.010196895599365234, 0.01017958354949951, 0.01016806411743164, 0.01022976016998291, 0.010387616157531738, 0.010405119895935058, 0.01021123218536377, 0.010236607551574708, 0.010151455879211426, 0.01014844799041748, 0.010125184059143066, 0.010049504280090332, 0.010103839874267578, 0.010232192039489746, 0.01011734390258789, 0.010229567527770995, 0.010179167747497558, 0.010053631782531738, 0.010071999549865722, 0.010063008308410644, 0.01029212760925293, 0.010186752319335938, 0.010132736206054687, 0.010106847763061524, 0.010050335884094238, 0.010108927726745605, 0.010106304168701171, 0.010135392189025878, 0.01005027198791504, 0.01033456039428711, 0.0100382080078125, 0.010304224014282227, 0.010149888038635254, 0.010301247596740722, 0.010525952339172364, 0.010163104057312012, 0.010414112091064453, 0.010158080101013184, 0.010125311851501465, 0.01009663963317871, 0.010096735954284668, 0.010082240104675294, 0.01019696044921875, 0.010090271949768066, 0.010230015754699708, 0.010270784378051758, 0.010134783744812012, 0.01024886417388916, 0.010098688125610352, 0.010223615646362304, 0.009787839889526367, 0.010201279640197753, 0.010050592422485351, 0.010439647674560546, 0.01014352035522461, 0.010066368103027344, 0.010102144241333008, 0.010111455917358398, 0.010111231803894043, 0.010045120239257813, 0.010166272163391114, 0.010089887619018554, 0.010399744033813477, 0.010227968215942383, 0.010158432006835938, 0.010117024421691894, 0.010141183853149414, 0.01014025592803955, 0.010183712005615235, 0.010160832405090333, 0.010080032348632813, 0.010106911659240722, 0.01014799976348877, 0.010172736167907715, 0.010121343612670898, 0.010178624153137207, 0.010209152221679688, 0.010117055892944336, 0.010134655952453614, 0.010132096290588378, 0.010036992073059083, 0.010145440101623535, 0.01021628761291504, 0.01009222412109375, 0.01016864013671875, 0.01006387233734131, 0.01005568027496338, 0.010026783943176269, 0.010379232406616212, 0.010129664421081543, 0.010147232055664063, 0.010023520469665528, 0.010289504051208495, 0.010034432411193847, 0.010171999931335449, 0.01019388771057129, 0.010159071922302245, 0.01010364818572998, 0.01007414436340332, 0.010128735542297363, 0.010237855911254883, 0.01022969627380371, 0.010277183532714843, 0.010166943550109863, 0.01018454360961914, 0.010184703826904297, 
0.010295488357543946, 0.010114560127258301, 0.010098848342895508, 0.010082304000854492, 0.010338656425476074, 0.010206048011779784, 0.010245087623596192, 0.009893024444580078, 0.010148608207702636, 0.01012335968017578, 0.010133503913879394, 0.010122336387634278, 0.010116000175476075, 0.010156031608581542, 0.010098624229431152, 0.010175616264343261, 0.010034144401550293, 0.010203424453735352, 0.010053312301635742, 0.01023737621307373, 0.010161760330200196, 0.010128352165222168, 0.010100735664367675, 0.010256287574768066, 0.010125727653503417, 0.010163904190063477, 0.010105119705200196, 0.010180416107177735, 0.010120287895202636, 0.010036128044128418, 0.010120256423950195, 0.010129631996154785, 0.010091232299804688, 0.010069536209106446, 0.010120896339416504, 0.010059679985046387, 0.01024284839630127, 0.010102304458618164, 0.010105536460876465, 0.010061599731445313, 0.010194944381713868, 0.010085599899291992, 0.010154751777648926, 0.010141728401184082, 0.010100735664367675, 0.010245375633239746, 0.010058496475219727, 0.01017187213897705, 0.010074655532836914, 0.010107199668884277, 0.0100098237991333, 0.010052063941955566, 0.010031295776367187, 0.010198528289794923, 0.010135807991027833, 0.01005737590789795, 0.0100348482131958, 0.010087424278259278, 0.01007094383239746, 0.01009552001953125, 0.010127296447753906, 0.010008064270019532, 0.010030912399291993, 0.010084511756896973, 0.010011167526245117, 0.010179648399353028, 0.010052576065063476, 0.010110464096069336, 0.010072159767150878, 0.010049920082092285, 0.00980134391784668, 0.010146176338195801, 0.010171456336975098, 0.01014470386505127, 0.010050592422485351, 0.009997183799743653, 0.010059871673583985, 0.010059776306152344, 0.010127360343933106, 0.010133503913879394, 0.010100735664367675, 0.01007750415802002, 0.010118176460266113, 0.010112288475036622, 0.010150591850280761, 0.010082143783569336, 0.010071904182434081, 0.01006156826019287, 0.010184415817260742, 0.010134048461914062, 0.010007840156555176, 0.010076959609985352, 0.010053919792175294, 0.010204768180847168, 0.01011308765411377, 0.010182720184326172, 0.010032896041870118, 0.01008687973022461, 0.009996000289916993, 0.010106880187988282, 0.010133503913879394, 0.01023417568206787, 0.010171199798583984, 0.01059449577331543, 0.010123264312744141, 0.010236160278320312, 0.010148287773132324, 0.010012672424316407, 0.010057503700256348, 0.010039263725280762, 0.010053888320922851, 0.01005577564239502, 0.010151071548461914, 0.01005174446105957, 0.010046048164367676, 0.010135680198669434, 0.01004355239868164, 0.010458751678466797, 0.01004758358001709, 0.010123264312744141, 0.010026399612426757, 0.01052143955230713, 0.010229503631591797, 0.010188799858093261, 0.010119168281555176, 0.01011251163482666, 0.010033056259155274, 0.010119775772094726, 0.01008358383178711, 0.010112992286682129, 0.01022646427154541, 0.010139583587646484, 0.010145471572875977, 0.009742431640625, 0.010117119789123535, 0.010010496139526367, 0.010117247581481933, 0.01022150421142578, 0.01009055995941162, 0.010051584243774414, 0.010090047836303712, 0.0113721923828125, 0.010162976264953614, 0.010112832069396973, 0.010034560203552246, 0.010139552116394043, 0.010058655738830566, 0.010203392028808593, 0.010167519569396972, 0.01006835174560547, 0.010152095794677735, 0.01004963207244873, 0.010100128173828125, 0.010102879524230958, 0.010076191902160645, 0.010056063652038575, 0.010090847969055176, 0.01003439998626709, 0.01007046413421631, 0.010069503784179687, 0.01007363224029541, 0.010079039573669433, 0.010076160430908204, 
0.010046751976013183, 0.010154815673828125, 0.010049599647521973, 0.0101112003326416, 0.01009436798095703, 0.010081536293029785, 0.01011734390258789, 0.01015657615661621, 0.0101561279296875, 0.010014623641967773, 0.010034239768981934, 0.01012012767791748, 0.010129280090332031, 0.010061216354370118, 0.01008131217956543, 0.010028736114501953, 0.010082304000854492, 0.010589983940124512, 0.010224032402038574, 0.01007372760772705, 0.010110400199890136, 0.010026975631713867, 0.010095744132995606, 0.010208160400390624, 0.010136320114135742, 0.010184991836547851, 0.010153247833251952, 0.010125632286071778, 0.010253696441650391, 0.010140416145324706, 0.010051712036132813, 0.010069888114929199, 0.010028608322143555, 0.009910271644592286, 0.010407232284545898, 0.010154687881469726, 0.010320927619934082, 0.010252799987792969, 0.010325504302978515, 0.010281951904296874, 0.010182656288146973, 0.010102784156799317, 0.010106271743774414, 0.010129376411437989, 0.010228351593017578, 0.010055232048034667, 0.010092543601989747, 0.010298879623413085, 0.010267775535583496, 0.010278079986572266, 0.010207839965820312, 0.010156288146972656, 0.010095423698425292, 0.010076288223266602, 0.01029411220550537, 0.010127360343933106, 0.010100768089294434, 0.010059295654296875, 0.010109375953674317, 0.010090656280517578, 0.010094431877136231, 0.010000736236572265, 0.010067839622497559, 0.010055487632751464, 0.010165216445922852, 0.010096863746643066, 0.010096447944641113, 0.010035327911376953, 0.010099552154541015, 0.010051775932312011, 0.010216383934020997, 0.01012825584411621, 0.010198271751403809, 0.010129759788513184, 0.010107295989990234, 0.010080384254455567, 0.010147711753845216, 0.010119168281555176, 0.0100449275970459, 0.010099295616149903, 0.01009670352935791, 0.010168160438537598, 0.010158080101013184, 0.01012764835357666, 0.010057632446289062, 0.010298848152160645, 0.010070367813110352, 0.010192799568176269, 0.010108384132385254, 0.010118816375732422, 0.010074624061584473, 0.010084896087646485, 0.010088383674621582, 0.01011683177947998, 0.010344767570495605, 0.010236000061035156, 0.009921119689941407, 0.010134943962097168, 0.010123935699462891, 0.010084063529968261, 0.010110848426818848, 0.011233792304992676, 0.01017840003967285, 0.01010483169555664, 0.010155327796936035, 0.01011961555480957, 0.010023072242736816, 0.010140992164611817, 0.010028096199035644, 0.010083392143249511, 0.010093215942382813, 0.010110976219177246, 0.010155839920043945, 0.010011136054992676, 0.0100512638092041, 0.010155232429504394, 0.010128160476684571, 0.01002239990234375, 0.010160639762878418, 0.010067872047424316, 0.010045536041259765, 0.010010592460632324, 0.01008028793334961, 0.010077407836914062, 0.010060576438903809, 0.010024255752563476, 0.01009340763092041, 0.010008383750915527, 0.010067775726318359, 0.010082176208496094, 0.010071807861328125, 0.01015664005279541, 0.010102335929870605, 0.010295104026794434, 0.010134143829345703, 0.01005286407470703, 0.010011391639709473, 0.010186752319335938, 0.010140928268432617, 0.01006873607635498, 0.010041343688964843, 0.010073151588439942, 0.010095487594604493, 0.010064064025878906, 0.010041248321533204, 0.01011081600189209, 0.010029184341430663, 0.010096159934997559, 0.010298879623413085, 0.010120160102844238, 0.010659839630126953, 0.010201151847839356, 0.01067411231994629, 0.010174464225769043, 0.0106496000289917, 0.01012947177886963, 0.010152031898498535, 0.010094431877136231, 0.010055295944213867, 0.009857184410095214, 0.010202495574951172, 0.01026262378692627, 0.01044934368133545, 
0.010340288162231445, 0.010168319702148437, 0.010200448036193847, 0.010134143829345703, 0.010082304000854492, 0.010146944046020508, 0.01015283203125, 0.010098688125610352, 0.010461312294006347, 0.010204480171203614, 0.010225631713867188, 0.010299967765808106, 0.010201087951660156, 0.010065792083740234, 0.010099040031433106, 0.010049344062805176, 0.010086336135864259, 0.01014799976348877, 0.01024022388458252, 0.010079296112060546, 0.01012998390197754, 0.010038559913635253, 0.010185791969299316, 0.010115936279296875, 0.012890912055969239, 0.012228704452514649, 0.010370495796203614, 0.010203712463378907, 0.010098879814147949, 0.010322815895080566, 0.010066720008850097, 0.010109408378601074, 0.010126079559326172, 0.010052607536315919, 0.010168160438537598, 0.010045439720153808, 0.010197216033935547, 0.010038399696350097, 0.010081024169921874, 0.010392864227294922, 0.010161215782165528, 0.010025631904602051, 0.010118464469909667, 0.010096320152282715, 0.01013088035583496, 0.010177087783813477, 0.010102784156799317, 0.010104767799377442, 0.010092608451843262, 0.010171711921691895, 0.010130111694335937, 0.010100031852722168, 0.010085151672363282, 0.010103936195373536, 0.010458016395568847, 0.010083904266357422, 0.010084671974182129, 0.010094207763671875, 0.010463359832763672, 0.009769663810729981, 0.010175583839416504, 0.010058560371398927, 0.010100576400756836, 0.010042752265930176, 0.010216223716735839, 0.010249759674072266, 0.010101183891296387, 0.010205216407775879, 0.010143168449401855, 0.01000607967376709, 0.01016534423828125, 0.010221471786499023, 0.01012876796722412, 0.010167936325073242, 0.010103903770446777, 0.010210240364074707, 0.010108991622924805, 0.010265631675720215, 0.010000032424926758, 0.01015782356262207, 0.010354911804199219, 0.010187007904052735, 0.010128992080688477, 0.010103520393371581, 0.010163519859313965, 0.010057727813720703, 0.010083776473999023, 0.010240960121154784, 0.010082304000854492, 0.010054719924926759, 0.0101528959274292, 0.010061887741088868, 0.010168255805969239, 0.010145376205444336, 0.01010524845123291, 0.010055359840393066, 0.010088768005371094, 0.010171839714050293, 0.010178239822387695, 0.010090656280517578, 0.010129535675048828, 0.01009107208251953, 0.010118751525878907, 0.010156160354614257, 0.010152031898498535, 0.010178688049316407, 0.01008614444732666, 0.010473664283752442, 0.010104384422302246, 0.010388064384460449, 0.010034720420837402, 0.010209759712219239, 0.010096384048461913, 0.010057248115539551, 0.010105567932128906, 0.010160191535949707, 0.010907615661621094, 0.010426336288452149, 0.010151935577392577, 0.010156352043151856, 0.010276543617248534, 0.010098688125610352, 0.00984175968170166, 0.010152511596679688, 0.010107392311096192, 0.010144960403442383, 0.010160960197448731, 0.010366815567016602, 0.01030457592010498, 0.010299967765808106, 0.010215423583984374, 0.010105216026306152, 0.010268671989440918, 0.010110112190246582, 0.010213312149047851, 0.01009660816192627, 0.010251328468322754, 0.010094464302062988, 0.010095744132995606, 0.010037952423095704, 0.01012268829345703, 0.010244256019592285, 0.010087008476257325, 0.010076160430908204, 0.01006499195098877, 0.010153183937072754, 0.010061375617980957, 0.010157407760620117, 0.010115872383117676, 0.010149888038635254, 0.010110112190246582, 0.010144319534301758, 0.010071999549865722, 0.010079839706420898, 0.009990559577941895, 0.010088543891906738, 0.010109408378601074, 0.010061599731445313, 0.01002012825012207, 0.010100480079650878, 0.010259167671203613, 0.01023795223236084, 
0.010116415977478027, 0.010136544227600098, 0.010165504455566406, 0.010056415557861327, 0.01012335968017578, 0.010116255760192872, 0.010095359802246094, 0.010129568099975585, 0.01008841609954834, 0.010060832023620606, 0.01009340763092041, 0.01013321590423584, 0.010108223915100098, 0.010068960189819336, 0.010114815711975098, 0.010030495643615722, 0.010103455543518066, 0.01005129623413086, 0.010135968208312989, 0.010184767723083497, 0.010305631637573242, 0.010176704406738282, 0.010069600105285645, 0.009805727958679199, 0.010068160057067871, 0.010137984275817871, 0.010147520065307617, 0.010149120330810547, 0.010088735580444336, 0.010086400032043457, 0.010071840286254883, 0.010145631790161133, 0.010121952056884766, 0.01006601619720459, 0.010104191780090333, 0.010034879684448243, 0.010091487884521484, 0.01006387233734131, 0.010215007781982421, 0.010164704322814942, 0.010098848342895508, 0.010082079887390136, 0.010143136024475098, 0.010031711578369141, 0.010075296401977539, 0.010052576065063476, 0.010041343688964843, 0.010060959815979004, 0.010027647972106934, 0.010120512008666992, 0.0100032320022583, 0.010057184219360351, 0.010076800346374511, 0.010179519653320312, 0.010087679862976075, 0.010061727523803712, 0.010028736114501953, 0.01005897617340088, 0.010072192192077637, 0.010093343734741212, 0.010027008056640625, 0.010100735664367675, 0.010063936233520508, 0.010149727821350098, 0.010049951553344727, 0.010110560417175294, 0.01004758358001709, 0.010235199928283692, 0.010160832405090333, 0.010146112442016601, 0.010038240432739258, 0.010099424362182616, 0.010188480377197266, 0.010061311721801757, 0.010045408248901367, 0.010148799896240235, 0.010095840454101562, 0.010082112312316895, 0.010099712371826173, 0.010032640457153321, 0.010125696182250976, 0.010239295959472657, 0.01009507179260254, 0.010014752388000488, 0.010057472229003906, 0.01011683177947998, 0.00981167984008789, 0.010074496269226074, 0.010149951934814452, 0.0100797758102417, 0.01015657615661621, 0.01010108757019043, 0.010043456077575683, 0.010086112022399903, 0.010076031684875489, 0.010145600318908692, 0.010099007606506347, 0.010164223670959472, 0.010119104385375976, 0.01013152027130127, 0.010098688125610352, 0.010138655662536621, 0.010049983978271485, 0.010080096244812011, 0.010297760009765625, 0.010507712364196778, 0.01062118434906006, 0.010642111778259277, 0.01015219211578369, 0.010135199546813965, 0.010162176132202149, 0.010153311729431153, 0.010271391868591308, 0.010151935577392577, 0.010077664375305176, 0.01014412784576416, 0.010154144287109375, 0.010117183685302735, 0.010112159729003907, 0.010095392227172852, 0.010167488098144531, 0.010117952346801757, 0.010123264312744141, 0.010082304000854492, 0.010065088272094727, 0.010101568222045899, 0.01005964756011963, 0.010171968460083008, 0.01011081600189209, 0.010066656112670899, 0.010217472076416016, 0.010076128005981445, 0.010108960151672364, 0.010164223670959472, 0.010164223670959472, 0.010151455879211426, 0.010641887664794921, 0.0105633602142334, 0.011829471588134766, 0.010473247528076171, 0.010368288040161133, 0.010093503952026368, 0.0101212158203125, 0.010131456375122071, 0.010143296241760254, 0.010019264221191405, 0.01017193603515625, 0.010015456199645997, 0.01008358383178711]",tokens/s,98.47558755659315,, 
float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 14.12 MiB is free. Process 271572 has 14.72 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 4.70 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.167296,3361.603584,0.0,2959.081472,2942.567424,s,1,7.16058642578125,7.16058642578125,0.0,7.16058642578125,7.16058642578125,7.16058642578125,7.16058642578125,[7.16058642578125],,kWh,5.808171716663916e-06,6.26368501052951e-07,2.060557203989455e-06,8.495097421706321e-06,,MB,1208.287232,3554.541568,0.0,3141.533696,3105.830912,s,10,2.4604317626953125,0.2460431762695312,0.004123797940260679,0.24505162811279296,0.24895030059814452,0.253059317779541,0.2563465315246582,"[0.24049964904785157, 0.24390272521972656, 0.24485816955566406, 0.24426272583007813, 0.24524508666992187, 0.2571683349609375, 0.2447500457763672, 0.2460032958984375, 0.2480371856689453, 0.24570454406738282]",tokens/s,1040.467790578193,kWh,7.174965574085021e-06,7.912652458070161e-07,4.749705696780514e-06,1.271593651667255e-05,tokens/kWh,20132217.525963943,MB,1233.711104,3596.484608,0.0,3183.476736,3163.057152,s,10,11.716252075195312,1.1716252075195313,0.0024279256394249622,1.1719160766601564,1.1735464599609375,1.1748349365234374,1.1758657177734375,"[1.172054443359375, 1.1701400146484375, 1.1662952880859374, 1.171849365234375, 1.1732601318359375, 1.1730770263671875, 1.1761234130859375, 1.1719827880859375, 1.1699066162109375, 1.17156298828125]",tokens/s,53.771461723137925,kWh,3.463090582633429e-05,3.819506105111709e-06,2.298814982681996e-05,6.143856175826598e-05,tokens/kWh,1025414.6288104465,,s,630,11.7138453617096,0.018593405336046975,0.00025707878755219544,0.018524176597595216,0.018768131065368652,0.018976393795013428,0.019720434188842777,"[0.019445215225219727, 0.018770463943481447, 0.018600160598754883, 0.018536415100097656, 0.018507583618164063, 0.018552032470703125, 0.01848742485046387, 0.018477727890014648, 0.018573312759399413, 0.018730207443237303, 0.018546464920043946, 0.018566144943237304, 0.01846272087097168, 0.01836851119995117, 0.018492895126342772, 0.01847939109802246, 0.018493696212768553, 0.01846214485168457, 0.018882272720336914, 0.018653600692749024, 0.01849171257019043, 0.018655359268188478, 0.018577407836914063, 0.018446079254150392, 0.0184333438873291, 0.01842681694030762, 0.018568479537963867, 
0.018465024948120117, 0.018444768905639647, 0.018518272399902343, 0.018535648345947266, 0.01863324737548828, 0.01918713569641113, 0.018724576950073242, 0.01853526306152344, 0.018714399337768556, 0.018577247619628905, 0.018497312545776367, 0.01854643249511719, 0.018425952911376952, 0.018516128540039062, 0.018540319442749024, 0.0185229434967041, 0.018568960189819336, 0.018514272689819335, 0.018738367080688476, 0.01869696044921875, 0.018599903106689453, 0.018526208877563476, 0.01850284767150879, 0.018637632369995116, 0.01858064079284668, 0.018537311553955077, 0.018509824752807616, 0.01853164863586426, 0.018639232635498045, 0.018620735168457032, 0.019023967742919923, 0.01882102394104004, 0.018648992538452147, 0.018728832244873046, 0.01870649528503418, 0.01860598373413086, 0.019394912719726563, 0.01896361541748047, 0.01879337692260742, 0.01861814308166504, 0.01860256004333496, 0.018728607177734374, 0.018547903060913085, 0.0187523193359375, 0.018507776260375978, 0.01864499282836914, 0.018950143814086915, 0.018694143295288086, 0.018714080810546874, 0.018795040130615233, 0.018851743698120118, 0.01863875198364258, 0.018766016006469727, 0.018673631668090822, 0.018524192810058595, 0.018509824752807616, 0.018626560211181642, 0.018518016815185546, 0.018462944030761718, 0.01873843193054199, 0.018424352645874022, 0.018492607116699217, 0.01839801597595215, 0.018449695587158203, 0.018608863830566407, 0.01850476837158203, 0.01841862487792969, 0.01848054313659668, 0.018372991561889648, 0.018516191482543944, 0.018501632690429686, 0.01846886444091797, 0.018507776260375978, 0.018498752593994142, 0.01845257568359375, 0.018475168228149413, 0.0185677433013916, 0.01846214485168457, 0.018577472686767578, 0.018458240509033202, 0.018473855972290038, 0.018482528686523437, 0.018523008346557616, 0.01853526306152344, 0.018432607650756837, 0.018534751892089845, 0.01840947151184082, 0.01863065528869629, 0.01853759956359863, 0.018418399810791016, 0.018441888809204103, 0.018485088348388672, 0.018465375900268553, 0.018493087768554687, 0.018373023986816405, 0.01845043182373047, 0.018481151580810547, 0.018601951599121095, 0.018483232498168946, 0.01945599937438965, 0.01888387107849121, 0.018514656066894532, 0.018524160385131837, 0.01845452880859375, 0.01839308738708496, 0.01846643257141113, 0.018466207504272462, 0.018411903381347655, 0.01854934310913086, 0.018491392135620118, 0.018507776260375978, 0.01881324768066406, 0.01848249626159668, 0.018473344802856444, 0.01844806480407715, 0.018417503356933592, 0.018545312881469725, 0.01839289665222168, 0.01846019172668457, 0.018507680892944335, 0.018419488906860352, 0.01843484878540039, 0.018874368667602538, 0.018488576889038086, 0.01848089599609375, 0.018447359085083007, 0.01844428825378418, 0.018542591094970702, 0.01846272087097168, 0.018526208877563476, 0.018416831970214844, 0.0184389762878418, 0.01857257652282715, 0.018483936309814455, 0.018415935516357423, 0.018412960052490233, 0.018471200942993163, 0.018638399124145506, 0.01873084831237793, 0.01846531105041504, 0.01854060745239258, 0.0187205753326416, 0.018449600219726563, 0.018465791702270508, 0.01857494354248047, 0.018508447647094726, 0.018468608856201173, 0.01850281524658203, 0.01869500732421875, 0.018452064514160156, 0.018370975494384767, 0.01842585563659668, 0.018339136123657226, 0.018451135635375978, 0.018468511581420897, 0.018403104782104492, 0.018409856796264647, 0.018345344543457032, 0.01849750328063965, 0.018373472213745117, 0.0183767032623291, 0.018400384902954103, 0.019156991958618166, 0.018800640106201173, 
0.018601984024047852, 0.018520063400268554, 0.018369951248168946, 0.018415615081787108, 0.018440351486206055, 0.018331199645996093, 0.018514816284179687, 0.0183600959777832, 0.01843017578125, 0.018387168884277345, 0.018448160171508788, 0.01841702461242676, 0.018395776748657226, 0.018489280700683595, 0.018395200729370117, 0.018827520370483398, 0.018490367889404297, 0.018470848083496094, 0.018918432235717774, 0.0185598087310791, 0.018686592102050783, 0.018467168807983398, 0.01861417579650879, 0.019509248733520508, 0.01884579277038574, 0.018724863052368163, 0.018667520523071288, 0.018518016815185546, 0.018489343643188477, 0.018556896209716796, 0.01844432067871094, 0.01854252815246582, 0.018488447189331056, 0.018572256088256835, 0.018545696258544922, 0.018448352813720703, 0.018549728393554687, 0.018509824752807616, 0.018421375274658203, 0.018518239974975585, 0.018473119735717775, 0.01847881507873535, 0.018729248046875, 0.018522111892700196, 0.018613792419433593, 0.01862444877624512, 0.018487615585327147, 0.0185795841217041, 0.01880419158935547, 0.018530624389648438, 0.018562688827514648, 0.01856355285644531, 0.018482656478881837, 0.019655296325683594, 0.018704511642456054, 0.018677120208740235, 0.018621055603027344, 0.018593791961669923, 0.018660736083984376, 0.018762367248535156, 0.01862451171875, 0.01988812828063965, 0.019049503326416015, 0.01932758331298828, 0.019423616409301757, 0.01884569549560547, 0.01878009605407715, 0.018663135528564453, 0.01859030342102051, 0.018562816619873048, 0.018460735321044922, 0.018415327072143554, 0.01857551956176758, 0.01843721580505371, 0.018426847457885743, 0.0184454402923584, 0.01851638412475586, 0.018702592849731445, 0.018530527114868165, 0.01848099136352539, 0.01860940742492676, 0.018578592300415038, 0.018511775970458985, 0.018562911987304687, 0.018508960723876953, 0.01851683235168457, 0.01851740837097168, 0.01859440040588379, 0.018650239944458007, 0.018580127716064453, 0.018458175659179687, 0.018498207092285158, 0.018571264266967775, 0.01842585563659668, 0.0189069766998291, 0.018667680740356445, 0.018497535705566406, 0.018638816833496094, 0.01870031929016113, 0.018743295669555664, 0.018565248489379883, 0.018546112060546877, 0.018468448638916016, 0.018502592086791992, 0.018392288208007812, 0.01852230453491211, 0.01866204833984375, 0.018648927688598632, 0.01867366409301758, 0.018654399871826172, 0.01862700843811035, 0.018610464096069337, 0.018511968612670897, 0.018495487213134765, 0.018472095489501954, 0.018415456771850587, 0.018555072784423827, 0.018497663497924803, 0.018716800689697267, 0.018553407669067382, 0.018446048736572265, 0.01850396728515625, 0.01852604866027832, 0.01856889533996582, 0.019482656478881834, 0.018819679260253908, 0.018667520523071288, 0.0186060791015625, 0.018485471725463866, 0.018595808029174803, 0.018517824172973634, 0.01840905570983887, 0.018446752548217774, 0.018640640258789063, 0.01842348861694336, 0.018456703186035157, 0.01856764793395996, 0.018485023498535157, 0.01849977684020996, 0.018437984466552735, 0.018638912200927733, 0.01850172805786133, 0.01850592041015625, 0.01852332878112793, 0.018592384338378905, 0.018525632858276367, 0.018487903594970705, 0.018673631668090822, 0.01850160026550293, 0.018640031814575197, 0.018598783493041993, 0.018494720458984374, 0.01846553611755371, 0.018544576644897462, 0.01849760055541992, 0.018492576599121093, 0.01857855987548828, 0.018533151626586915, 0.018670528411865235, 0.018589696884155273, 0.018525279998779298, 0.018463647842407227, 0.018626560211181642, 0.01846681594848633, 
0.018537792205810547, 0.018676416397094726, 0.018767871856689454, 0.01860403251647949, 0.018718879699707033, 0.018986848831176757, 0.019866815567016603, 0.018707008361816407, 0.018663360595703126, 0.018618688583374024, 0.01862860870361328, 0.01928553581237793, 0.01856924819946289, 0.01855881690979004, 0.018593599319458008, 0.018595935821533204, 0.018686208724975586, 0.018512319564819336, 0.01848838424682617, 0.018539167404174803, 0.01852035140991211, 0.018423807144165038, 0.01860812759399414, 0.019747039794921876, 0.018912000656127928, 0.01920342445373535, 0.019161535263061524, 0.01880905532836914, 0.01867366409301758, 0.01865273666381836, 0.018647615432739257, 0.01865920066833496, 0.018569183349609376, 0.018524255752563477, 0.01856835174560547, 0.018543392181396483, 0.018546720504760743, 0.018771936416625976, 0.01853379249572754, 0.018592351913452147, 0.018515968322753908, 0.018505727767944336, 0.018613887786865235, 0.01852249526977539, 0.01869011116027832, 0.018566495895385744, 0.018922079086303712, 0.019982336044311523, 0.01930803108215332, 0.018725120544433593, 0.01867158317565918, 0.018567455291748046, 0.018790399551391602, 0.018684064865112305, 0.018538335800170898, 0.018528255462646484, 0.01847305679321289, 0.018470720291137697, 0.018493791580200196, 0.018414751052856445, 0.018472896575927735, 0.01847750473022461, 0.01844451141357422, 0.01865283203125, 0.0186976318359375, 0.018455488204956055, 0.018530303955078126, 0.018493440628051756, 0.018600255966186523, 0.018521408081054687, 0.018712575912475587, 0.018436479568481445, 0.01857904052734375, 0.018554847717285158, 0.01858780860900879, 0.018567455291748046, 0.018616031646728516, 0.018562944412231445, 0.018578880310058592, 0.018500576019287108, 0.01859702491760254, 0.01853094482421875, 0.01874732780456543, 0.018641183853149414, 0.018565120697021483, 0.01865318489074707, 0.019312639236450196, 0.02044108772277832, 0.02083635139465332, 0.018811071395874023, 0.018498432159423827, 0.01850054359436035, 0.01842780876159668, 0.018421247482299806, 0.018430559158325196, 0.018418912887573243, 0.01842038345336914, 0.01844361686706543, 0.018393280029296875, 0.018485855102539063, 0.01845859146118164, 0.018511903762817382, 0.018453792572021486, 0.01860009574890137, 0.018471712112426757, 0.01839052772521973, 0.01854819107055664, 0.01852249526977539, 0.01843244743347168, 0.018552127838134765, 0.018737855911254882, 0.01924710464477539, 0.01847260856628418, 0.018497888565063476, 0.018487295150756835, 0.01852003288269043, 0.018515008926391602, 0.018451423645019532, 0.01852592086791992, 0.018543903350830077, 0.018387168884277345, 0.018463520050048827, 0.018394784927368166, 0.01845487976074219, 0.01847091293334961, 0.01844223976135254, 0.018524160385131837, 0.01845452880859375, 0.018449983596801757, 0.01844473648071289, 0.01841971206665039, 0.01882054328918457, 0.018513696670532227, 0.01847478485107422, 0.018493440628051756, 0.018445247650146483, 0.01849555206298828, 0.018480287551879884, 0.018490144729614258, 0.01856217575073242, 0.018547008514404297, 0.018643327713012695, 0.018672927856445313, 0.0185229434967041, 0.018491552352905272, 0.01847500801086426, 0.018880352020263672, 0.018632863998413084, 0.018417503356933592, 0.01932441520690918, 0.018866687774658202, 0.01899875259399414, 0.018685567855834962, 0.018541471481323242, 0.018521408081054687, 0.018436800003051756, 0.018460512161254883, 0.01857142448425293, 0.018479103088378905, 0.01846886444091797, 0.01852988815307617, 0.018526176452636718, 0.01845292854309082, 0.018519039154052733, 
0.018467744827270507, 0.018534496307373048, 0.018443424224853514, 0.01851215934753418, 0.018520639419555663, 0.0185316162109375, 0.018495840072631838, 0.01852454376220703, 0.01849497604370117, 0.01859040069580078, 0.018491264343261718, 0.0186779842376709, 0.018525983810424803, 0.018544639587402344, 0.018474943161010744, 0.018550783157348632, 0.01856716728210449, 0.018518016815185546, 0.018468704223632813, 0.01862838363647461, 0.018668127059936524, 0.018620319366455078, 0.018543680191040038, 0.018567007064819337, 0.01853539276123047, 0.01853343963623047, 0.018491968154907227, 0.01846236801147461, 0.018455263137817382, 0.01843814468383789, 0.018700191497802734, 0.018487167358398438, 0.018483423233032228, 0.018497535705566406, 0.01859174346923828, 0.018489248275756837, 0.01877168083190918, 0.018634368896484375, 0.018475616455078125, 0.01861369514465332, 0.0185799674987793, 0.018716896057128906, 0.018491392135620118, 0.018504800796508788, 0.018576288223266603, 0.018603456497192382, 0.01861075210571289, 0.018571264266967775, 0.019623807907104492, 0.0187871036529541, 0.018694368362426758, 0.018507232666015627, 0.01848748779296875, 0.018460319519042968, 0.018411264419555665, 0.018412479400634764, 0.018384511947631837, 0.0184117431640625, 0.01845587158203125, 0.018409856796264647, 0.01843846321105957, 0.01846272087097168, 0.018520063400268554, 0.018993152618408202, 0.0187064323425293, 0.018554719924926757, 0.018476800918579103, 0.018520191192626954, 0.01846505546569824, 0.018454240798950194, 0.01861814308166504, 0.018442752838134766, 0.018314271926879882, 0.01846985626220703, 0.018497535705566406, 0.01860403251647949, 0.01840742492675781, 0.01847318458557129, 0.018394399642944335, 0.018420223236083985, 0.01846886444091797, 0.018694080352783204, 0.018477119445800782, 0.018431167602539062, 0.01834809684753418, 0.01848569679260254, 0.018385215759277342, 0.018466751098632814, 0.018430015563964845, 0.018345983505249023, 0.02085430335998535, 0.019239391326904297, 0.018444192886352538, 0.01870969581604004, 0.018394016265869142, 0.018665727615356446, 0.01849839973449707, 0.018573984146118164, 0.018518272399902343, 0.018515520095825196, 0.018758079528808595, 0.018545759201049804, 0.019027008056640624, 0.018634592056274414, 0.018647039413452148, 0.018491552352905272, 0.01848409652709961, 0.01873967933654785, 0.018452255249023438, 0.018634752273559572, 0.018675552368164063]",tokens/s,53.78250954715128,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.352192,3361.603584,0.0,2959.081472,2942.567424,s,1,7.23410400390625,7.23410400390625,0.0,7.23410400390625,7.23410400390625,7.23410400390625,7.23410400390625,[7.23410400390625],,kWh,5.661606183298318e-06,6.169498990853535e-07,9.450007559946849e-07,7.223556838378356e-06,,MB,1336.38144,3556.63872,0.0,3139.436544,3105.830912,s,10,0.3410763816833496,0.03410763816833497,0.0009601042729743825,0.03387631988525391,0.03456672286987305,0.035707361221313474,0.03661987190246582,"[0.036847999572753906, 0.033283454895019535, 0.03375715255737305, 0.03363756942749024, 0.033812992095947264, 0.03405897521972656, 0.03403952026367187, 0.034313247680664065, 0.03393964767456055, 0.03338582229614258]",tokens/s,7505.650163653568,kWh,1.1894227970020096e-06,1.311717452667655e-07,7.867181090487611e-07,2.1073126513175365e-06,tokens/kWh,121481736.39063163,MB,1374.326784,3598.58176,0.0,3181.379584,3162.0096,s,10,13.824638793945313,1.3824638793945314,0.004083736539661044,1.3819203491210938,1.3872135498046876,1.3876933227539063,1.3880771411132813,"[1.37633056640625, 1.38710693359375, 1.3870238037109375, 1.3768094482421875, 1.3807109375, 1.3794862060546875, 1.380813720703125, 1.3830269775390625, 1.3851571044921875, 1.388173095703125]",tokens/s,45.57081088266241,kWh,4.003637925633205e-05,4.415580499917486e-06,2.475321898955077e-05,6.920517874580029e-05,tokens/kWh,910336.4970908791,,s,630,13.822264650344863,0.021940102619594996,0.0003191475102928791,0.021874223709106445,0.022060111808776855,0.022269403171539307,0.02338968252182007,"[0.021915712356567384, 0.021916032791137696, 0.021798912048339843, 0.021942272186279296, 0.021815296173095702, 0.02183782386779785, 0.021845247268676756, 0.02187129592895508, 0.021878751754760742, 0.021794975280761717, 0.0217672004699707, 0.021781375885009766, 0.02171731185913086, 0.021802591323852538, 0.021919872283935545, 0.0217903995513916, 0.021852479934692384, 0.021742687225341797, 0.021949344635009766, 0.021809247970581053, 0.02189017677307129, 0.021771360397338867, 0.021878463745117187, 0.021863712310791015, 0.021792768478393554, 0.021744703292846678, 0.021829280853271484, 0.021800960540771484, 0.021815296173095702, 0.02187264060974121, 0.02200953674316406, 0.021886463165283202, 0.021836639404296875, 0.02186979293823242, 0.021996288299560546, 0.021823488235473632, 0.021789888381958007, 0.021773120880126954, 0.022060831069946288, 0.02182147216796875, 0.02173766326904297, 0.021769952774047852, 0.021872352600097657, 0.0218176326751709, 0.021880256652832032, 0.021876672744750976, 0.02188764762878418, 0.02177769660949707, 0.021820383071899416, 0.021809343338012696, 0.021810400009155274, 0.021736032485961915, 0.021755903244018555, 
0.021794815063476563, 0.02178816032409668, 0.021758047103881836, 0.02198159980773926, 0.02188515281677246, 0.02177577590942383, 0.02197747230529785, 0.021977088928222657, 0.021843967437744142, 0.02189516830444336, 0.021942527770996093, 0.021991424560546875, 0.021870336532592773, 0.021961727142333985, 0.021961727142333985, 0.022440223693847655, 0.02237548828125, 0.021775007247924805, 0.021760095596313478, 0.02183977508544922, 0.02185420799255371, 0.021805055618286134, 0.021806655883789064, 0.02184543991088867, 0.02206003189086914, 0.02184601593017578, 0.022048095703125, 0.021829408645629884, 0.021815296173095702, 0.021956480026245118, 0.02189369583129883, 0.021780992507934572, 0.021847999572753907, 0.02196646308898926, 0.02183206367492676, 0.021849472045898436, 0.021868959426879882, 0.021790304183959962, 0.0219399356842041, 0.021908416748046874, 0.021812223434448243, 0.021872831344604493, 0.02180803108215332, 0.02254844856262207, 0.021843488693237306, 0.021906911849975588, 0.021799455642700194, 0.02195699119567871, 0.02181724739074707, 0.021817440032958983, 0.02188083267211914, 0.022068288803100584, 0.022643648147583007, 0.021811199188232423, 0.021800960540771484, 0.023053600311279298, 0.024490816116333008, 0.022130592346191406, 0.021950111389160157, 0.021881183624267577, 0.022072864532470704, 0.02190320014953613, 0.021762144088745116, 0.02183456039428711, 0.021874399185180665, 0.021917695999145507, 0.021827903747558594, 0.02183135986328125, 0.021882368087768556, 0.02195916748046875, 0.022972415924072266, 0.022683647155761717, 0.021997568130493163, 0.02189516830444336, 0.02201190376281738, 0.021966848373413086, 0.022032352447509767, 0.021817375183105468, 0.021794431686401366, 0.0216878719329834, 0.023434207916259765, 0.022593376159667968, 0.021835775375366212, 0.02182143974304199, 0.02175155258178711, 0.02179097557067871, 0.02188444709777832, 0.021862432479858397, 0.021784191131591798, 0.0218222713470459, 0.021766143798828123, 0.02185379219055176, 0.021774560928344726, 0.02185807991027832, 0.021780160903930663, 0.022016191482543947, 0.021918239593505858, 0.025417535781860352, 0.021951904296875, 0.022790943145751953, 0.0230231990814209, 0.022869504928588868, 0.02202070426940918, 0.022054847717285157, 0.02228041648864746, 0.02189263916015625, 0.021862752914428712, 0.021836191177368163, 0.02188070487976074, 0.021849151611328124, 0.02180748748779297, 0.021878400802612306, 0.02180601692199707, 0.021919679641723634, 0.022128704071044922, 0.021929983139038087, 0.022095584869384767, 0.02185427284240723, 0.021865856170654296, 0.021887840270996092, 0.021759103775024415, 0.02175270462036133, 0.021772287368774415, 0.021812671661376952, 0.021718656539916992, 0.021796031951904295, 0.021754688262939453, 0.021887359619140626, 0.021747936248779298, 0.02183193588256836, 0.02189321517944336, 0.02182963180541992, 0.021933504104614258, 0.02198111915588379, 0.021858943939208984, 0.021809152603149414, 0.021772544860839845, 0.02188470458984375, 0.021871391296386718, 0.0218603515625, 0.021778432846069336, 0.021796319961547853, 0.0217772159576416, 0.02182044792175293, 0.021749439239501952, 0.02194476890563965, 0.02187526321411133, 0.021793792724609375, 0.021773311614990236, 0.021767967224121092, 0.021778079986572267, 0.021740095138549805, 0.021753856658935547, 0.02187843132019043, 0.02184012794494629, 0.02175542449951172, 0.02184281539916992, 0.02212380790710449, 0.02175820732116699, 0.021816959381103517, 0.021752351760864257, 0.022396928787231447, 0.021882207870483398, 0.02193824005126953, 0.021846912384033204, 
0.02188163185119629, 0.02180191993713379, 0.021812383651733398, 0.021885791778564454, 0.021800800323486327, 0.02177244758605957, 0.02185420799255371, 0.021766143798828123, 0.02190336036682129, 0.02186444854736328, 0.021861663818359377, 0.021865184783935548, 0.021856607437133788, 0.021994592666625977, 0.021775999069213868, 0.021827648162841797, 0.021807519912719727, 0.021813247680664064, 0.02185468864440918, 0.0218787841796875, 0.02185158348083496, 0.02186297607421875, 0.02188467216491699, 0.0217989444732666, 0.021829856872558593, 0.021761215209960938, 0.021993631362915038, 0.02190608024597168, 0.021964256286621093, 0.022041120529174806, 0.021878047943115233, 0.02177712059020996, 0.021825056076049804, 0.02176223945617676, 0.021845951080322265, 0.0226243839263916, 0.02205900764465332, 0.02225971221923828, 0.021960319519042967, 0.02188070487976074, 0.021864479064941406, 0.021970720291137696, 0.021983936309814454, 0.0219748477935791, 0.021842111587524415, 0.021956159591674806, 0.022993152618408202, 0.021893632888793944, 0.021794496536254884, 0.02185775947570801, 0.021756383895874025, 0.02175391960144043, 0.021757823944091797, 0.021839807510375977, 0.021847295761108398, 0.02170585632324219, 0.02187654495239258, 0.021798912048339843, 0.02185625648498535, 0.021796863555908205, 0.021865760803222656, 0.0218668155670166, 0.02175008010864258, 0.021753023147583008, 0.021842687606811524, 0.021755456924438477, 0.02184662437438965, 0.021854015350341798, 0.021807296752929688, 0.021784576416015625, 0.022017568588256837, 0.021868640899658204, 0.021866880416870117, 0.021934080123901366, 0.02191360092163086, 0.021776256561279298, 0.021864864349365236, 0.021777856826782228, 0.02183718490600586, 0.021945247650146483, 0.021888160705566408, 0.021853248596191407, 0.021909280776977538, 0.021880256652832032, 0.02189958381652832, 0.02186016082763672, 0.021933759689331055, 0.02194476890563965, 0.021991071701049806, 0.021990047454833985, 0.022052799224853516, 0.021997152328491212, 0.021918176651000976, 0.02190300750732422, 0.021911903381347655, 0.021970943450927736, 0.02189276885986328, 0.02194118309020996, 0.02193427276611328, 0.021949247360229494, 0.02192076873779297, 0.02176425552368164, 0.022271520614624025, 0.02184432029724121, 0.021759967803955078, 0.021747264862060547, 0.02176383972167969, 0.021758655548095703, 0.0217938232421875, 0.021846303939819334, 0.02183225631713867, 0.02181337547302246, 0.021778432846069336, 0.021781696319580077, 0.02184448051452637, 0.021827903747558594, 0.02183772850036621, 0.022201440811157228, 0.021845216751098632, 0.021851743698120117, 0.021835935592651366, 0.02182966423034668, 0.021811071395874022, 0.02186252784729004, 0.02187811279296875, 0.021842464447021485, 0.02201817512512207, 0.022091775894165038, 0.02190470314025879, 0.021940383911132812, 0.02183830451965332, 0.022062368392944336, 0.021986080169677735, 0.02206947135925293, 0.021833215713500977, 0.022071584701538086, 0.021897216796875, 0.021931135177612304, 0.021795711517333984, 0.021944320678710938, 0.02184806442260742, 0.021882720947265625, 0.02183087921142578, 0.021861312866210937, 0.021816768646240235, 0.0218666877746582, 0.021873023986816405, 0.02211369514465332, 0.022024799346923828, 0.021937887191772462, 0.021915935516357423, 0.021941984176635742, 0.021895456314086913, 0.021874048233032226, 0.021866111755371093, 0.021846527099609374, 0.021930496215820314, 0.02188287925720215, 0.021780448913574217, 0.021905439376831055, 0.021968896865844727, 0.022099967956542968, 0.022189632415771484, 0.021909824371337892, 
0.02187926483154297, 0.02183932876586914, 0.021858495712280275, 0.02187664031982422, 0.021796960830688477, 0.021780479431152345, 0.021755903244018555, 0.021774335861206053, 0.021835199356079103, 0.021768768310546874, 0.021794815063476563, 0.021779712677001954, 0.021842880249023436, 0.02175292778015137, 0.021798656463623046, 0.021760128021240235, 0.021916160583496092, 0.021888799667358398, 0.021842496871948242, 0.021751455307006836, 0.021787071228027345, 0.022005664825439454, 0.021760000228881835, 0.021780479431152345, 0.021790719985961913, 0.021792768478393554, 0.02347612762451172, 0.021782623291015626, 0.021770240783691407, 0.021823488235473632, 0.021772287368774415, 0.021816896438598632, 0.021775808334350586, 0.02180601692199707, 0.022409280776977538, 0.021745376586914063, 0.021878496170043945, 0.021762624740600586, 0.021798240661621095, 0.021713792800903322, 0.022502368927001953, 0.022209087371826173, 0.021936384201049805, 0.02251907157897949, 0.02196895980834961, 0.02189583969116211, 0.02200371170043945, 0.02196201515197754, 0.022069599151611326, 0.021838207244873047, 0.02178188705444336, 0.021825439453125, 0.021981727600097655, 0.021876224517822264, 0.021911840438842773, 0.021831775665283205, 0.021934303283691406, 0.021788768768310547, 0.022029823303222656, 0.02191360092163086, 0.021923776626586913, 0.02194643211364746, 0.021936128616333008, 0.021978912353515626, 0.021827167510986328, 0.021870752334594727, 0.02183216094970703, 0.021954816818237306, 0.021793632507324218, 0.021882976531982422, 0.021918527603149413, 0.0224768009185791, 0.021968896865844727, 0.021902975082397462, 0.021807424545288084, 0.021907520294189454, 0.021813247680664064, 0.021921503067016603, 0.02186025619506836, 0.021963136672973633, 0.02186854362487793, 0.021962080001831055, 0.021889215469360353, 0.02187926483154297, 0.021844255447387696, 0.02197452735900879, 0.021884767532348633, 0.02186697578430176, 0.021902336120605468, 0.021844863891601562, 0.021952543258666992, 0.021966207504272462, 0.021983776092529297, 0.02190550422668457, 0.02188697624206543, 0.021872671127319335, 0.021970912933349608, 0.022222848892211915, 0.022142431259155274, 0.021955007553100585, 0.021946048736572264, 0.02192220878601074, 0.02207923126220703, 0.02195884895324707, 0.02205427169799805, 0.021919424057006837, 0.022012928009033202, 0.021970943450927736, 0.02195180892944336, 0.021994176864624022, 0.022144191741943358, 0.021983711242675782, 0.022071680068969726, 0.021968544006347655, 0.022036800384521483, 0.02188857650756836, 0.021962944030761718, 0.02190332794189453, 0.02197942352294922, 0.0218819522857666, 0.021986207962036132, 0.021999839782714845, 0.022008703231811522, 0.021965887069702147, 0.021851360321044924, 0.021880544662475587, 0.021886144638061523, 0.021815935134887696, 0.021880159378051756, 0.022235967636108397, 0.021979135513305666, 0.022707487106323244, 0.022114591598510744, 0.021854368209838868, 0.022073663711547852, 0.022022144317626953, 0.02203036880493164, 0.021961759567260743, 0.021865440368652345, 0.021862367630004882, 0.021958656311035156, 0.02183510398864746, 0.021975711822509767, 0.021868127822875977, 0.021843936920166014, 0.021788991928100587, 0.02186662483215332, 0.02181747245788574, 0.02200761604309082, 0.02188528060913086, 0.021845727920532226, 0.0218666877746582, 0.021910528182983398, 0.02187516784667969, 0.021956607818603514, 0.02194207954406738, 0.021983776092529297, 0.02188083267211914, 0.021960704803466798, 0.02205411148071289, 0.02184998321533203, 0.02193935966491699, 0.02197475242614746, 
0.021952192306518556, 0.02195465660095215, 0.021819648742675782, 0.021877824783325197, 0.022266815185546875, 0.022029760360717774, 0.021918272018432616, 0.02189926338195801, 0.022536191940307617, 0.023280672073364258, 0.02199577522277832, 0.02214681625366211, 0.02191423988342285, 0.021972864151000977, 0.021934560775756836, 0.021880064010620117, 0.021973760604858398, 0.021961919784545897, 0.021931039810180665, 0.021921567916870117, 0.02192793655395508, 0.021944320678710938, 0.023194271087646483, 0.022212928771972656, 0.022027551651000978, 0.02198739242553711, 0.021969472885131836, 0.021914911270141602, 0.02177507209777832, 0.021865888595581053, 0.021771135330200194, 0.021823200225830078, 0.021839487075805665, 0.021821823120117188, 0.02191279983520508, 0.022002111434936522, 0.02180668830871582, 0.02189356803894043, 0.02199750328063965, 0.021888608932495116, 0.021854303359985353, 0.02175766372680664, 0.021750112533569337, 0.0217258243560791, 0.02184601593017578, 0.021763935089111328, 0.021775968551635744, 0.021709375381469727, 0.02181769561767578, 0.021939487457275392, 0.02180748748779297, 0.02433020782470703, 0.02281884765625, 0.023905984878540038, 0.024772928237915038, 0.022245376586914063, 0.02204198455810547, 0.022207103729248046, 0.021946367263793946, 0.021798912048339843, 0.021853599548339844, 0.021949024200439454, 0.02192505645751953, 0.02184684753417969, 0.021816320419311523, 0.021776512145996095, 0.021740415573120116, 0.021770240783691407, 0.021820608139038085, 0.021768096923828126, 0.021805471420288085, 0.021774431228637696, 0.02171945571899414, 0.02191564750671387, 0.021835487365722658, 0.021923391342163086, 0.0219237117767334, 0.021779392242431642, 0.022019584655761718, 0.02194063949584961, 0.0218787841796875, 0.021871648788452148, 0.021816287994384766, 0.021910560607910155, 0.021978303909301757]",tokens/s,45.57863822874223,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,922.238976,12647.79264,0.0,12245.270528,12234.440192,s,1,7.284193359375,7.284193359375,0.0,7.284193359375,7.284193359375,7.284193359375,7.284193359375,[7.284193359375],,kWh,9.525952012487929e-06,1.043499587049343e-06,4.940003952007277e-06,1.550945555154455e-05,,MB,1200.820224,12930.90816,0.0,12517.900288,12440.746496,s,10,10.97425146484375,1.097425146484375,0.0040286052658744485,1.0974434814453125,1.1018480712890624,1.1021927124023438,1.1024684252929688,"[1.088208740234375, 1.094565673828125, 1.09539111328125, 1.0961654052734375, 1.0979058837890625, 1.0997518310546874, 1.0969810791015624, 1.100972900390625, 1.101771484375, 1.102537353515625]",tokens/s,233.2733132825519,kWh,3.207290536041227e-05,3.5359974814089007e-06,2.1303100375800325e-05,5.69120032176215e-05,tokens/kWh,4498172.363061989,MB,1218.248704,12993.82272,0.0,12580.814848,12543.681536,s,10,36.778388671875,3.6778388671875,0.002861439106842723,3.6787720947265625,3.680465380859375,3.680764428710938,3.6810036669921877,"[3.67152392578125, 
3.677550048828125, 3.677276611328125, 3.67951025390625, 3.673789794921875, 3.679032958984375, 3.68039892578125, 3.67851123046875, 3.6797314453125, 3.6810634765625]",tokens/s,17.12962483540696,kWh,0.000107451533052509,1.1851808840234275e-05,7.1285529250599e-05,0.00019058887114334228,tokens/kWh,330554.4527446074,,s,630,36.775527507781995,0.05837385318695554,0.0003081628977628544,0.05836395072937012,0.05868203964233398,0.05882460441589356,0.059637660789489746,"[0.05941881561279297, 0.05815529632568359, 0.057880577087402345, 0.05786777496337891, 0.05786387252807617, 0.057961280822753904, 0.05804851150512695, 0.05796361541748047, 0.05801446533203125, 0.05797219085693359, 0.05812294387817383, 0.05806694412231445, 0.05808355331420898, 0.058126113891601563, 0.058019840240478515, 0.05805696105957031, 0.058119136810302736, 0.0582498893737793, 0.058474464416503905, 0.05831900787353515, 0.0581602554321289, 0.058385345458984376, 0.05802918243408203, 0.058123264312744144, 0.058076736450195315, 0.05818563079833984, 0.05804870223999024, 0.05806505584716797, 0.05814281463623047, 0.05850732803344726, 0.05845590209960937, 0.0584664306640625, 0.05828799819946289, 0.058386238098144534, 0.05844806289672851, 0.058406688690185546, 0.058328414916992186, 0.05831564712524414, 0.058113887786865236, 0.05812854385375977, 0.05809971237182617, 0.0582529296875, 0.05846364974975586, 0.05837740707397461, 0.05827151870727539, 0.058224639892578124, 0.05823196792602539, 0.05830947113037109, 0.0586321907043457, 0.058578815460205075, 0.058427455902099606, 0.05842335891723633, 0.05872848129272461, 0.05855228805541992, 0.058363903045654295, 0.05834137725830078, 0.05840691375732422, 0.05836304092407227, 0.05845004653930664, 0.05839155197143555, 0.05833465576171875, 0.05853196716308594, 0.058628257751464845, 0.05979852676391602, 0.0585799674987793, 0.05817913436889648, 0.05801990509033203, 0.058173248291015625, 0.05798339080810547, 0.05803228759765625, 0.058060798645019535, 0.05795430374145508, 0.05813631820678711, 0.05789923095703125, 0.0581181755065918, 0.05808556747436523, 0.05807699203491211, 0.0580340461730957, 0.05817385482788086, 0.05822339248657227, 0.05831280136108399, 0.058163135528564454, 0.05884406280517578, 0.058173439025878904, 0.05815091323852539, 0.058218334197998045, 0.05819203186035156, 0.05831679916381836, 0.05878988647460937, 0.05831875228881836, 0.05832527923583984, 0.05827142333984375, 0.05819113540649414, 0.05823574447631836, 0.05840671920776367, 0.0583842887878418, 0.05852422332763672, 0.05836566543579102, 0.05837004852294922, 0.05903545761108398, 0.058656959533691405, 0.05869772720336914, 0.058531841278076174, 0.05833276748657226, 0.05820048141479492, 0.05819564819335937, 0.05824748611450195, 0.058449920654296876, 0.058531841278076174, 0.058468544006347656, 0.0583408317565918, 0.058302654266357425, 0.058620254516601564, 0.058537185668945314, 0.058479198455810545, 0.05846419143676758, 0.05857081604003906, 0.05851136016845703, 0.05855369567871094, 0.058575519561767576, 0.058568702697753904, 0.058480640411376954, 0.05842844772338867, 0.05826070404052734, 0.0585665283203125, 0.058565536499023435, 0.05964799880981445, 0.05820415878295898, 0.057968639373779295, 0.057929729461669924, 0.05793414306640625, 0.057968318939208986, 0.058054656982421876, 0.05811779022216797, 0.05806473541259766, 0.058233345031738284, 0.05816339111328125, 0.05814457702636719, 0.05801916885375977, 0.05809436798095703, 0.05792697525024414, 0.05812879943847656, 0.05843996810913086, 0.05865868759155273, 0.05826764678955078, 
0.05829983901977539, 0.05819247817993164, 0.05801161575317383, 0.05807206344604492, 0.05807820892333984, 0.05817497634887695, 0.05828857421875, 0.058664833068847656, 0.05854227066040039, 0.0582369270324707, 0.058988128662109375, 0.05835203170776367, 0.05847859191894531, 0.05876863861083984, 0.058620159149169924, 0.058464256286621094, 0.05845248031616211, 0.05824512100219727, 0.05818096160888672, 0.05840304183959961, 0.05828006362915039, 0.05838800048828125, 0.058325790405273435, 0.05828515243530273, 0.05815094375610352, 0.05824380874633789, 0.058495136260986326, 0.05837619018554688, 0.05843308639526367, 0.05830489730834961, 0.05874694442749023, 0.05855846405029297, 0.05852569580078125, 0.058482494354248044, 0.05865081787109375, 0.05853907012939453, 0.05851641464233399, 0.05849401473999023, 0.05859779357910156, 0.05857334518432617, 0.05858448028564453, 0.05868195343017578, 0.05867251205444336, 0.05854819107055664, 0.05956633758544922, 0.058423583984375, 0.058126945495605466, 0.05802102279663086, 0.05806729507446289, 0.058061153411865234, 0.05809542465209961, 0.05800508880615234, 0.05820476913452149, 0.05811609649658203, 0.05847615814208985, 0.057904640197753904, 0.05799411010742188, 0.05810790252685547, 0.057970687866210936, 0.05803036880493164, 0.058288894653320315, 0.05820134353637695, 0.058401599884033206, 0.058337249755859376, 0.05825616073608399, 0.05819948959350586, 0.05815135955810547, 0.058019489288330076, 0.05803830337524414, 0.05813100814819336, 0.05811203384399414, 0.058964160919189455, 0.058407966613769534, 0.05826412963867188, 0.058175582885742184, 0.05827129745483398, 0.05854051208496094, 0.05894540786743164, 0.05853724670410156, 0.05859833526611328, 0.058527328491210937, 0.05846384048461914, 0.058360702514648435, 0.05875251388549805, 0.05868281555175781, 0.058417598724365236, 0.05837910461425781, 0.05860086441040039, 0.05909056091308594, 0.05912031936645508, 0.05856620788574219, 0.05861423873901367, 0.058684608459472654, 0.058718463897705075, 0.05851510238647461, 0.05858924865722656, 0.05854246520996094, 0.058527328491210937, 0.05849129486083984, 0.05865619277954102, 0.058510337829589844, 0.058410240173339845, 0.05834121704101562, 0.05847849655151367, 0.05842227172851563, 0.05842739105224609, 0.05834735870361328, 0.05949030303955078, 0.05822054290771484, 0.057933887481689456, 0.05784979248046875, 0.05797452926635742, 0.057960224151611325, 0.05806512069702149, 0.05799318313598633, 0.0579516487121582, 0.057979774475097653, 0.05805395126342774, 0.05803852844238281, 0.05812179183959961, 0.05804531097412109, 0.058025760650634764, 0.05803193664550781, 0.058162689208984375, 0.058160030364990234, 0.05814262390136719, 0.05851964950561524, 0.058257278442382814, 0.05825753784179687, 0.05820415878295898, 0.05807923126220703, 0.057987071990966796, 0.05809132766723633, 0.05808297729492187, 0.058232574462890624, 0.05818243026733398, 0.058162654876708984, 0.058485279083251955, 0.058396831512451175, 0.05851119995117188, 0.058689537048339846, 0.05855846405029297, 0.05848678588867187, 0.05838643264770508, 0.05868544006347656, 0.0588551025390625, 0.05846457672119141, 0.05831612777709961, 0.05825305557250977, 0.05841747283935547, 0.058413311004638674, 0.05841267013549805, 0.05826224136352539, 0.05822991943359375, 0.058362049102783205, 0.05849980926513672, 0.05857478332519531, 0.058423297882080075, 0.05836547088623047, 0.05864313507080078, 0.05855209732055664, 0.05848710250854492, 0.0582817268371582, 0.05860755157470703, 0.058482177734375, 0.05839923095703125, 0.05821583938598633, 
0.05829878234863281, 0.05862995147705078, 0.05862153625488281, 0.05970937728881836, 0.05826921463012695, 0.057965087890625, 0.05794611358642578, 0.05784892654418945, 0.058034431457519534, 0.058108577728271486, 0.05811180877685547, 0.05795244979858399, 0.058009502410888675, 0.05811004638671875, 0.05802572631835937, 0.058018047332763674, 0.05796217727661133, 0.05791686248779297, 0.058103809356689455, 0.058255775451660156, 0.058282463073730466, 0.058108062744140626, 0.05850300979614258, 0.05826355361938477, 0.05818598556518555, 0.05814790344238281, 0.0582624626159668, 0.05819776153564453, 0.05812176132202149, 0.05817398452758789, 0.05835769653320313, 0.058386272430419925, 0.058362014770507814, 0.05834668731689453, 0.05822956848144531, 0.05819744110107422, 0.05843190383911133, 0.05885507202148438, 0.05879759979248047, 0.058956768035888674, 0.05854003143310547, 0.058236991882324216, 0.058258846282958986, 0.05882246398925781, 0.05847635269165039, 0.05838735961914063, 0.058361312866210935, 0.05855196762084961, 0.05862928009033203, 0.05868931198120117, 0.05828796768188477, 0.058549407958984376, 0.05864051055908203, 0.05859110260009766, 0.05870278549194336, 0.058929153442382816, 0.05867724609375, 0.059017215728759766, 0.05840012741088867, 0.05841132736206055, 0.05875334548950195, 0.05862809753417969, 0.058576000213623046, 0.05871440124511719, 0.05876591873168945, 0.058638046264648434, 0.059690593719482425, 0.05844630432128906, 0.05807068634033203, 0.0578873291015625, 0.057914142608642576, 0.057981857299804686, 0.05799702453613281, 0.0580365104675293, 0.058048225402832034, 0.05821273422241211, 0.058096767425537106, 0.05803702545166016, 0.057955711364746094, 0.05801433563232422, 0.057995040893554686, 0.058087646484375, 0.05823897552490234, 0.05852988815307617, 0.058549503326416015, 0.05849257659912109, 0.05803238296508789, 0.05797286224365234, 0.05795446395874024, 0.0581165771484375, 0.058238433837890624, 0.05847849655151367, 0.05832511901855469, 0.05826201629638672, 0.05844297790527344, 0.058390880584716795, 0.05830905532836914, 0.058317951202392575, 0.05851615905761719, 0.05850457763671875, 0.05845235061645508, 0.05866745758056641, 0.05887295913696289, 0.05870822525024414, 0.058552608489990235, 0.05840256118774414, 0.05859388732910156, 0.05834447860717774, 0.0584672966003418, 0.05845923233032226, 0.058372737884521485, 0.05835091018676758, 0.05961235046386719, 0.05842063903808594, 0.058642814636230466, 0.05867929458618164, 0.058644126892089844, 0.05875542449951172, 0.05869724655151367, 0.05857942581176758, 0.058801567077636716, 0.05863238525390625, 0.058515903472900394, 0.0584455680847168, 0.058671329498291014, 0.05872617721557617, 0.05849929428100586, 0.05871785736083984, 0.0586591682434082, 0.06005753707885742, 0.05839734268188477, 0.058001407623291014, 0.05794211196899414, 0.05798880004882812, 0.05802959823608399, 0.05807388687133789, 0.058066753387451174, 0.05814076614379883, 0.05805670547485352, 0.05811775970458984, 0.05807164764404297, 0.05800732803344726, 0.05808867263793945, 0.05796739196777344, 0.0580055046081543, 0.05825680160522461, 0.05849967956542969, 0.05838438415527344, 0.058323070526123046, 0.058077056884765624, 0.0580239372253418, 0.058019840240478515, 0.058221729278564456, 0.058418304443359374, 0.05861529541015625, 0.058456287384033204, 0.05827596664428711, 0.05831452941894531, 0.05827388763427734, 0.058140670776367184, 0.058394432067871094, 0.058512577056884764, 0.05854294586181641, 0.05835808181762695, 0.05857001495361328, 0.058497409820556644, 0.05841241455078125, 
0.05823385620117188, 0.05826540756225586, 0.05856774520874024, 0.05843180847167969, 0.058522174835205075, 0.05837420654296875, 0.05840281677246094, 0.05867248153686523, 0.058561374664306644, 0.0584409294128418, 0.058918689727783205, 0.05868556976318359, 0.05850313568115234, 0.05882556915283203, 0.05867507171630859, 0.05846015930175781, 0.05849679946899414, 0.05855392074584961, 0.05852841567993164, 0.05846220779418945, 0.05840281677246094, 0.05881856155395508, 0.05850931167602539, 0.058587265014648435, 0.05870934295654297, 0.05996783828735352, 0.05842243194580078, 0.058143585205078126, 0.05792768096923828, 0.05800960159301758, 0.05800076675415039, 0.05811199951171875, 0.05799568176269531, 0.05808355331420898, 0.05813849639892578, 0.058064350128173826, 0.05810412979125976, 0.05826492691040039, 0.058256385803222656, 0.05811321640014649, 0.058225505828857424, 0.05842326354980469, 0.0583732795715332, 0.058267967224121094, 0.05813711929321289, 0.058331134796142575, 0.0582369270324707, 0.058087646484375, 0.05819952011108399, 0.058181888580322264, 0.05825852966308594, 0.05825609588623047, 0.05836006546020508, 0.05862105560302734, 0.058495616912841795, 0.05830819320678711, 0.058315231323242185, 0.05840643310546875, 0.05858303833007812, 0.05860444641113281, 0.05849055862426758, 0.058422847747802736, 0.05853756713867188, 0.058399391174316403, 0.05837027359008789, 0.05817756652832031, 0.05837145614624024, 0.05858902359008789, 0.0584793930053711, 0.05859123229980469, 0.05843059158325195, 0.05856345748901367, 0.058615425109863284, 0.05847187042236328, 0.05865158462524414, 0.0586506233215332, 0.05865881729125977, 0.058480415344238285, 0.0586539192199707, 0.05891788864135742, 0.05847244644165039, 0.05898188781738281, 0.058542015075683594, 0.05853241729736328, 0.058560096740722656, 0.058500606536865236, 0.0584815673828125, 0.058597377777099606, 0.05973196792602539, 0.05834137725830078, 0.05805670547485352, 0.05801311874389648, 0.05789728164672851, 0.05789510345458984, 0.05798678588867188, 0.05814540863037109, 0.05816716766357422, 0.058149856567382814, 0.058024608612060546, 0.05808560180664062, 0.057912574768066404, 0.05796121597290039, 0.05810995101928711, 0.05827353668212891, 0.058406303405761716, 0.058420063018798825, 0.05836399841308594, 0.058300159454345704, 0.05827824020385742, 0.05817939376831055, 0.05854185485839844, 0.058251487731933595, 0.05827104187011719, 0.05832387161254883, 0.0581855354309082, 0.058175617218017575, 0.058185791015625, 0.058506561279296876, 0.05835532760620117, 0.0585081901550293, 0.05827372741699219, 0.058470302581787106, 0.05866915130615234, 0.05852137756347656, 0.05839894485473633, 0.058385665893554685, 0.05882342529296875, 0.05855369567871094, 0.05841100692749023, 0.05834124755859375, 0.05852668762207031, 0.058619712829589846, 0.05838643264770508, 0.058363903045654295, 0.0587081298828125, 0.058596321105957035, 0.05861471939086914, 0.05888608169555664, 0.05905203247070313, 0.05881996917724609, 0.058768001556396485, 0.058595584869384765, 0.05867705535888672, 0.0591847038269043, 0.05888832092285156, 0.05862630462646484, 0.058437534332275394, 0.05866096115112305, 0.05862809753417969, 0.058480640411376954, 0.05841929626464844]",tokens/s,17.13095753328588,, 
float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 892, in __init__ self.transformer = GPTJModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 646, in __init__ self.h = nn.ModuleList([GPTJBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 646, in self.h = nn.ModuleList([GPTJBlock(config) for _ in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 453, in __init__ self.mlp = GPTJMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 433, in __init__ self.fc_in = nn.Linear(embed_dim, intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ 
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 14.12 MiB is free. Process 186427 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 6.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.028608,576.585728,0.0,174.063616,172.57984,s,1,7.078572265625,7.078572265625,0.0,7.078572265625,7.078572265625,7.078572265625,7.078572265625,[7.078572265625],,kWh,4.176345512526799e-06,4.5082985979359654e-07,9.141673979889964e-07,5.541342770309391e-06,,MB,1313.902592,649.986048,0.0,234.881024,215.589888,s,31,0.18553411102294926,0.0059849713233209425,0.00015685441251964689,0.005951295852661133,0.006066463947296143,0.0061711201667785645,0.006605356740951538,"[0.0062576642036437985, 0.005898367881774902, 0.005957280158996582, 0.0067543678283691405, 0.005931327819824219, 0.005977536201477051, 0.0058895998001098635, 0.005928512096405029, 0.0059316158294677734, 0.005890143871307373, 0.005879039764404297, 0.005951295852661133, 0.005955391883850097, 0.005938176155090332, 0.00598521614074707, 0.005962399959564209, 0.0059705920219421385, 0.005974688053131103, 0.005925504207611084, 0.005954559803009033, 0.0059192957878112795, 0.006066463947296143, 0.005953983783721924, 0.005903264045715332, 0.005944704055786133, 0.005931807994842529, 0.005938687801361084, 0.0059222722053527835, 0.0059532799720764164, 0.0060845761299133305, 0.0060024957656860355]",tokens/s,42773.80561582217,kWh,1.7482521564812758e-07,1.9280265410318092e-08,8.427058868664007e-08,2.783760697450857e-07,tokens/kWh,919619277.0248681,MB,1346.650112,654.180352,0.0,239.075328,215.592448,s,31,10.14097814941406,0.32712832740045356,0.0023233006319843745,0.32631405639648436,0.3289915466308594,0.3302656402587891,0.33511358642578126,"[0.33685308837890626, 0.3277040710449219, 0.32665084838867187, 0.32587371826171874, 0.32631405639648436, 0.3258727722167969, 0.3289915466308594, 0.32511080932617187, 0.32491461181640624, 0.3256078796386719, 0.32496270751953127, 0.3285772705078125, 0.32586465454101565, 0.32647906494140627, 0.3269606018066406, 0.3252958679199219, 0.3255294189453125, 0.32860928344726564, 0.32609112548828123, 0.3294765319824219, 0.3285175170898437, 0.3262732238769531, 0.3280379943847656, 0.3278991394042969, 0.3253768310546875, 
0.3252130126953125, 0.3260997314453125, 0.32560269165039063, 0.3274004821777344, 0.33105474853515626, 0.3277628479003906]",tokens/s,192.5849726944578,kWh,9.364580192818662e-06,1.0327438983386163e-06,3.485956281829244e-06,1.3883280372986522e-05,tokens/kWh,4537832.436387487,,s,1953,10.126910752296455,0.005185310165026342,0.00013126803385241824,0.005157919883728027,0.0052553408622741694,0.0053215935707092285,0.00574660852432251,"[0.005431327819824219, 0.00535859203338623, 0.005612415790557861, 0.005517727851867675, 0.0053367681503295895, 0.0054091839790344235, 0.005315584182739258, 0.005298431873321533, 0.005321055889129639, 0.005314720153808594, 0.0052713918685913085, 0.005342304229736328, 0.005348256111145019, 0.005291647911071777, 0.005326848030090332, 0.005253503799438477, 0.005209792137145996, 0.005246912002563477, 0.005228608131408691, 0.005226816177368164, 0.005224607944488526, 0.005308256149291992, 0.005211679935455322, 0.005212639808654785, 0.005248703956604004, 0.005230688095092774, 0.00522208023071289, 0.0056878399848937986, 0.006809919834136963, 0.006872896194458008, 0.006924287796020508, 0.006003583908081055, 0.005319295883178711, 0.0052302079200744625, 0.00528495979309082, 0.005312160015106201, 0.005150368213653564, 0.005171679973602295, 0.005166975975036621, 0.005181087970733643, 0.005134399890899658, 0.005168735980987549, 0.005152671813964844, 0.005138495922088623, 0.005147039890289307, 0.005175776004791259, 0.0051976318359375, 0.005123712062835693, 0.005158336162567139, 0.005256159782409668, 0.005181344032287598, 0.005136447906494141, 0.00520300817489624, 0.005228928089141846, 0.005126751899719238, 0.005189888000488281, 0.005146175861358643, 0.00514243221282959, 0.005117663860321045, 0.005166719913482666, 0.005262527942657471, 0.005191487789154053, 0.005222367763519287, 0.005081823825836182, 0.005302271842956543, 0.005123295783996582, 0.005161632061004639, 0.00515283203125, 0.005174367904663086, 0.005112800121307373, 0.005169151782989502, 0.005244639873504639, 0.005128479957580566, 0.005148672103881836, 0.005146880149841309, 0.005158656120300293, 0.005113311767578125, 0.0051799359321594235, 0.005160672187805176, 0.005145088195800781, 0.005166336059570313, 0.0051594557762146, 0.005163072109222412, 0.005133920192718506, 0.005195295810699463, 0.005180223941802979, 0.005138432025909424, 0.005168960094451904, 0.005349567890167236, 0.005215712070465088, 0.005146751880645752, 0.005291840076446533, 0.005141024112701416, 0.005374015808105469, 0.005221888065338135, 0.005559840202331543, 0.005253151893615722, 0.005277696132659912, 0.005147583961486816, 0.005243072032928467, 0.005189504146575928, 0.005173183917999267, 0.005748672008514404, 0.005232960224151611, 0.005121695995330811, 0.005167168140411377, 0.005222432136535645, 0.005168159961700439, 0.005157279968261719, 0.005112095832824707, 0.005262944221496582, 0.005116799831390381, 0.005134079933166504, 0.005132319927215576, 0.005152256011962891, 0.00516764783859253, 0.005117631912231445, 0.005194272041320801, 0.005195551872253418, 0.005165056228637695, 0.005179391860961914, 0.005146624088287354, 0.005238399982452392, 0.005151072025299072, 0.005212192058563232, 0.005163008213043213, 0.005125247955322266, 0.005164000034332275, 0.005285791873931885, 0.005204031944274903, 0.005182943820953369, 0.005147103786468506, 0.005197152137756348, 0.005174208164215088, 0.005131999969482422, 0.005212160110473632, 0.005138432025909424, 0.005169023990631103, 0.005122176170349121, 0.005181632041931152, 0.005176544189453125, 0.005134943962097168, 
0.005204192161560059, 0.005127007961273193, 0.005446400165557861, 0.005292223930358887, 0.005173344135284424, 0.005204063892364502, 0.005143583774566651, 0.005215007781982422, 0.005197824001312256, 0.005141664028167725, 0.005180255889892578, 0.005233856201171875, 0.005205088138580323, 0.005149439811706543, 0.005178336143493652, 0.005136447906494141, 0.005157983779907226, 0.00525603199005127, 0.005203968048095703, 0.005152128219604492, 0.00516319990158081, 0.005312160015106201, 0.005132800102233887, 0.005177631855010987, 0.005164608001708985, 0.005249631881713867, 0.005166207790374756, 0.005157248020172119, 0.005202271938323975, 0.005114016056060791, 0.005168831825256347, 0.005138751983642578, 0.005139391899108886, 0.0051435518264770505, 0.005189023971557617, 0.005147103786468506, 0.005126143932342529, 0.0052080960273742675, 0.005119999885559082, 0.005154016017913818, 0.005149695873260498, 0.005187359809875489, 0.005124095916748047, 0.005109983921051026, 0.00525494384765625, 0.005127679824829101, 0.005130208015441894, 0.005110208034515381, 0.005322400093078613, 0.005179359912872315, 0.005151072025299072, 0.0051588802337646485, 0.005191775798797607, 0.005158815860748291, 0.005144544124603271, 0.005185152053833008, 0.0051595520973205565, 0.00517087984085083, 0.005188992023468017, 0.005167840003967285, 0.00512175989151001, 0.0051298880577087405, 0.005282144069671631, 0.005187871932983398, 0.005144288063049316, 0.005236447811126709, 0.005112703800201416, 0.005127583980560303, 0.0051669120788574215, 0.005202400207519531, 0.005179391860961914, 0.0051868162155151365, 0.005191904067993164, 0.005249695777893066, 0.005141471862792969, 0.005161856174468994, 0.005162271976470947, 0.005153759956359864, 0.005158432006835937, 0.005183743953704834, 0.005126143932342529, 0.005131360054016113, 0.005363647937774658, 0.005145792007446289, 0.005150015830993652, 0.005195424079895019, 0.005196479797363282, 0.005144447803497315, 0.0051263999938964844, 0.005178624153137207, 0.005126912117004395, 0.005105023860931397, 0.005182079792022705, 0.005193727970123291, 0.005150847911834717, 0.00511740779876709, 0.005216608047485352, 0.005157023906707764, 0.005111711978912354, 0.005105663776397705, 0.005156544208526612, 0.005200128078460694, 0.0051487360000610355, 0.005170656204223633, 0.005125984191894532, 0.005124800205230713, 0.005096960067749024, 0.0051512961387634275, 0.00514412784576416, 0.005112480163574218, 0.005062335968017578, 0.005222720146179199, 0.005154111862182617, 0.005130879878997803, 0.005124320030212402, 0.0052119998931884765, 0.005116096019744873, 0.0051311998367309574, 0.005135231971740722, 0.00521830415725708, 0.005142496109008789, 0.005141791820526123, 0.005145343780517578, 0.005127168178558349, 0.0051344637870788575, 0.005131296157836914, 0.005149888038635254, 0.005132512092590332, 0.005130239963531494, 0.005175424098968506, 0.005137919902801514, 0.005139264106750488, 0.005179615974426269, 0.005181215763092041, 0.005185535907745361, 0.005265408039093018, 0.00520143985748291, 0.005177824020385742, 0.005152607917785644, 0.005285183906555176, 0.0053558402061462404, 0.0051446080207824705, 0.0052444801330566405, 0.005294400215148926, 0.005174111843109131, 0.0051586880683898925, 0.0052674560546875, 0.005144576072692871, 0.005152768135070801, 0.005175295829772949, 0.005255392074584961, 0.005113632202148438, 0.005453824043273926, 0.0051363840103149415, 0.005203936100006103, 0.005144927978515625, 0.005160096168518066, 0.005154496192932129, 0.0051327037811279294, 0.005171264171600342, 0.005145088195800781, 
0.005133440017700195, 0.005176064014434815, 0.005179391860961914, 0.005163008213043213, 0.0051365761756896975, 0.005242688179016113, 0.005130239963531494, 0.0051099519729614256, 0.005154623985290528, 0.005137472152709961, 0.005139391899108886, 0.005130239963531494, 0.005068416118621826, 0.005291808128356934, 0.00522105598449707, 0.005205344200134277, 0.005150400161743164, 0.0051823358535766605, 0.005128543853759765, 0.005117599964141846, 0.005156191825866699, 0.00512886381149292, 0.005126143932342529, 0.005131328105926513, 0.005235648155212403, 0.005098847866058349, 0.005159039974212646, 0.005154528141021728, 0.005128191947937012, 0.005124927997589112, 0.00521830415725708, 0.005156320095062256, 0.005263679981231689, 0.005243103981018066, 0.005336544036865234, 0.005166816234588623, 0.005144768238067627, 0.005180031776428223, 0.005152768135070801, 0.00521727991104126, 0.005231616020202637, 0.00510975980758667, 0.005139935970306397, 0.005093920230865479, 0.0051662721633911135, 0.005245759963989258, 0.005128191947937012, 0.005181439876556396, 0.005210015773773193, 0.005169248104095459, 0.005259200096130371, 0.005178944110870361, 0.0051288318634033206, 0.005134175777435303, 0.005369887828826904, 0.005126143932342529, 0.005128223896026612, 0.005140448093414306, 0.005127488136291504, 0.005106400012969971, 0.005117440223693847, 0.005154751777648926, 0.005157695770263672, 0.005117663860321045, 0.005163008213043213, 0.0051404800415039064, 0.005138207912445068, 0.005144159793853759, 0.0051569280624389644, 0.005171775817871094, 0.005126016139984131, 0.005149087905883789, 0.005137856006622314, 0.005107999801635742, 0.00517855978012085, 0.005033728122711182, 0.0051560959815979005, 0.0052848000526428225, 0.005129407882690429, 0.00511846399307251, 0.005151008129119873, 0.005119616031646728, 0.005110400199890137, 0.0051669120788574215, 0.005120031833648682, 0.005104703903198242, 0.005145023822784424, 0.005193408012390137, 0.005116256237030029, 0.005101376056671143, 0.005136032104492188, 0.005142816066741943, 0.00523360013961792, 0.005113632202148438, 0.005203616142272949, 0.005116256237030029, 0.005103199958801269, 0.00514025592803955, 0.005146719932556152, 0.00515337610244751, 0.005162271976470947, 0.005155488014221192, 0.005219840049743653, 0.00512665605545044, 0.005148191928863525, 0.005157343864440918, 0.005119999885559082, 0.00512995195388794, 0.005914527893066407, 0.006467648029327392, 0.006301792144775391, 0.006526527881622314, 0.005220352172851562, 0.005169824123382568, 0.005175295829772949, 0.005115839958190918, 0.005128255844116211, 0.005123424053192139, 0.0051411519050598145, 0.005122047901153564, 0.0051036162376403805, 0.005183487892150879, 0.005119008064270019, 0.005145567893981934, 0.005107935905456543, 0.005203743934631347, 0.005090303897857666, 0.0051077442169189455, 0.005123231887817383, 0.005125951766967773, 0.005146624088287354, 0.005107711791992187, 0.0051485118865966795, 0.005123807907104492, 0.005091263771057129, 0.005122560024261475, 0.005195839881896973, 0.0051313920021057126, 0.005029888153076172, 0.0052015681266784665, 0.005157536029815674, 0.005112959861755371, 0.005104000091552734, 0.0051099519729614256, 0.005191743850708008, 0.0051077442169189455, 0.00513424015045166, 0.00513862419128418, 0.005142335891723633, 0.005165056228637695, 0.005115903854370117, 0.005177055835723877, 0.0051099519729614256, 0.005093183994293213, 0.005135744094848633, 0.005114783763885498, 0.005103392124176025, 0.0051181759834289554, 0.005189536094665527, 0.005142623901367187, 0.005154111862182617, 
0.005153759956359864, 0.005152480125427246, 0.005285088062286377, 0.0051612801551818846, 0.005173727989196777, 0.005115903854370117, 0.005119935989379883, 0.005345439910888672, 0.005254591941833496, 0.005136864185333252, 0.005148416042327881, 0.005289728164672851, 0.005140672206878662, 0.0051444158554077146, 0.0052269759178161625, 0.005154816150665284, 0.005138432025909424, 0.005189727783203125, 0.005211232185363769, 0.005145408153533935, 0.005141791820526123, 0.005233407974243164, 0.0051363520622253415, 0.005145696163177491, 0.005155807971954346, 0.0051262397766113285, 0.005133823871612549, 0.005155456066131592, 0.005189568042755127, 0.005129983901977539, 0.005146656036376953, 0.005146912097930908, 0.005148608207702637, 0.005174367904663086, 0.005122623920440674, 0.005113984107971191, 0.005128191947937012, 0.005127583980560303, 0.005140960216522217, 0.005123295783996582, 0.005123392105102539, 0.005145535945892334, 0.005117311954498291, 0.005199808120727539, 0.005126848220825196, 0.0051380801200866695, 0.005116256237030029, 0.005157055854797363, 0.005117023944854736, 0.005135104179382324, 0.005173535823822022, 0.00511353588104248, 0.005128064155578613, 0.005105984210968017, 0.00517299222946167, 0.005160096168518066, 0.005118271827697754, 0.00515337610244751, 0.005095583915710449, 0.005145440101623535, 0.00511078405380249, 0.00517855978012085, 0.005088031768798828, 0.005414944171905517, 0.005187583923339844, 0.0052633600234985355, 0.005144576072692871, 0.005162528038024902, 0.005233119964599609, 0.005138432025909424, 0.005162559986114502, 0.005213696002960205, 0.005112768173217774, 0.00514572811126709, 0.0052509760856628415, 0.005151711940765381, 0.005121376037597656, 0.005151391983032227, 0.005248928070068359, 0.005119808197021484, 0.005136896133422852, 0.005139359951019287, 0.005112703800201416, 0.005109439849853516, 0.005140768051147461, 0.005148159980773926, 0.0051298880577087405, 0.005094079971313476, 0.005150688171386719, 0.005142752170562744, 0.0051077442169189455, 0.0051337919235229495, 0.005142816066741943, 0.00512227201461792, 0.005163008213043213, 0.005158912181854248, 0.005142528057098389, 0.005137663841247558, 0.0051370558738708496, 0.005150815963745117, 0.0051363840103149415, 0.005119135856628418, 0.00516809606552124, 0.005016831874847412, 0.00516480016708374, 0.005195775985717774, 0.005174592018127442, 0.0051660799980163576, 0.005160639762878418, 0.005193984031677246, 0.005113759994506836, 0.005187424182891845, 0.005122015953063965, 0.005140511989593506, 0.005166719913482666, 0.005197408199310302, 0.0051470079421997075, 0.005185184001922608, 0.0051660480499267575, 0.005101344108581543, 0.0051298561096191405, 0.00513801622390747, 0.0051669120788574215, 0.005132512092590332, 0.005134751796722412, 0.005155168056488037, 0.005136735916137695, 0.0051394238471984864, 0.005194431781768799, 0.005144576072692871, 0.005214208126068115, 0.005138432025909424, 0.00516096019744873, 0.00512556791305542, 0.005130208015441894, 0.005164703845977783, 0.005137343883514404, 0.005138175964355469, 0.005158720016479493, 0.0051493759155273435, 0.005109504222869873, 0.005158080101013184, 0.005140799999237061, 0.005184000015258789, 0.00511084794998169, 0.0051942400932312015, 0.0051164479255676265, 0.005140384197235107, 0.005123680114746094, 0.005744480133056641, 0.005425727844238281, 0.005248511791229248, 0.005147488117218018, 0.005108736038208008, 0.00512886381149292, 0.00516051197052002, 0.005092927932739258, 0.005134528160095215, 0.005107967853546142, 0.00521449613571167, 0.0051110081672668456, 
0.005114816188812256, 0.00516864013671875, 0.0051140480041503905, 0.005134208202362061, 0.005104063987731933, 0.005019936084747314, 0.005168320178985596, 0.005230112075805664, 0.005114880084991455, 0.005135583877563477, 0.00514086389541626, 0.005128608226776123, 0.0050869441032409665, 0.005171487808227539, 0.005146143913269043, 0.0051177282333374026, 0.005153632164001465, 0.005143871784210205, 0.005159679889678955, 0.00511190414428711, 0.005177279949188232, 0.005134208202362061, 0.00508838415145874, 0.005128960132598877, 0.005130239963531494, 0.005134592056274414, 0.005094431877136231, 0.005165791988372802, 0.005142720222473145, 0.0050993280410766605, 0.005123680114746094, 0.005150784015655518, 0.005176959991455078, 0.005114655971527099, 0.005162943840026855, 0.005119264125823975, 0.005128928184509277, 0.005125152111053467, 0.005354080200195312, 0.005134047985076905, 0.005208352088928223, 0.005197792053222656, 0.005109920024871826, 0.0051468157768249515, 0.005084256172180176, 0.0051775679588317875, 0.00521292781829834, 0.005121119976043701, 0.005188704013824463, 0.005185567855834961, 0.005150496006011963, 0.005148672103881836, 0.005150720119476319, 0.005139808177947998, 0.00518390417098999, 0.005218560218811035, 0.00524889612197876, 0.0051645121574401855, 0.00516323184967041, 0.005169600009918213, 0.005154816150665284, 0.005137983798980713, 0.005189343929290772, 0.005196447849273682, 0.0051303358078002926, 0.005140384197235107, 0.005130303859710693, 0.00514796781539917, 0.005057983875274659, 0.005148928165435791, 0.005181759834289551, 0.005384543895721436, 0.0051680002212524416, 0.0051987838745117185, 0.00514031982421875, 0.005173215866088867, 0.005127232074737548, 0.005209184169769287, 0.005150591850280762, 0.005152768135070801, 0.0051773438453674315, 0.0051339840888977055, 0.005133823871612549, 0.0051123518943786625, 0.005209887981414795, 0.005101215839385986, 0.0051559038162231445, 0.005216063976287842, 0.005197824001312256, 0.00515283203125, 0.005373983860015869, 0.006244256019592285, 0.005146048069000244, 0.005376575946807861, 0.005482687950134278, 0.005131552219390869, 0.005326528072357178, 0.005227359771728515, 0.00517900800704956, 0.005208447933197021, 0.005150015830993652, 0.00522105598449707, 0.005192800045013428, 0.005251999855041504, 0.005142528057098389, 0.005144447803497315, 0.005185664176940918, 0.005163008213043213, 0.005203423976898193, 0.00525980806350708, 0.005132351875305176, 0.005730239868164062, 0.005189311981201172, 0.00528934383392334, 0.00513651180267334, 0.005196288108825684, 0.005159232139587402, 0.005124095916748047, 0.005138432025909424, 0.0051561279296875, 0.005124832153320312, 0.005116960048675537, 0.005168159961700439, 0.005156703948974609, 0.00512608003616333, 0.005126368045806884, 0.005158912181854248, 0.005180511951446533, 0.005118271827697754, 0.005161503791809082, 0.0051404800415039064, 0.005068863868713379, 0.005332799911499023, 0.005159039974212646, 0.005328896045684814, 0.005154816150665284, 0.005459775924682617, 0.0051775360107421875, 0.005163008213043213, 0.005126143932342529, 0.005144927978515625, 0.005160607814788818, 0.005139647960662842, 0.0051289920806884765, 0.005195519924163818, 0.005183775901794433, 0.005139520168304443, 0.005174208164215088, 0.005152768135070801, 0.005149759769439697, 0.005153279781341552, 0.005209760189056396, 0.0051784000396728515, 0.005140223979949951, 0.005171328067779541, 0.005139904022216797, 0.005176799774169922, 0.005135007858276367, 0.005158624172210693, 0.005233248233795166, 0.005160639762878418, 
0.005154592037200928, 0.0051339840888977055, 0.005140384197235107, 0.0051370558738708496, 0.005178847789764404, 0.00512937593460083, 0.005133471965789795, 0.005170048236846924, 0.005166751861572266, 0.005208064079284668, 0.005130112171173096, 0.005212287902832031, 0.005118048191070557, 0.005119103908538819, 0.00515561580657959, 0.005128191947937012, 0.005127840042114258, 0.005124576091766357, 0.005215424060821533, 0.005134816169738769, 0.005132512092590332, 0.005209727764129639, 0.005165440082550049, 0.0051138558387756345, 0.005177055835723877, 0.00517139196395874, 0.0051344318389892575, 0.005123295783996582, 0.005169343948364258, 0.005157472133636475, 0.005122047901153564, 0.005144383907318115, 0.005181632041931152, 0.005060895919799805, 0.005101119995117188, 0.005112383842468262, 0.005224448204040527, 0.005126143932342529, 0.005125728130340576, 0.005105343818664551, 0.005169888019561767, 0.005119999885559082, 0.0051560959815979005, 0.005144991874694824, 0.0051636481285095214, 0.005135615825653076, 0.005159135818481445, 0.0052709121704101565, 0.005147520065307617, 0.005127520084381104, 0.005139103889465332, 0.005137887954711914, 0.005138495922088623, 0.005164703845977783, 0.005175551891326904, 0.005150847911834717, 0.0051329278945922855, 0.005152575969696045, 0.005164768218994141, 0.005149184226989746, 0.005267231941223144, 0.005184512138366699, 0.005147552013397217, 0.0051344318389892575, 0.005195775985717774, 0.005138432025909424, 0.005375743865966797, 0.005193984031677246, 0.005212160110473632, 0.0051561598777771, 0.0051493759155273435, 0.005195551872253418, 0.005168992042541504, 0.005158751964569092, 0.005314144134521484, 0.0052581758499145505, 0.005148799896240235, 0.005215968132019043, 0.0051521601676940915, 0.005169919967651367, 0.005148672103881836, 0.0052080960273742675, 0.00516707181930542, 0.005183487892150879, 0.005218592166900635, 0.005180960178375244, 0.005247392177581787, 0.005187359809875489, 0.005224575996398926, 0.005195456027984619, 0.005166687965393066, 0.005239327907562256, 0.0051569280624389644, 0.005148672103881836, 0.005285120010375977, 0.005173984050750732, 0.005112063884735107, 0.005157504081726074, 0.005138432025909424, 0.0051875200271606445, 0.005147935867309571, 0.005157919883728027, 0.005181344032287598, 0.005156703948974609, 0.005171264171600342, 0.0051567997932434086, 0.005206016063690186, 0.005124320030212402, 0.005221792221069336, 0.005202303886413575, 0.0051467838287353514, 0.0051660799980163576, 0.00516812801361084, 0.005223328113555908, 0.005123007774353028, 0.005148159980773926, 0.005214719772338867, 0.005123807907104492, 0.0051571521759033205, 0.00514576005935669, 0.005174111843109131, 0.005122047901153564, 0.005140031814575195, 0.005198272228240966, 0.0051363840103149415, 0.005537792205810547, 0.005283040046691894, 0.005185408115386963, 0.005138944149017334, 0.005216383934020996, 0.005163519859313965, 0.005316383838653564, 0.005197824001312256, 0.005187424182891845, 0.0051693120002746585, 0.005123136043548584, 0.005227680206298828, 0.005158432006835937, 0.005148928165435791, 0.005269536018371582, 0.00516099214553833, 0.0051641278266906735, 0.005122911930084228, 0.0054354238510131835, 0.0051313920021057126, 0.00514083194732666, 0.005184000015258789, 0.00516707181930542, 0.005141759872436523, 0.005139232158660889, 0.005187583923339844, 0.0051404800415039064, 0.005134335994720459, 0.005400576114654541, 0.005260479927062988, 0.005136256217956543, 0.0051652159690856935, 0.005137184143066406, 0.005132544040679932, 0.005044288158416748, 
0.005112063884735107, 0.0052247681617736816, 0.005283552169799805, 0.005235104084014892, 0.0051560640335083004, 0.005164639949798584, 0.005132895946502685, 0.005124256134033203, 0.005181824207305908, 0.005181215763092041, 0.005125311851501465, 0.005147615909576416, 0.005131648063659668, 0.005120319843292236, 0.005107872009277344, 0.005144576072692871, 0.0051212158203125, 0.005126976013183594, 0.005175295829772949, 0.005162144184112549, 0.005270368099212647, 0.00516096019744873, 0.005189663887023925, 0.005119967937469483, 0.005117472171783447, 0.005265888214111328, 0.005142528057098389, 0.005240447998046875, 0.005146975994110107, 0.005189663887023925, 0.005125760078430176, 0.005146111965179443, 0.005161824226379394, 0.005116159915924072, 0.005123871803283692, 0.005132287979125977, 0.005146495819091797, 0.005122176170349121, 0.005115903854370117, 0.005156576156616211, 0.005187871932983398, 0.00512934398651123, 0.005122879981994629, 0.005209311962127685, 0.005104608058929443, 0.005107903957366943, 0.005148064136505127, 0.00516048002243042, 0.005138656139373779, 0.00510745620727539, 0.005335840225219726, 0.005171167850494385, 0.005183328151702881, 0.005167295932769775, 0.005117663860321045, 0.005112095832824707, 0.005128191947937012, 0.005148064136505127, 0.005089888095855713, 0.005166848182678222, 0.005284095764160156, 0.005133887767791748, 0.005083392143249512, 0.005121791839599609, 0.005125440120697021, 0.005249504089355469, 0.005126368045806884, 0.005124256134033203, 0.005158751964569092, 0.0051223039627075195, 0.005129983901977539, 0.005117951869964599, 0.0051662402153015135, 0.005128223896026612, 0.005124320030212402, 0.00514518404006958, 0.005133855819702149, 0.0050999999046325685, 0.005166143894195557, 0.005140416145324707, 0.005147615909576416, 0.005122079849243164, 0.005322751998901368, 0.00512559986114502, 0.005158592224121094, 0.005135424137115478, 0.005166816234588623, 0.005136640071868897, 0.005208992004394532, 0.005183807849884033, 0.005278624057769775, 0.005314239978790283, 0.005269536018371582, 0.005135359764099121, 0.005147359848022461, 0.005118207931518555, 0.005359615802764893, 0.005259263992309571, 0.005214208126068115, 0.005164864063262939, 0.005124159812927246, 0.005092671871185303, 0.005145408153533935, 0.005132415771484375, 0.005109632015228271, 0.005093152046203613, 0.005193920135498047, 0.005144351959228515, 0.005109536170959473, 0.0051307201385498045, 0.005117695808410644, 0.005212448120117188, 0.005148575782775879, 0.005174623966217041, 0.005128767967224121, 0.005091328144073487, 0.005199711799621582, 0.005161407947540283, 0.005144735813140869, 0.005140192031860351, 0.005203551769256591, 0.00517900800704956, 0.005140448093414306, 0.005174079895019531, 0.005150720119476319, 0.005141056060791015, 0.005166656017303467, 0.005161407947540283, 0.005212160110473632, 0.005494592189788819, 0.0051365761756896975, 0.005256735801696778, 0.005136032104492188, 0.005417119979858398, 0.0053515520095825195, 0.005259967803955078, 0.005211775779724121, 0.005150815963745117, 0.005178751945495605, 0.00513424015045166, 0.005143263816833496, 0.005181568145751953, 0.005146975994110107, 0.005153728008270264, 0.005160768032073975, 0.005171552181243897, 0.005194303989410401, 0.00514470386505127, 0.005153855800628662, 0.005141088008880615, 0.0051448001861572265, 0.0051538882255554195, 0.005188511848449707, 0.005781568050384521, 0.005263296127319336, 0.005217696189880371, 0.005527520179748535, 0.005210239887237549, 0.005283872127532959, 0.005353824138641357, 0.005183775901794433, 
0.005246655941009521, 0.005257376194000244, 0.005162911891937256, 0.005214303970336914, 0.005141503810882568, 0.005274623870849609, 0.005181407928466797, 0.005152927875518799, 0.005145984172821045, 0.005281472206115722, 0.005181695938110352, 0.005141056060791015, 0.005169151782989502, 0.005189375877380371, 0.005153279781341552, 0.005149888038635254, 0.005169695854187012, 0.005164095878601074, 0.005196767807006836, 0.00518668794631958, 0.00515337610244751, 0.00516534423828125, 0.005122047901153564, 0.005201504230499267, 0.005138847827911377, 0.005119264125823975, 0.005176032066345215, 0.005061183929443359, 0.005176864147186279, 0.0051521921157836915, 0.005145696163177491, 0.005306335926055908, 0.005170783996582031, 0.005136735916137695, 0.005164576053619385, 0.005177855968475342, 0.005158463954925537, 0.005144224166870117, 0.005284704208374023, 0.005146463871002197, 0.005138495922088623, 0.005150720119476319, 0.005165023803710938, 0.005125728130340576, 0.005132192134857177, 0.0051801280975341795, 0.005179200172424316, 0.005146624088287354, 0.0051833920478820805, 0.0051604161262512206, 0.005185215950012207, 0.005161920070648193, 0.005186880111694336, 0.005244703769683838, 0.00516319990158081, 0.0051814718246459964, 0.005214911937713623, 0.005152768135070801, 0.00515283203125, 0.005158175945281982, 0.005184160232543945, 0.005142528057098389, 0.00520143985748291, 0.005122528076171875, 0.005174911975860595, 0.005194111824035645, 0.005150623798370361, 0.0051303358078002926, 0.005119200229644775, 0.005226399898529053, 0.0051430721282958985, 0.005179423809051514, 0.005163072109222412, 0.005269760131835937, 0.005159039974212646, 0.005226655960083008, 0.005218368053436279, 0.005162655830383301, 0.005138432025909424, 0.005160128116607666, 0.005142720222473145, 0.005148799896240235, 0.005169663906097412, 0.0051809921264648435, 0.005126688003540039, 0.005164959907531738, 0.005162911891937256, 0.005132383823394775, 0.00514412784576416, 0.00520195198059082, 0.005033760070800781, 0.005187071800231933, 0.005130847930908203, 0.005151648044586182, 0.0052432317733764645, 0.005279776096343994, 0.005134975910186768, 0.005099520206451416, 0.005182752132415772, 0.005116767883300781, 0.005128064155578613, 0.00515180778503418, 0.0051123518943786625, 0.005154399871826172, 0.005110208034515381, 0.005191584110260009, 0.0051331200599670414, 0.005157983779907226, 0.005173823833465576, 0.005154463768005371, 0.005153120040893555, 0.0051363520622253415, 0.00519375991821289, 0.00510975980758667, 0.0051404800415039064, 0.005195775985717774, 0.005126143932342529, 0.00516096019744873, 0.005247231960296631, 0.005189216136932373, 0.005115359783172607, 0.005157567977905273, 0.005165056228637695, 0.0051404800415039064, 0.005146495819091797, 0.005260896205902099, 0.005161503791809082, 0.005189407825469971, 0.005156511783599853, 0.005155327796936035, 0.005132351875305176, 0.005130559921264649, 0.0051422080993652345, 0.0051363840103149415, 0.005110879898071289, 0.00554691219329834, 0.005775360107421875, 0.0063298878669738766, 0.005337567806243897, 0.0065838079452514645, 0.005464896202087403, 0.00514188814163208, 0.005173567771911621, 0.005201119899749756, 0.005258016109466553, 0.005168575763702393, 0.005173823833465576, 0.005144576072692871, 0.005160927772521973, 0.005179423809051514, 0.005123583793640136, 0.00512665605545044, 0.005133887767791748, 0.005082304000854492, 0.005159743785858154, 0.005154816150665284, 0.0051363840103149415, 0.0051422080993652345, 0.005201727867126465, 0.005203551769256591, 0.005184415817260742, 
0.005210112094879151, 0.005163008213043213, 0.005127295970916748, 0.005131135940551758, 0.005197343826293946, 0.005153247833251953, 0.005142144203186035, 0.005181824207305908, 0.005141791820526123, 0.005179200172424316, 0.005129216194152832, 0.005150400161743164, 0.005129983901977539, 0.005147295951843262, 0.005168960094451904, 0.005124000072479248, 0.005169248104095459, 0.005154176235198975, 0.00515772819519043, 0.005134111881256104, 0.005129824161529541, 0.005228831768035889, 0.005119840145111084, 0.005246367931365967, 0.0051671361923217776, 0.005278816223144531, 0.005260287761688232, 0.005306367874145508, 0.00521292781829834, 0.005140575885772705, 0.005117856025695801, 0.005163008213043213, 0.005255519866943359, 0.005139872074127198, 0.005184864044189453, 0.0059155521392822265, 0.005449984073638916, 0.005326591968536377, 0.00536352014541626, 0.005641791820526123, 0.00544217586517334, 0.0053023362159729005, 0.005343167781829834, 0.0051951041221618656, 0.005174240112304687, 0.005134016036987305, 0.005191904067993164, 0.005147744178771973, 0.005146527767181396, 0.005190432071685791, 0.0051437759399414066, 0.005143328189849854, 0.005133600234985352, 0.005189919948577881, 0.005181439876556396, 0.005064703941345215, 0.005150015830993652, 0.005178048133850098, 0.005464223861694336, 0.0052980160713195805, 0.005163008213043213, 0.005206016063690186, 0.0051543679237365725, 0.005189536094665527, 0.005188127994537353, 0.005158912181854248, 0.005146751880645752, 0.005154975891113281, 0.005283391952514648, 0.005182655811309814, 0.005153759956359864, 0.005199071884155274, 0.005139232158660889, 0.005152768135070801, 0.005185056209564209, 0.005160672187805176, 0.005133056163787842, 0.005156576156616211, 0.005218175888061524, 0.005163040161132813, 0.005144288063049316, 0.005234879970550537, 0.005134816169738769, 0.005253344058990478, 0.005258143901824951, 0.00517571210861206, 0.005146463871002197, 0.005153408050537109, 0.005188608169555664, 0.005131616115570069, 0.005147615909576416, 0.005187679767608642, 0.005157472133636475, 0.005132287979125977, 0.0051690878868103024, 0.005169216156005859, 0.005144576072692871, 0.005140448093414306, 0.005221920013427734, 0.005128416061401367, 0.005152224063873291, 0.005147456169128418, 0.00522649621963501, 0.005162464141845703, 0.005184351921081543, 0.0051680002212524416, 0.005132895946502685, 0.005146656036376953, 0.005159103870391845, 0.005137504100799561, 0.005116960048675537, 0.005140575885772705, 0.005157760143280029, 0.005133408069610596, 0.005129439830780029, 0.005155456066131592, 0.00516707181930542, 0.005144576072692871, 0.0050360321998596195, 0.005122144222259522, 0.005159039974212646, 0.00522649621963501, 0.0051402878761291505, 0.005127552032470703, 0.00516755199432373, 0.005119167804718018, 0.005138527870178222, 0.005172287940979004, 0.0051402878761291505, 0.005130303859710693, 0.005145792007446289, 0.005176064014434815, 0.005139711856842041, 0.005106048107147217, 0.005177696228027343, 0.005111839771270752, 0.0051703681945800785, 0.005108255863189698, 0.005174655914306641, 0.005112736225128174, 0.005150720119476319, 0.005169151782989502, 0.005131423950195313, 0.005116767883300781, 0.005100768089294433, 0.005208864212036133, 0.005227551937103271, 0.005163584232330322, 0.005200287818908691, 0.00516096019744873, 0.005201920032501221, 0.005191999912261963, 0.005166783809661866, 0.005257215976715088, 0.005227551937103271, 0.005169407844543457, 0.005153439998626709, 0.005119775772094727, 0.005191967964172363, 0.005119232177734375, 0.005122079849243164, 
0.005139167785644531, 0.005169151782989502, 0.005150976181030273, 0.005154655933380127, 0.005345183849334717, 0.0054967679977417, 0.005791135787963867, 0.0052806720733642575, 0.005201536178588867, 0.0052288641929626465, 0.005235648155212403, 0.00559603214263916, 0.005669151782989502, 0.00528329610824585, 0.005173344135284424, 0.005228352069854736, 0.005219744205474853, 0.005145311832427979, 0.0051504640579223635, 0.0052098240852355955, 0.005142623901367187, 0.005398528099060058, 0.005191487789154053, 0.005137760162353516, 0.0054852161407470705, 0.005160448074340821, 0.005147136211395263, 0.00525055980682373, 0.005151455879211426, 0.005146592140197754, 0.005201920032501221, 0.005332992076873779, 0.005201824188232422, 0.005182816028594971, 0.005178112030029297, 0.00515283203125, 0.005150752067565918, 0.005236639976501465, 0.005146687984466553, 0.005152703762054444, 0.005346528053283691, 0.00513647985458374, 0.0051712322235107425, 0.005150815963745117, 0.005171775817871094, 0.005119999885559082, 0.005141759872436523, 0.005165823936462402, 0.005158720016479493, 0.005203360080718994, 0.005194528102874756, 0.0052674560546875, 0.005125408172607422, 0.005099679946899414, 0.005241407871246338, 0.005494783878326416, 0.005163008213043213, 0.005346975803375244, 0.005155168056488037, 0.00516096019744873, 0.005181439876556396, 0.00516099214553833, 0.005145919799804687, 0.005149343967437744, 0.00560748815536499, 0.005310080051422119, 0.005152128219604492, 0.005213312149047852, 0.005224256038665771, 0.005135359764099121, 0.005204768180847168, 0.005134560108184814, 0.0051497921943664555, 0.005120063781738281, 0.005171648025512695, 0.005149087905883789, 0.005122047901153564, 0.005174623966217041, 0.00512886381149292, 0.0051645441055297855, 0.005157375812530517, 0.005185535907745361, 0.005140384197235107, 0.005090528011322022, 0.005149472236633301, 0.005187583923339844, 0.005148672103881836, 0.005298111915588379, 0.005187551975250244, 0.005181280136108399, 0.005151999950408936, 0.005133632183074951, 0.005168416023254394, 0.005120416164398193, 0.005127168178558349, 0.00511897611618042, 0.005159200191497803, 0.005142240047454834, 0.005152031898498535, 0.005168159961700439, 0.0051194877624511715, 0.005154079914093017, 0.0051452798843383786, 0.005167327880859375, 0.005126368045806884, 0.0051253437995910645, 0.005167679786682129, 0.005152128219604492, 0.005153312206268311, 0.005195871829986572, 0.005158912181854248, 0.005134079933166504, 0.005202176094055176, 0.005173247814178467, 0.005130112171173096, 0.005123712062835693, 0.0051327037811279294, 0.0052204480171203615, 0.005134143829345703, 0.005126336097717285, 0.0051649918556213375, 0.005127647876739502, 0.005112192153930664, 0.005154880046844482, 0.005167263984680176, 0.005115903854370117, 0.005117856025695801, 0.00519385576248169, 0.005117919921875, 0.005167103767395019, 0.0051662721633911135, 0.005261856079101563, 0.005177696228027343, 0.005178688049316406, 0.00518720006942749, 0.005141503810882568, 0.005132287979125977, 0.005189856052398681, 0.005154592037200928, 0.005129439830780029, 0.005118752002716064, 0.0052566399574279785, 0.005134111881256104, 0.005153567790985108, 0.0052408318519592285, 0.005138271808624267, 0.005077792167663574, 0.005138400077819824, 0.005113984107971191, 0.005183328151702881, 0.005179552078247071, 0.005100959777832031, 0.005152959823608399, 0.0051307201385498045, 0.0051422080993652345, 0.0051157760620117185, 0.005144351959228515, 0.005115935802459717, 0.0051178879737854, 0.005128223896026612, 0.005157504081726074, 
0.005111072063446045, 0.005153471946716308, 0.0051998720169067385, 0.005107200145721436, 0.0051777281761169434, 0.00515008020401001, 0.00516377592086792, 0.0051363520622253415, 0.0051792640686035155, 0.005174592018127442, 0.005132480144500733, 0.005130911827087402, 0.00514412784576416, 0.005267583847045899, 0.005124415874481201, 0.005134592056274414, 0.005189375877380371, 0.005134687900543213, 0.005177087783813476, 0.005166719913482666, 0.005322783946990967, 0.005230783939361572, 0.005191743850708008, 0.005197824001312256, 0.0051199040412902835, 0.005159103870391845, 0.005218272209167481, 0.005113791942596435, 0.0051363840103149415, 0.00514412784576416, 0.005132736206054688, 0.0051133761405944824, 0.005115359783172607, 0.005153791904449463, 0.005185696125030517, 0.005121888160705567, 0.005148672103881836, 0.005181439876556396, 0.005128384113311768, 0.0051214079856872555, 0.0052044157981872554, 0.005183487892150879, 0.005134335994720459, 0.005152256011962891, 0.005255008220672607, 0.005111839771270752, 0.005157087802886963, 0.005173664093017578, 0.005070559978485108, 0.005144864082336426, 0.0051363840103149415, 0.005110047817230225, 0.005177055835723877, 0.005152512073516846, 0.005148672103881836, 0.005202176094055176, 0.005154816150665284, 0.005170527935028076, 0.00512886381149292, 0.0051580162048339845, 0.005122943878173828, 0.005158912181854248, 0.005187583923339844, 0.005166336059570313, 0.005145343780517578, 0.005135583877563477, 0.005200672149658203, 0.005152768135070801, 0.005220352172851562, 0.0052408318519592285, 0.005150335788726807, 0.005263743877410888, 0.005200128078460694, 0.005153952121734619, 0.0051413440704345705, 0.005242623805999756, 0.005162879943847656, 0.005166463851928711, 0.005209184169769287, 0.005209760189056396, 0.005142528057098389, 0.005148672103881836, 0.005174272060394287, 0.0051348481178283695, 0.0051964159011840825, 0.005105535984039306, 0.005416192054748535, 0.005142528057098389, 0.005260032176971435, 0.005185535907745361, 0.005127711772918701, 0.00511353588104248, 0.005189919948577881, 0.00514409589767456, 0.005164224147796631, 0.005135647773742676, 0.00518617582321167, 0.00524070405960083, 0.005253280162811279, 0.005205855846405029, 0.005168352127075195, 0.005114655971527099, 0.005138336181640625, 0.005165152072906494, 0.005119423866271972, 0.005149248123168945, 0.005158912181854248, 0.005138432025909424, 0.0051404800415039064, 0.005121823787689209, 0.005183616161346436, 0.005072735786437989, 0.005111519813537598, 0.0051224961280822755, 0.005126143932342529, 0.005171199798583984, 0.0052304320335388186, 0.005314400196075439, 0.005204288005828858, 0.005128352165222168, 0.005170720100402832, 0.005171520233154297, 0.0051530561447143556, 0.005127007961273193, 0.005123199939727784, 0.005158783912658692, 0.005127200126647949, 0.005131103992462158, 0.005154816150665284, 0.005154560089111328, 0.005112063884735107, 0.005136640071868897, 0.005211487770080566, 0.00512656021118164, 0.0051569600105285645, 0.005154304027557373, 0.00513593578338623, 0.0051580162048339845, 0.005250847816467285, 0.005197760105133057, 0.005243008136749268, 0.0051559357643127446, 0.005183263778686524, 0.005136703968048096, 0.005114816188812256, 0.005117695808410644, 0.0051625919342041015, 0.0051363840103149415, 0.005120416164398193, 0.005242496013641357, 0.005142111778259278, 0.005129280090332031, 0.00512175989151001, 0.0051850881576538084, 0.005127679824829101, 0.005118912220001221, 0.0051645441055297855, 0.005286111831665039, 0.0051233601570129395, 0.005161856174468994, 
0.005179520130157471, 0.005162559986114502, 0.005201504230499267, 0.005140543937683106, 0.005162112236022949, 0.005277440071105957, 0.005178815841674804, 0.005163136005401611, 0.005130784034729004, 0.005146431922912598, 0.0051736001968383785, 0.0051380801200866695, 0.005150688171386719, 0.005184576034545898, 0.0051970877647399905, 0.005247712135314941, 0.005179391860961914, 0.0051550078392028804, 0.005268320083618164, 0.005251264095306397, 0.005142496109008789, 0.005170303821563721, 0.005131968021392822, 0.005144576072692871, 0.005139520168304443, 0.005211071968078613, 0.005331200122833252, 0.0051643519401550295, 0.005212607860565185, 0.005162335872650146, 0.005169695854187012, 0.005202047824859619, 0.005173279762268067, 0.005135359764099121, 0.0051511039733886715, 0.005293856143951416, 0.0051495041847229, 0.005130239963531494, 0.005176608085632324, 0.005133024215698242, 0.005128416061401367, 0.005251967906951904, 0.0053721599578857426, 0.005161632061004639, 0.00516483211517334, 0.005175519943237305, 0.005154816150665284, 0.005142528057098389, 0.0052258877754211425, 0.005138463973999023, 0.005480192184448242, 0.005284095764160156, 0.005163584232330322, 0.005131296157836914, 0.005229407787322998, 0.005182784080505371, 0.0051240320205688476, 0.00515283203125, 0.005184095859527588, 0.005136608123779297, 0.0051363840103149415, 0.005149695873260498, 0.005149087905883789, 0.005184383869171143, 0.0051363840103149415, 0.0052650561332702635, 0.005148096084594727, 0.005123007774353028, 0.005201248168945313, 0.00514083194732666, 0.005140448093414306, 0.0053636798858642575, 0.005199935913085938, 0.005199327945709229, 0.005255136013031006, 0.005177472114562988, 0.005168863773345947, 0.005137919902801514, 0.00516761589050293, 0.0051363840103149415, 0.005185535907745361, 0.005148384094238281, 0.005385983943939209, 0.0052120318412780765, 0.005168896198272705, 0.005168032169342041, 0.00519484806060791, 0.0051948800086975095, 0.005178175926208496, 0.00515990400314331, 0.005185184001922608, 0.00514902400970459, 0.00516096019744873, 0.005178847789764404, 0.0051594557762146, 0.005179391860961914, 0.0051998720169067385, 0.005158912181854248, 0.0051857919692993165, 0.005207808017730713, 0.0051727681159973145, 0.005169631958007813, 0.005144576072692871, 0.00523206377029419, 0.005196352005004883, 0.005212160110473632, 0.005180672168731689, 0.0052882242202758785, 0.00531935977935791, 0.005138207912445068, 0.005236447811126709, 0.005331232070922852, 0.005748672008514404, 0.005744703769683838, 0.0060026879310607914, 0.005254623889923096, 0.005205760002136231, 0.006259583950042725, 0.005187456130981445, 0.005168767929077148, 0.00524121618270874, 0.005194975852966309, 0.005391359806060791, 0.005271359920501709, 0.005205440044403076, 0.005185344219207764, 0.00515558385848999, 0.0052362561225891115, 0.005241375923156738, 0.005171135902404785, 0.005214208126068115, 0.005154975891113281, 0.0051851201057434085, 0.005143904209136963, 0.0052130880355834965, 0.00522431993484497, 0.005185664176940918, 0.005208064079284668, 0.005174592018127442, 0.005206079959869385, 0.0050969281196594236, 0.0051594557762146, 0.005220096111297607, 0.005191199779510498, 0.005333248138427734, 0.005276480197906494, 0.0051495361328125, 0.0051942400932312015, 0.00518393611907959, 0.005187456130981445, 0.005154816150665284, 0.00517855978012085, 0.005203968048095703, 0.005153151988983154, 0.005150688171386719, 0.005181536197662353, 0.005159232139587402, 0.005142752170562744, 0.005194943904876709, 0.00520630407333374, 0.005142335891723633, 
0.0051680002212524416, 0.005191359996795654, 0.005175295829772949, 0.005158912181854248, 0.00519596815109253, 0.005191487789154053, 0.005189568042755127, 0.005169536113739014, 0.005301951885223389, 0.005165056228637695, 0.0051998720169067385, 0.005193727970123291, 0.00515231990814209, 0.005132095813751221, 0.005189727783203125, 0.005154751777648926, 0.005169375896453858, 0.00517471981048584, 0.005171487808227539, 0.005138527870178222, 0.005116384029388427, 0.005191775798797607, 0.005269504070281982, 0.005156864166259765, 0.00516096019744873, 0.005160128116607666, 0.005157695770263672, 0.005138432025909424, 0.0052367358207702636, 0.005179327964782715, 0.005167168140411377, 0.0052715520858764645, 0.005141568183898926, 0.005178336143493652, 0.005138144016265869, 0.005224031925201416, 0.005143199920654297, 0.006116576194763183, 0.005220928192138672, 0.005140704154968262, 0.005171199798583984, 0.0051850881576538084]",tokens/s,192.85249448427544,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.884672,1333.6576,0.0,931.135488,917.648384,s,1,7.50802783203125,7.50802783203125,0.0,7.50802783203125,7.50802783203125,7.50802783203125,7.50802783203125,[7.50802783203125],,kWh,5.407496499992703e-06,5.890223519076623e-07,1.8000014400110231e-06,7.796520291911388e-06,,MB,1215.1808,1463.681024,0.0,1050.673152,1018.330112,s,10,0.6765899276733398,0.06765899276733398,0.0010763245690402594,0.06734484863281251,0.06795555114746094,0.06939136123657226,0.07054000930786133,"[0.0708271713256836, 0.06740064239501953, 0.06704054260253907, 0.06731613159179688, 0.06737356567382813, 0.06694643402099609, 0.06761177825927735, 0.06719913482666015, 0.06763648223876953, 0.06723804473876953]",tokens/s,3783.680328797295,kWh,2.177399256623094e-06,2.4012750983546384e-07,1.4401877193133965e-06,3.857714485771954e-06,tokens/kWh,66360535.73798184,MB,1247.240192,1480.45824,0.0,1067.450368,1032.767488,s,10,13.11050183105469,1.3110501831054688,0.0028262769490806226,1.310505126953125,1.3139527587890625,1.3152584838867187,1.3163030639648436,"[1.312654052734375, 1.3082523193359374, 1.310500732421875, 1.313636962890625, 1.316564208984375, 1.30844091796875, 1.308921142578125, 1.307359375, 1.310509521484375, 1.31366259765625]",tokens/s,48.05308050891893,kWh,3.815835139962815e-05,4.208462403042431e-06,1.6140881901086173e-05,5.850769570375676e-05,tokens/kWh,1076781.4257972015,,s,630,13.105530357360832,0.020802429138667998,0.00027086809499354375,0.02074827194213867,0.020971088600158692,0.021147575759887696,0.022044558410644535,"[0.020575456619262696, 0.020803199768066407, 0.02090707206726074, 0.021055776596069335, 0.02071126365661621, 0.0206648006439209, 0.02065648078918457, 0.02083839988708496, 0.02141961669921875, 0.020744607925415038, 0.020698848724365233, 0.020713760375976564, 0.020609024047851563, 0.02064384078979492, 0.020617216110229493, 0.020674560546875, 0.02063737678527832, 0.020656383514404297, 
0.02099945640563965, 0.02082486343383789, 0.020632768630981447, 0.020671295166015624, 0.020677984237670897, 0.020608671188354494, 0.020792320251464845, 0.020733055114746095, 0.02071027183532715, 0.020710527420043947, 0.020620031356811522, 0.020994176864624025, 0.020760576248168947, 0.020708415985107424, 0.02383353614807129, 0.023340864181518553, 0.02081391906738281, 0.020758623123168944, 0.02074345588684082, 0.020728544235229494, 0.020719295501708986, 0.020655487060546877, 0.02058684730529785, 0.020652639389038087, 0.02064793586730957, 0.020608671188354494, 0.020697439193725586, 0.020611072540283205, 0.020692991256713866, 0.020684799194335936, 0.02080963134765625, 0.02079955291748047, 0.020743711471557617, 0.020853248596191407, 0.02082745552062988, 0.02074665641784668, 0.020683040618896486, 0.020747871398925782, 0.020823583602905274, 0.02074095916748047, 0.020670495986938476, 0.02071865653991699, 0.020695871353149414, 0.02074393653869629, 0.020733568191528322, 0.020486143112182616, 0.020739839553833007, 0.020723968505859374, 0.020733951568603515, 0.020733503341674803, 0.020697536468505858, 0.02065203285217285, 0.02070425605773926, 0.02073868751525879, 0.020691328048706055, 0.020690944671630858, 0.020619264602661135, 0.02066022491455078, 0.02061516761779785, 0.020728927612304687, 0.020689279556274413, 0.020761119842529298, 0.021233375549316407, 0.02072604751586914, 0.020674560546875, 0.020649984359741212, 0.02069487953186035, 0.020820127487182618, 0.020759904861450195, 0.020703744888305665, 0.02086070442199707, 0.020742528915405272, 0.020735456466674806, 0.02071196746826172, 0.020707328796386718, 0.02074153518676758, 0.020619359970092774, 0.020653631210327147, 0.02060905647277832, 0.02071571159362793, 0.02224608039855957, 0.020916255950927734, 0.020717567443847656, 0.02097488021850586, 0.020706016540527342, 0.020726911544799803, 0.02070822334289551, 0.020682079315185547, 0.020738719940185547, 0.02077004814147949, 0.020738016128540038, 0.020815872192382814, 0.020771615982055663, 0.020971040725708007, 0.020873056411743165, 0.02076326370239258, 0.02066399955749512, 0.02072403144836426, 0.02071958351135254, 0.020664384841918945, 0.02076176071166992, 0.020699968338012697, 0.020723712921142577, 0.020659360885620117, 0.020746271133422853, 0.020755104064941406, 0.02080531120300293, 0.020709856033325195, 0.020518911361694335, 0.020784191131591797, 0.020824607849121095, 0.021215423583984375, 0.021117151260375975, 0.020739103317260744, 0.02086569595336914, 0.020736095428466796, 0.020979679107666016, 0.020644096374511717, 0.02090188789367676, 0.02071513557434082, 0.02070560073852539, 0.020611135482788086, 0.020668415069580077, 0.02063155174255371, 0.02069215965270996, 0.02065875244140625, 0.02078940773010254, 0.020850847244262696, 0.02085862350463867, 0.02069318389892578, 0.020810911178588867, 0.02078963279724121, 0.02082454490661621, 0.020740095138549804, 0.020676607131958007, 0.02068057632446289, 0.02060915184020996, 0.02066160011291504, 0.020765344619750978, 0.02111692810058594, 0.02073504066467285, 0.020757440567016602, 0.020805631637573242, 0.02065577507019043, 0.020658527374267578, 0.020600479125976564, 0.02080803108215332, 0.020580352783203124, 0.02074163246154785, 0.020845056533813477, 0.020766719818115235, 0.020813823699951172, 0.020715520858764647, 0.020696863174438477, 0.02063587188720703, 0.020665599822998048, 0.02062553596496582, 0.020634239196777343, 0.02071347236633301, 0.020971519470214844, 0.02089779281616211, 0.020707328796386718, 0.02088470458984375, 0.020757280349731445, 
0.020752384185791017, 0.020676607131958007, 0.020956319808959963, 0.02070185661315918, 0.02114396858215332, 0.022419551849365234, 0.02079302406311035, 0.020643903732299806, 0.020972768783569337, 0.021450624465942383, 0.02112188720703125, 0.021071935653686525, 0.02066009521484375, 0.02068492889404297, 0.020580352783203124, 0.02086832046508789, 0.020914560317993165, 0.020826528549194336, 0.020796512603759764, 0.020828479766845702, 0.02083014488220215, 0.02086502456665039, 0.020730527877807617, 0.020756223678588866, 0.020686559677124024, 0.020761119842529298, 0.020699199676513673, 0.02075641632080078, 0.020718719482421873, 0.02069798469543457, 0.020668415069580077, 0.020741504669189455, 0.020606624603271485, 0.020734079360961916, 0.020652896881103517, 0.02070528030395508, 0.02069231986999512, 0.020632160186767577, 0.020590080261230468, 0.02065875244140625, 0.020614944458007812, 0.020719839096069337, 0.02067046356201172, 0.02068070411682129, 0.020805631637573242, 0.020692991256713866, 0.020682752609252928, 0.020824064254760744, 0.02066022491455078, 0.02063907241821289, 0.0207379207611084, 0.022749984741210937, 0.021993824005126953, 0.021061279296875, 0.020794944763183595, 0.020765119552612305, 0.020704992294311525, 0.02078748893737793, 0.02065190315246582, 0.020797439575195312, 0.020738048553466795, 0.020809823989868165, 0.020699167251586915, 0.020658016204833984, 0.020645759582519532, 0.020666656494140626, 0.020715360641479493, 0.02206528091430664, 0.02156342315673828, 0.020917984008789064, 0.02059267234802246, 0.02097983932495117, 0.021047935485839844, 0.021000192642211913, 0.020967424392700194, 0.021223424911499023, 0.020791296005249024, 0.020748287200927733, 0.02094655990600586, 0.020902271270751952, 0.02089491271972656, 0.020710208892822265, 0.02065964889526367, 0.02078175926208496, 0.020774784088134764, 0.02085273551940918, 0.02085036849975586, 0.02098412895202637, 0.020915840148925783, 0.020752288818359374, 0.02078767967224121, 0.020777984619140624, 0.020798303604125976, 0.020721824645996093, 0.02072547149658203, 0.02075881576538086, 0.021370880126953123, 0.022147071838378905, 0.021608448028564452, 0.020918272018432618, 0.021020671844482423, 0.021344255447387696, 0.020953088760375976, 0.02086092758178711, 0.02079283142089844, 0.020842752456665038, 0.020904191970825194, 0.02079539108276367, 0.020781055450439453, 0.020723712921142577, 0.020776960372924806, 0.021153791427612305, 0.020887199401855468, 0.020776479721069337, 0.020928960800170898, 0.020819456100463866, 0.020795616149902343, 0.020798463821411133, 0.020927295684814454, 0.0207040958404541, 0.020798784255981445, 0.02077766418457031, 0.020789247512817383, 0.020934656143188478, 0.02085683250427246, 0.020712831497192382, 0.020845184326171873, 0.020762624740600585, 0.020793184280395508, 0.020711584091186522, 0.020752384185791017, 0.020932479858398436, 0.020822080612182617, 0.02067251205444336, 0.0209072322845459, 0.020788000106811522, 0.020832191467285155, 0.020770111083984376, 0.02093116760253906, 0.020857215881347656, 0.020794912338256834, 0.020869375228881836, 0.020746240615844725, 0.020713151931762694, 0.020748191833496094, 0.020769184112548827, 0.02075971221923828, 0.020716384887695314, 0.02067046356201172, 0.020735679626464845, 0.020751903533935547, 0.020724479675292968, 0.02081590461730957, 0.0207523193359375, 0.02073916816711426, 0.02078780746459961, 0.020908416748046874, 0.020686847686767578, 0.020682239532470705, 0.02072012710571289, 0.020789247512817383, 0.020738048553466795, 0.02066431999206543, 0.020707328796386718, 
0.020851999282836913, 0.020871904373168944, 0.020899839401245117, 0.020750335693359375, 0.02083020782470703, 0.020781055450439453, 0.020721120834350584, 0.02066076850891113, 0.020674591064453126, 0.02056188774108887, 0.02066431999206543, 0.020735872268676757, 0.020815744400024414, 0.02081407928466797, 0.02078291130065918, 0.02072492790222168, 0.020846879959106446, 0.020832992553710936, 0.02075788879394531, 0.020729536056518554, 0.020650943756103515, 0.02071347236633301, 0.020764671325683593, 0.020740095138549804, 0.020688640594482423, 0.020698463439941406, 0.02058742332458496, 0.020741952896118163, 0.020741567611694336, 0.020904703140258787, 0.02086911964416504, 0.020763967514038088, 0.020447872161865235, 0.020717567443847656, 0.020770816802978515, 0.020746559143066407, 0.020654815673828125, 0.020894687652587892, 0.020890655517578124, 0.020771808624267578, 0.02086297607421875, 0.020760576248168947, 0.020781055450439453, 0.020763647079467772, 0.020767744064331056, 0.020703231811523438, 0.020714719772338866, 0.020711936950683595, 0.020906272888183593, 0.020760576248168947, 0.020796480178833007, 0.020704191207885744, 0.020694976806640626, 0.02077846336364746, 0.020754047393798828, 0.020834815979003905, 0.020668096542358398, 0.021269279479980467, 0.020792896270751954, 0.020889631271362306, 0.02059676742553711, 0.02078553581237793, 0.020798944473266603, 0.020825664520263673, 0.02089369583129883, 0.020841440200805663, 0.020946815490722658, 0.020920448303222657, 0.020787200927734374, 0.02121321678161621, 0.02093052864074707, 0.02076176071166992, 0.020816736221313477, 0.020703584671020507, 0.020696735382080077, 0.020546592712402344, 0.020547712326049804, 0.020745088577270508, 0.020604639053344725, 0.02060214424133301, 0.020601823806762697, 0.020582399368286132, 0.020785152435302736, 0.020547456741333008, 0.020688032150268553, 0.020847103118896485, 0.020742624282836915, 0.020658176422119142, 0.020658048629760742, 0.020514944076538085, 0.02126028823852539, 0.02104470443725586, 0.02070172882080078, 0.020729183197021484, 0.020690656661987303, 0.020471584320068358, 0.02069731140136719, 0.020668512344360353, 0.02069887924194336, 0.020680864334106444, 0.020678144454956054, 0.020656639099121094, 0.02069708824157715, 0.020553728103637696, 0.020748287200927733, 0.020570112228393556, 0.020600831985473633, 0.02081977653503418, 0.020809728622436522, 0.02066646385192871, 0.02069718360900879, 0.020717567443847656, 0.020619104385375977, 0.020887712478637695, 0.020772863388061523, 0.021155839920043946, 0.020672063827514648, 0.020764736175537108, 0.02172675132751465, 0.02075529670715332, 0.020996095657348633, 0.02081135940551758, 0.020687007904052736, 0.020525312423706053, 0.020684799194335936, 0.02059004783630371, 0.020623903274536132, 0.02060492706298828, 0.020611072540283205, 0.02062335968017578, 0.020981760025024415, 0.02075859260559082, 0.020998239517211914, 0.02157254409790039, 0.020724639892578126, 0.0206561279296875, 0.020666368484497072, 0.02067865562438965, 0.020565216064453124, 0.02065216064453125, 0.02055235290527344, 0.020756479263305663, 0.02084454345703125, 0.020810943603515625, 0.020624191284179687, 0.020617216110229493, 0.020610591888427735, 0.020690431594848634, 0.02077769660949707, 0.020672767639160157, 0.020645183563232423, 0.020802047729492186, 0.02067430305480957, 0.020598655700683595, 0.021166656494140627, 0.02068070411682129, 0.020809728622436522, 0.020724063873291017, 0.02059231948852539, 0.020838943481445313, 0.020692575454711915, 0.020629472732543945, 0.02084787178039551, 
0.020605152130126952, 0.02065238380432129, 0.02070550346374512, 0.02114374351501465, 0.020741695404052733, 0.02065247917175293, 0.020717567443847656, 0.02062131118774414, 0.020578304290771485, 0.020867071151733398, 0.020625408172607423, 0.020774911880493165, 0.02076588821411133, 0.020838943481445313, 0.020748575210571288, 0.020801536560058592, 0.020917247772216797, 0.020898815155029296, 0.020807680130004884, 0.02080544090270996, 0.020750240325927736, 0.020687135696411132, 0.0206561279296875, 0.020776960372924806, 0.02085478401184082, 0.02070672035217285, 0.020632320404052735, 0.02062934494018555, 0.020703231811523438, 0.020739456176757813, 0.02067068862915039, 0.02066473579406738, 0.02067865562438965, 0.02066022491455078, 0.02069708824157715, 0.020807136535644533, 0.020795936584472655, 0.020766143798828126, 0.020709951400756835, 0.02071507263183594, 0.02114748764038086, 0.020832799911499025, 0.020805696487426757, 0.020822015762329102, 0.020891424179077148, 0.02083772850036621, 0.02070822334289551, 0.021863967895507812, 0.02105187225341797, 0.020779008865356444, 0.02123161506652832, 0.020845888137817382, 0.020846303939819337, 0.02080793571472168, 0.0207490234375, 0.020774911880493165, 0.020879264831542968, 0.02093840026855469, 0.020795200347900392, 0.021019968032836914, 0.020934911727905275, 0.021062271118164062, 0.021012479782104493, 0.02083430480957031, 0.020840448379516603, 0.020762624740600585, 0.020868255615234376, 0.02074825668334961, 0.02089206314086914, 0.020965856552124025, 0.020926464080810548, 0.020754432678222655, 0.020699071884155273, 0.020710752487182616, 0.020779743194580078, 0.020999263763427735, 0.020958112716674804, 0.021147647857666017, 0.020711200714111328, 0.020722015380859375, 0.020891136169433593, 0.020691328048706055, 0.020756479263305663, 0.020903167724609376, 0.020839168548583985, 0.021028863906860353, 0.02127052879333496, 0.020801536560058592, 0.02081123161315918, 0.020777503967285157, 0.020786304473876954, 0.020859775543212892, 0.020815200805664062, 0.021299871444702148, 0.02087343978881836, 0.020784927368164063, 0.02059779167175293, 0.02075132751464844, 0.020612735748291016, 0.020801088333129884, 0.020624191284179687, 0.020709375381469726, 0.02084249687194824, 0.02068889617919922, 0.020791296005249024, 0.020848735809326172, 0.020715423583984375, 0.020707263946533203, 0.02086822319030762, 0.021713855743408204, 0.02123119926452637, 0.021030336380004882, 0.02082419204711914, 0.020734752655029297, 0.02074835205078125, 0.02067865562438965, 0.020651552200317384, 0.02077142333984375, 0.02066009521484375, 0.02068396759033203, 0.02056275177001953]",tokens/s,48.07130904444129,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ 
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 240526 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.524224,1333.6576,0.0,931.135488,917.648384,s,1,7.1786025390625,7.1786025390625,0.0,7.1786025390625,7.1786025390625,7.1786025390625,7.1786025390625,[7.1786025390625],,kWh,5.499051570814117e-06,5.996629808024604e-07,2.0122238319902896e-06,8.110938383606867e-06,,MB,1205.526528,1463.681024,0.0,1050.673152,1018.330112,s,10,0.6384683532714843,0.06384683532714844,0.001413369166701557,0.06355270385742187,0.06432075729370117,0.0661330997467041,0.06758297370910644,"[0.06794544219970704, 0.06287577438354493, 0.06343478393554687, 0.06376988983154297, 0.06304214477539062, 0.06329363250732421, 0.06282419204711914, 0.06367062377929687, 0.06391801452636718, 0.06369385528564453]",tokens/s,4009.595756598851,kWh,2.0795970205674042e-06,2.2925381504506672e-07,1.3723691041986203e-06,3.6812199398110917e-06,tokens/kWh,69542163.78963141,MB,1230.716928,1480.45824,0.0,1067.450368,1033.282048,s,10,11.937548828125001,1.1937548828125002,0.032114017747860754,1.1962936401367186,1.2280666870117187,1.2285925476074218,1.2290132360839845,"[1.1472298583984375, 1.1569981689453126, 1.157586181640625, 1.177384033203125, 1.175416748046875, 1.2279498291015625, 1.229118408203125, 1.2276732177734375, 1.2152032470703125, 1.2229891357421876]",tokens/s,52.774653245037435,kWh,3.308439241401431e-05,3.648841774273538e-06,1.4675365956201016e-05,5.140860014448887e-05,tokens/kWh,1225475.8896941831,,s,630,11.931307874679566,0.018938583928062802,0.0006867629212757198,0.019067839622497557,0.019458147048950193,0.019615766525268554,0.02110810047149659,"[0.018685951232910156, 0.01852182388305664, 0.018395423889160156, 0.018306047439575195, 0.0182609920501709, 0.018281728744506835, 0.018039712905883788, 0.018052095413208007, 0.018183008193969726, 0.018265247344970703, 0.018459487915039062, 0.018406656265258788, 0.01845030403137207, 0.018402175903320314, 0.018579072952270508, 0.018811199188232423, 0.01856425666809082, 0.018451360702514647, 0.01860403251647949, 0.018355775833129882, 0.018370367050170897, 0.018276575088500977, 
0.018172319412231446, 0.018149375915527344, 0.01804083251953125, 0.01835379219055176, 0.01803219223022461, 0.018096128463745118, 0.01804742431640625, 0.01797567939758301, 0.01807993507385254, 0.018075456619262697, 0.01811577606201172, 0.018151456832885743, 0.017939231872558595, 0.018088287353515625, 0.01801136016845703, 0.018117055892944336, 0.01817571258544922, 0.018019840240478514, 0.01799616050720215, 0.018192800521850586, 0.018131967544555663, 0.0179800968170166, 0.018202367782592772, 0.018092416763305665, 0.018045120239257813, 0.018059263229370116, 0.018167776107788088, 0.018222816467285158, 0.018194143295288085, 0.018049631118774414, 0.018068639755249024, 0.01807014465332031, 0.01815532875061035, 0.018067071914672852, 0.018043167114257814, 0.018209280014038084, 0.01819443130493164, 0.018077695846557617, 0.018070816040039062, 0.01810915184020996, 0.01798940849304199, 0.01782374382019043, 0.018113536834716795, 0.018121152877807616, 0.018054943084716796, 0.018068256378173827, 0.01823744010925293, 0.017983007431030273, 0.01794710350036621, 0.017991424560546875, 0.018202272415161133, 0.017995744705200194, 0.01805606460571289, 0.01819603157043457, 0.019161279678344727, 0.01840246391296387, 0.018051904678344728, 0.018096160888671876, 0.01798281669616699, 0.017957056045532226, 0.017909503936767577, 0.017973440170288086, 0.017961503982543946, 0.018059263229370116, 0.018963775634765624, 0.018045503616333006, 0.01801024055480957, 0.01795465660095215, 0.018045087814331055, 0.018113727569580077, 0.01853727912902832, 0.018391040802001952, 0.018413375854492188, 0.01835641670227051, 0.018327360153198243, 0.018374847412109374, 0.01834819221496582, 0.01831100845336914, 0.018405376434326173, 0.018333343505859374, 0.01848956871032715, 0.018347648620605467, 0.018487199783325196, 0.01854307174682617, 0.01861139106750488, 0.018553951263427734, 0.01860806465148926, 0.018552736282348634, 0.01856892776489258, 0.01879439926147461, 0.01877235221862793, 0.01879849624633789, 0.01885171127319336, 0.018788576126098633, 0.01865727996826172, 0.01874947166442871, 0.018732383728027345, 0.018704864501953126, 0.018641056060791014, 0.0186507511138916, 0.018655616760253905, 0.01855023956298828, 0.018556703567504884, 0.018500352859497072, 0.018595903396606446, 0.018827775955200195, 0.01890928077697754, 0.018628511428833008, 0.01860982322692871, 0.018664031982421874, 0.018673280715942382, 0.0187347526550293, 0.01849401664733887, 0.018573440551757813, 0.018566495895385744, 0.018510271072387695, 0.018339839935302735, 0.018306367874145506, 0.018244064331054688, 0.01824176025390625, 0.01835113525390625, 0.018372608184814454, 0.018344736099243163, 0.018352319717407226, 0.01827840042114258, 0.018274431228637696, 0.018290367126464844, 0.018292928695678713, 0.018363712310791015, 0.018239200592041014, 0.018415584564208984, 0.01823027229309082, 0.01844223976135254, 0.018283615112304686, 0.018303840637207032, 0.018221183776855467, 0.01825542449951172, 0.01827465629577637, 0.018259199142456054, 0.01826486396789551, 0.018300928115844727, 0.018289983749389647, 0.018408159255981445, 0.0182271671295166, 0.018304479598999022, 0.01822287940979004, 0.01830374336242676, 0.01821811294555664, 0.01846771240234375, 0.018231327056884766, 0.01830031967163086, 0.0182544002532959, 0.018249216079711913, 0.018260480880737305, 0.018225151062011717, 0.018307071685791015, 0.018307071685791015, 0.0182108154296875, 0.01844374465942383, 0.018233823776245116, 0.01828598403930664, 0.01826473617553711, 0.018373823165893553, 0.018334592819213867, 
0.01839823913574219, 0.018317920684814453, 0.018313535690307615, 0.01828646469116211, 0.01831657600402832, 0.018259967803955078, 0.018219776153564453, 0.01829043197631836, 0.019118751525878906, 0.018417343139648438, 0.01845030403137207, 0.0184106559753418, 0.018183135986328126, 0.018606016159057617, 0.018249536514282228, 0.018259328842163088, 0.018309471130371093, 0.018297216415405274, 0.01830659294128418, 0.018335840225219727, 0.018182336807250978, 0.01829091262817383, 0.01843212890625, 0.01836169624328613, 0.018254495620727538, 0.01830665588378906, 0.018221311569213867, 0.018403520584106447, 0.018182111740112306, 0.018181983947753905, 0.01820182418823242, 0.018238399505615236, 0.018439680099487304, 0.018249311447143556, 0.018834304809570313, 0.018967744827270507, 0.01836732864379883, 0.01870195198059082, 0.019087743759155274, 0.019187711715698243, 0.019105247497558594, 0.019062719345092773, 0.01901628875732422, 0.01904422378540039, 0.019136640548706056, 0.019150848388671874, 0.01902367973327637, 0.01947385597229004, 0.01924787139892578, 0.019091648101806642, 0.019187519073486328, 0.01908095932006836, 0.019110143661499022, 0.019177728652954102, 0.01928995132446289, 0.01912022399902344, 0.01894175910949707, 0.018630464553833007, 0.01881443214416504, 0.01867616081237793, 0.018939840316772462, 0.019158624649047853, 0.019051263809204102, 0.01893071937561035, 0.01891836738586426, 0.01895404815673828, 0.019171487808227538, 0.019045280456542968, 0.01881062316894531, 0.018788063049316406, 0.01888688087463379, 0.018892000198364258, 0.01860812759399414, 0.01844508743286133, 0.018298879623413086, 0.018276287078857423, 0.018292800903320312, 0.01830297660827637, 0.01826121520996094, 0.01833145523071289, 0.0181847038269043, 0.018192863464355467, 0.018421760559082033, 0.018345216751098632, 0.01831158447265625, 0.018354080200195313, 0.018334272384643555, 0.018536319732666017, 0.018778112411499022, 0.018515968322753908, 0.018298879623413086, 0.018450111389160157, 0.018327167510986328, 0.018485952377319335, 0.01866044807434082, 0.01858243179321289, 0.01859584045410156, 0.01913055992126465, 0.018790239334106444, 0.018511648178100585, 0.018570655822753905, 0.018535200119018554, 0.01864089584350586, 0.018671295166015626, 0.018727359771728517, 0.01847462463378906, 0.019074464797973634, 0.018600160598754883, 0.01852275276184082, 0.01863065528869629, 0.018571264266967775, 0.018704383850097657, 0.01869004821777344, 0.018716928482055663, 0.018677663803100587, 0.018682016372680663, 0.018618047714233397, 0.018848960876464843, 0.019041088104248045, 0.01894806480407715, 0.018864160537719728, 0.018964479446411133, 0.018980512619018553, 0.018949760437011718, 0.018922271728515624, 0.018870336532592773, 0.018869312286376953, 0.01894272041320801, 0.019241024017333984, 0.019274175643920897, 0.020977664947509765, 0.01979327964782715, 0.01924064064025879, 0.01925984001159668, 0.01907676887512207, 0.019150720596313477, 0.01912214469909668, 0.019247711181640623, 0.019217855453491212, 0.019205087661743163, 0.019048479080200194, 0.019171295166015626, 0.019174623489379882, 0.019208736419677734, 0.019507455825805663, 0.019224576950073242, 0.019156160354614257, 0.01965977668762207, 0.01960736083984375, 0.020892864227294923, 0.020267967224121095, 0.01948249626159668, 0.01930659294128418, 0.01923676872253418, 0.019241151809692384, 0.019196800231933594, 0.019241952896118165, 0.019232736587524415, 0.019213632583618166, 0.01922108840942383, 0.019256576538085938, 0.019252063751220704, 0.019229951858520507, 0.019321760177612304, 
0.019288063049316406, 0.019506559371948243, 0.020842975616455078, 0.02427654457092285, 0.019469791412353516, 0.019307487487792967, 0.01965670394897461, 0.019533824920654298, 0.019276031494140623, 0.01930419158935547, 0.01929167938232422, 0.01927337646484375, 0.01918355178833008, 0.01922697639465332, 0.019255264282226563, 0.0193337287902832, 0.019423200607299806, 0.019392223358154298, 0.01926563262939453, 0.019333311080932617, 0.019425472259521483, 0.019324735641479494, 0.019396799087524414, 0.01930415916442871, 0.01951705551147461, 0.01928550338745117, 0.01930339241027832, 0.019329023361206055, 0.019466239929199217, 0.01926348876953125, 0.01932646369934082, 0.019397119522094726, 0.019281919479370118, 0.02021753692626953, 0.019527872085571288, 0.0201147518157959, 0.01940115165710449, 0.019320831298828126, 0.01989468765258789, 0.019716064453125, 0.019376384735107423, 0.019275520324707033, 0.01942947196960449, 0.019484544754028322, 0.019347583770751953, 0.01960736083984375, 0.019408992767333984, 0.01925119972229004, 0.019425119400024414, 0.019599519729614257, 0.019465919494628905, 0.019386144638061525, 0.01960188865661621, 0.024129600524902345, 0.019555360794067382, 0.01931977653503418, 0.019424543380737305, 0.01929417610168457, 0.019266368865966797, 0.019298368453979493, 0.01930806350708008, 0.019560480117797853, 0.019195968627929688, 0.019326528549194335, 0.019274112701416015, 0.019333440780639647, 0.019289119720458985, 0.019356895446777343, 0.019301727294921876, 0.01930486488342285, 0.0194969596862793, 0.01941094398498535, 0.019337312698364258, 0.019478015899658203, 0.01924710464477539, 0.019364255905151367, 0.019519296646118164, 0.019348703384399413, 0.019297311782836914, 0.019609535217285156, 0.01946828842163086, 0.019345407485961915, 0.019406848907470704, 0.019427520751953125, 0.019334400177001953, 0.019327552795410156, 0.019434783935546877, 0.019332000732421875, 0.019289920806884766, 0.019292160034179686, 0.01951923179626465, 0.019186975479125977, 0.01934409523010254, 0.019378496170043946, 0.019277856826782225, 0.01930771255493164, 0.0193372802734375, 0.019362207412719726, 0.019290111541748048, 0.019369983673095705, 0.019322879791259767, 0.019349504470825195, 0.01965465545654297, 0.0240963191986084, 0.019782079696655273, 0.019326015472412108, 0.019248256683349608, 0.01941689682006836, 0.01921843147277832, 0.0192225284576416, 0.019244800567626952, 0.01931596755981445, 0.019325952529907226, 0.019324928283691405, 0.019175167083740233, 0.019238815307617188, 0.01943177604675293, 0.019302400588989257, 0.019252288818359376, 0.01933203125, 0.019295711517333985, 0.01928041648864746, 0.019310239791870118, 0.019461631774902344, 0.01932316780090332, 0.01942380714416504, 0.01923481559753418, 0.019620864868164063, 0.019289087295532227, 0.019308544158935546, 0.019656511306762697, 0.019334911346435547, 0.01926416015625, 0.019394336700439455, 0.019398656845092774, 0.01988921546936035, 0.01937299156188965, 0.01945599937438965, 0.019883840560913087, 0.020354623794555663, 0.01934931182861328, 0.019837631225585937, 0.019269439697265626, 0.019423551559448242, 0.01926044845581055, 0.019184608459472657, 0.019232927322387697, 0.019371423721313476, 0.01923526382446289, 0.0196844482421875, 0.019960735321044924, 0.01934492874145508, 0.019311071395874024, 0.019533824920654298, 0.01934339141845703, 0.01932624053955078, 0.01908403205871582, 0.019213760375976562, 0.019712255477905272, 0.019376415252685547, 0.01926553535461426, 0.019154943466186524, 0.019072223663330078, 0.019270431518554686, 0.019350624084472655, 
0.019059904098510744, 0.019111167907714843, 0.019089887619018555, 0.01905824089050293, 0.019052288055419923, 0.018984672546386718, 0.018968671798706056, 0.01906345558166504, 0.01898236846923828, 0.018995904922485353, 0.01895840072631836, 0.019256799697875977, 0.019180288314819337, 0.019128095626831054, 0.01927939224243164, 0.01902672004699707, 0.01906233596801758, 0.01927516746520996, 0.019039104461669922, 0.018988895416259765, 0.019275039672851563, 0.019164127349853517, 0.01907891273498535, 0.019178911209106444, 0.01905731201171875, 0.019140159606933594, 0.019208576202392576, 0.01923686408996582, 0.01918083190917969, 0.01917206382751465, 0.0191527042388916, 0.01921887969970703, 0.019339263916015623, 0.019094335556030274, 0.01931260871887207, 0.019058879852294923, 0.019114784240722656, 0.0192225284576416, 0.019528831481933594, 0.019132991790771485, 0.019390527725219726, 0.02181164741516113, 0.02160198402404785, 0.01941107177734375, 0.01921574401855469, 0.019266176223754882, 0.01931167984008789, 0.01932383918762207, 0.019390527725219726, 0.019420671463012695, 0.019370431900024413, 0.01937753677368164, 0.01937830352783203, 0.01937740707397461, 0.01956928062438965, 0.019381919860839845, 0.01930847930908203, 0.01932259178161621, 0.019403263092041014, 0.019654848098754882, 0.019294015884399413, 0.01932716751098633, 0.01958255958557129, 0.019400928497314455, 0.019384063720703126, 0.0193767032623291, 0.019358816146850585, 0.01921308708190918, 0.019328832626342773, 0.019431615829467775, 0.019842432022094725, 0.019408639907836915, 0.019407039642333986, 0.019442367553710937, 0.019494880676269533, 0.019415103912353515, 0.01937763214111328, 0.01931660842895508, 0.019483264923095704, 0.019333120346069335, 0.019425376892089844, 0.0196313591003418, 0.019425952911376953, 0.01925939178466797, 0.019311647415161132, 0.019380992889404296, 0.019310815811157227, 0.01923891258239746, 0.019457759857177733, 0.019165151596069335, 0.01945222473144531, 0.021161376953125, 0.019478879928588867, 0.019259679794311525, 0.019222463607788086, 0.01921414375305176, 0.019245279312133788, 0.022571008682250978, 0.019433183670043944, 0.01940297508239746, 0.019242528915405274, 0.019155359268188475, 0.019275487899780272, 0.019202463150024413, 0.019324256896972657, 0.01903657531738281, 0.019077375411987306, 0.01901705551147461, 0.019104415893554688, 0.0190501766204834, 0.019177375793457033, 0.019081632614135743, 0.018972671508789063, 0.01881497573852539, 0.01916860771179199]",tokens/s,52.80225827857281,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 634, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 306, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 102, in __init__ self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 219173 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.322944,725.48352,0.0,322.961408,314.743808,s,1,7.21602978515625,7.21602978515625,0.0,7.21602978515625,7.21602978515625,7.21602978515625,7.21602978515625,[7.21602978515625],,kWh,3.98744482499751e-06,4.324824491844992e-07,8.97500718008426e-07,5.317427992190435e-06,,MB,1215.942656,817.758208,0.0,404.750336,391.119872,s,27,0.2293473272323609,0.008494345453050403,0.00011215174911409335,0.008458304405212402,0.008639744186401368,0.008696070671081543,0.008805867404937743,"[0.008545087814331056, 0.008398240089416503, 0.008458304405212402, 0.008482751846313476, 0.008438655853271484, 0.00839244842529297, 0.008478528022766113, 0.008358976364135743, 0.0084334716796875, 0.008432000160217286, 0.008415295600891113, 0.008408479690551758, 0.008486047744750976, 0.008422335624694825, 0.008361727714538573, 0.008417856216430665, 0.008475263595581055, 0.008447327613830567, 0.008470911979675293, 0.008436448097229004, 0.008577247619628907, 0.008678239822387695, 0.008703712463378907, 0.008613792419433594, 0.00884175968170166, 0.008558336257934571, 0.008614080429077148]",tokens/s,30137.69588427415,kWh,2.5618840329236247e-07,2.8253100205328493e-08,1.6935507646531867e-07,4.5379657996300963e-07,tokens/kWh,564129416.7991909,MB,1227.505664,830.34112,0.0,417.333248,391.122432,s,27,9.88351303100586,0.3660560381854022,0.005418310483276229,0.3638743896484375,0.37412991943359375,0.37510900268554687,0.3762263702392578,"[0.37310012817382815, 0.36300054931640624, 0.36100634765625, 0.36089663696289065, 0.3615369873046875, 0.360325927734375, 0.36177520751953124, 0.36172683715820314, 0.36024124145507813, 0.36097747802734376, 0.3616002197265625, 0.3604239807128906, 0.36455325317382814, 0.3638432312011719, 0.36687408447265624, 0.3640677185058594, 0.3637076721191406, 0.3638743896484375, 0.3674337463378906, 0.36500900268554687, 0.3749681396484375, 0.37659774780273436, 0.37516937255859373, 0.37357110595703125, 0.3733769226074219, 0.37082763671875, 0.3730274658203125]",tokens/s,172.10479661065278,kWh,1.0533645226916644e-05,1.1616847122922038e-06,4.130588839677617e-06,1.582591877888647e-05,tokens/kWh,3980811.5332961893,,s,1701,9.870914746284484,0.00580300690551704,0.00017108809894094356,0.005755807876586914,0.0059552001953125,0.0060063362121582035,0.006483903884887695,"[0.0056471037864685054, 0.005869152069091797, 0.00587011194229126, 0.005785280227661132, 0.005867424011230468, 0.005775455951690674, 0.005816319942474365, 0.005795839786529541, 0.005783328056335449, 0.005830239772796631, 0.005767807960510254, 0.005810175895690918, 0.006289408206939697, 0.0075038719177246095, 0.007538271903991699, 0.007615039825439453, 0.007363711833953858, 0.005829343795776367, 
0.0058388481140136715, 0.005795487880706787, 0.005835103988647461, 0.005791488170623779, 0.005793983936309814, 0.005748799800872803, 0.0057712640762329105, 0.005789696216583252, 0.005713920116424561, 0.0058122239112854005, 0.005732160091400146, 0.0057489280700683595, 0.005725215911865234, 0.005729152202606201, 0.005750879764556884, 0.0057710719108581545, 0.006410431861877441, 0.005760640144348145, 0.005783936023712158, 0.005734047889709473, 0.005794112205505371, 0.005727615833282471, 0.00580841588973999, 0.0057177281379699706, 0.005753695964813232, 0.005711584091186523, 0.0057233600616455075, 0.005734943866729737, 0.005687007904052735, 0.0058371200561523435, 0.005679423809051514, 0.005742591857910156, 0.005939167976379394, 0.005969215869903564, 0.005738463878631592, 0.005716256141662598, 0.005777632236480713, 0.005716224193572998, 0.005726208209991455, 0.00572979211807251, 0.005745471954345703, 0.005699103832244873, 0.006592671871185302, 0.005777247905731201, 0.005753376007080078, 0.005505023956298828, 0.005765312194824219, 0.0057710719108581545, 0.005799935817718506, 0.0058995838165283206, 0.005745344161987305, 0.005797887802124023, 0.005738495826721191, 0.005801983833312988, 0.00574294376373291, 0.005905695915222168, 0.00574403190612793, 0.00577839994430542, 0.005752831935882568, 0.005756703853607178, 0.005758719921112061, 0.005763840198516845, 0.005773248195648194, 0.0057259840965271, 0.005796063899993897, 0.005754655838012696, 0.005747744083404541, 0.005743584156036377, 0.005714176177978515, 0.005781248092651368, 0.005736447811126709, 0.005744639873504639, 0.005719488143920899, 0.005749311923980713, 0.005729407787322998, 0.005743519783020019, 0.0057749757766723635, 0.005790048122406006, 0.0057643518447875975, 0.005688064098358155, 0.005791744232177734, 0.005728256225585937, 0.005775360107421875, 0.005758975982666016, 0.0057118721008300784, 0.005758975982666016, 0.005726079940795898, 0.005752960205078125, 0.005746784210205078, 0.005721183776855468, 0.005739071846008301, 0.005693727970123291, 0.005876736164093017, 0.005732639789581299, 0.005770016193389893, 0.005719967842102051, 0.005787648200988769, 0.005707456111907959, 0.005744639873504639, 0.0057736320495605465, 0.005849088191986084, 0.005794879913330078, 0.0057066879272460935, 0.0057712640762329105, 0.005732160091400146, 0.005713664054870606, 0.00574508810043335, 0.005709824085235596, 0.005475999832153321, 0.005723616123199463, 0.0057209601402282715, 0.005683199882507324, 0.005752511978149414, 0.005699903964996338, 0.005749983787536621, 0.005681568145751953, 0.005720191955566406, 0.005751039981842041, 0.0056908798217773435, 0.005725056171417236, 0.00570854377746582, 0.00573529577255249, 0.005715744018554687, 0.0057264318466186525, 0.005740543842315674, 0.0057118721008300784, 0.005766848087310791, 0.005702303886413574, 0.005727744102478028, 0.005712031841278076, 0.005724160194396972, 0.005728096008300781, 0.005724031925201416, 0.005780799865722656, 0.005698527812957764, 0.005744639873504639, 0.00572380781173706, 0.005736288070678711, 0.005741055965423584, 0.005705728054046631, 0.00573641586303711, 0.005705088138580322, 0.0058026561737060545, 0.005731872081756592, 0.005732319831848145, 0.005705408096313477, 0.005704512119293213, 0.00573795223236084, 0.00567903995513916, 0.0057309122085571285, 0.005695583820343017, 0.005771168231964111, 0.005731711864471436, 0.005737088203430176, 0.00573747205734253, 0.0057497601509094234, 0.0057487359046936035, 0.005706975936889649, 0.005749536037445068, 0.005703680038452149, 0.005750175952911377, 
0.0057411518096923825, 0.005711135864257813, 0.005872352123260498, 0.005701632022857666, 0.00575929594039917, 0.00568287992477417, 0.005695263862609864, 0.005728447914123535, 0.005681183815002442, 0.005729472160339356, 0.005457312107086182, 0.005780191898345948, 0.005727136135101318, 0.005743584156036377, 0.005710976123809814, 0.005727104187011719, 0.005707168102264405, 0.005718336105346679, 0.00571449613571167, 0.005705440044403076, 0.0057565121650695805, 0.005707231998443604, 0.005718815803527832, 0.005735680103302002, 0.0056984639167785645, 0.006385663986206055, 0.005732096195220947, 0.005756383895874024, 0.005724736213684082, 0.005769055843353271, 0.005697120189666748, 0.005745728015899659, 0.005696288108825684, 0.005696512222290039, 0.005719999790191651, 0.005679103851318359, 0.0057264318466186525, 0.005669695854187012, 0.005716959953308106, 0.005683199882507324, 0.00570908784866333, 0.005700064182281494, 0.005666048049926758, 0.005708799839019775, 0.0056852478981018065, 0.005728511810302734, 0.00568665599822998, 0.005689727783203125, 0.005723423957824707, 0.005745376110076904, 0.005734399795532226, 0.00566476821899414, 0.005705728054046631, 0.005701632022857666, 0.005688960075378418, 0.005742976188659668, 0.005693439960479736, 0.005738368034362793, 0.005717951774597168, 0.0057325439453125, 0.005766240119934082, 0.005710752010345459, 0.005758624076843261, 0.005696191787719727, 0.005729951858520508, 0.005691167831420899, 0.00572438383102417, 0.0056852478981018065, 0.005682784080505371, 0.005736480236053467, 0.0056958718299865724, 0.0057357120513916015, 0.005693376064300537, 0.005470047950744629, 0.005810336112976074, 0.0057429118156433106, 0.005684927940368652, 0.005775135993957519, 0.005730368137359619, 0.005705408096313477, 0.005732448101043701, 0.005689568042755127, 0.0057140798568725585, 0.005681375980377198, 0.005693215847015381, 0.005713920116424561, 0.005732416152954101, 0.00577894401550293, 0.005674943923950195, 0.005738463878631592, 0.005681600093841553, 0.0057283520698547365, 0.005707712173461914, 0.0056600642204284665, 0.00572438383102417, 0.0058577280044555665, 0.005804128170013427, 0.0057768640518188474, 0.005726880073547364, 0.005708928108215332, 0.005687520027160644, 0.005720191955566406, 0.0056789121627807615, 0.005738080024719238, 0.0056943678855896, 0.00572211217880249, 0.005748479843139648, 0.006233823776245117, 0.005736063957214355, 0.005775584220886231, 0.00578220796585083, 0.005732672214508057, 0.00579699182510376, 0.0057247037887573245, 0.005752255916595459, 0.0057309122085571285, 0.005767168045043946, 0.005706783771514893, 0.005691423892974854, 0.005729119777679443, 0.005675104141235352, 0.005728256225585937, 0.005679103851318359, 0.00567625617980957, 0.005751584053039551, 0.005712128162384033, 0.005727071762084961, 0.005687967777252197, 0.005732607841491699, 0.005679103851318359, 0.005709824085235596, 0.005842656135559082, 0.005720352172851563, 0.005758975982666016, 0.005679103851318359, 0.0057641921043396, 0.005419007778167725, 0.005715360164642334, 0.005650527954101562, 0.005685760021209717, 0.005699295997619629, 0.005662303924560547, 0.005710527896881103, 0.005720064163208007, 0.005712992191314698, 0.005675712108612061, 0.005658559799194336, 0.005695775985717773, 0.005735616207122803, 0.005720672130584717, 0.005679456233978272, 0.005756800174713135, 0.0059202880859375, 0.005771743774414063, 0.005719264030456543, 0.005698336124420166, 0.005758975982666016, 0.005703008174896241, 0.005744863986968994, 0.005697984218597412, 0.005712224006652832, 
0.005736095905303955, 0.005753024101257324, 0.005761119842529297, 0.005680863857269287, 0.005758975982666016, 0.005720320224761963, 0.005744383811950683, 0.005728256225585937, 0.0057149438858032225, 0.0057497601509094234, 0.005691391944885254, 0.005830687999725342, 0.005713888168334961, 0.005738143920898438, 0.005694079875946045, 0.00568291187286377, 0.005711296081542969, 0.005698207855224609, 0.0057235841751098636, 0.005670944213867188, 0.005777823925018311, 0.005699615955352783, 0.005688416004180908, 0.005755807876586914, 0.0057019200325012205, 0.005711071968078614, 0.005680863857269287, 0.005703584194183349, 0.005692255973815918, 0.005671135902404785, 0.005773119926452637, 0.005660287857055664, 0.005742496013641358, 0.005693920135498047, 0.0057012162208557125, 0.005701087951660157, 0.0056943998336792, 0.005734303951263428, 0.005458687782287598, 0.005739903926849365, 0.005703551769256592, 0.0057535037994384764, 0.005665120124816895, 0.00574838399887085, 0.005715167999267578, 0.00571292781829834, 0.005730239868164062, 0.005721920013427735, 0.005785600185394287, 0.005819392204284668, 0.005748960018157959, 0.00571676778793335, 0.005761023998260498, 0.005732639789581299, 0.005717152118682861, 0.005750879764556884, 0.0057123517990112305, 0.005801439762115479, 0.005704192161560059, 0.005742623805999756, 0.005756415843963623, 0.005747456073760987, 0.00576691198348999, 0.005744639873504639, 0.005763040065765381, 0.005730175971984863, 0.005769375801086426, 0.005783552169799804, 0.005736671924591065, 0.0057606081962585445, 0.005710015773773194, 0.005742368221282959, 0.005701856136322021, 0.005744639873504639, 0.005692671775817871, 0.005736671924591065, 0.005783743858337402, 0.005706175804138184, 0.005744416236877442, 0.005718336105346679, 0.005726016044616699, 0.005786752223968506, 0.0057404799461364744, 0.005690303802490235, 0.005690976142883301, 0.0058527359962463376, 0.0057740478515625, 0.0057647361755371095, 0.005710624217987061, 0.0057586879730224605, 0.005709824085235596, 0.005713696002960205, 0.005746175765991211, 0.005704415798187256, 0.0057379198074340824, 0.00571888017654419, 0.0057586879730224605, 0.005717599868774414, 0.0057266240119934085, 0.005724160194396972, 0.005775360107421875, 0.005437183856964112, 0.005684512138366699, 0.005734560012817383, 0.005727039813995361, 0.005729375839233398, 0.005704607963562012, 0.005726208209991455, 0.005720064163208007, 0.005694687843322754, 0.005746943950653076, 0.005674943923950195, 0.005746975898742675, 0.005731711864471436, 0.00574560022354126, 0.005711743831634522, 0.005716032028198243, 0.005765279769897461, 0.005800864219665527, 0.005786623954772949, 0.005718016147613526, 0.0057773118019104, 0.005709216117858886, 0.005786303997039795, 0.005718016147613526, 0.005701280117034912, 0.0057511358261108396, 0.0057010560035705565, 0.005808544158935547, 0.005717599868774414, 0.005880383968353271, 0.005715968132019043, 0.005755008220672608, 0.005738336086273194, 0.0056934719085693355, 0.005713920116424561, 0.005675007820129394, 0.005724063873291016, 0.005681248188018799, 0.005696832180023193, 0.005728960037231445, 0.005688767910003662, 0.005795904159545898, 0.005958144187927246, 0.005772704124450683, 0.005691904067993164, 0.005761119842529297, 0.005733727931976318, 0.005691359996795655, 0.005739039897918701, 0.005707935810089111, 0.005851136207580566, 0.0057621440887451175, 0.005753407955169678, 0.005697440147399902, 0.005719583988189697, 0.00573737621307373, 0.005691359996795655, 0.00573033618927002, 0.005699584007263184, 0.005720064163208007, 
0.00573583984375, 0.005962495803833008, 0.005728096008300781, 0.005452799797058106, 0.005699615955352783, 0.0056877121925354, 0.005723968029022217, 0.005671040058135986, 0.005790336132049561, 0.005689343929290771, 0.005705503940582275, 0.0056869759559631345, 0.005693984031677246, 0.005695487976074219, 0.005709824085235596, 0.005709824085235596, 0.00569382381439209, 0.005693056106567383, 0.005793407917022705, 0.005699967861175537, 0.005753983974456787, 0.005718944072723389, 0.0057405118942260745, 0.005681151866912842, 0.005799456119537354, 0.005697152137756348, 0.005684063911437988, 0.005762752056121826, 0.0056888961791992185, 0.0057064957618713375, 0.005701632022857666, 0.005705728054046631, 0.005697535991668701, 0.005693439960479736, 0.005719808101654053, 0.0056928000450134275, 0.005718912124633789, 0.005735775947570801, 0.005713696002960205, 0.005732384204864502, 0.005714591979980469, 0.005718207836151123, 0.005686624050140381, 0.00572873592376709, 0.0056847681999206544, 0.005702303886413574, 0.005734399795532226, 0.005721695899963379, 0.00574015998840332, 0.005688096046447754, 0.0057300481796264645, 0.005708032131195069, 0.005735487937927246, 0.005710015773773194, 0.005712639808654785, 0.005732287883758545, 0.005736512184143066, 0.005734079837799072, 0.005673279762268066, 0.005719840049743652, 0.0057264318466186525, 0.005705023765563965, 0.005748640060424805, 0.005679679870605469, 0.0057346878051757815, 0.005764607906341553, 0.005452447891235351, 0.005684480190277099, 0.005711711883544922, 0.005690271854400635, 0.005666912078857422, 0.005705440044403076, 0.005680448055267334, 0.005716832160949707, 0.00573632001876831, 0.005775519847869873, 0.005699039936065674, 0.005674528121948242, 0.005733376026153565, 0.005677055835723877, 0.00572211217880249, 0.005682240009307862, 0.005712575912475586, 0.0056977920532226565, 0.005709824085235596, 0.005727871894836426, 0.005674975872039795, 0.0057325758934021, 0.005666240215301514, 0.005700287818908692, 0.005959743976593017, 0.005695487976074219, 0.005724160194396972, 0.00571289587020874, 0.0057497601509094234, 0.005699456214904785, 0.005703807830810547, 0.0057069120407104495, 0.0057289919853210445, 0.005736256122589111, 0.005697855949401856, 0.005736447811126709, 0.005701632022857666, 0.0057487359046936035, 0.005705728054046631, 0.005684319972991943, 0.005773663997650147, 0.005677631855010986, 0.005736447811126709, 0.00566428804397583, 0.005693183898925781, 0.005681663990020752, 0.005783423900604248, 0.005808479785919189, 0.005699264049530029, 0.005971551895141602, 0.005765952110290527, 0.005853087902069092, 0.005949120044708252, 0.005773151874542236, 0.005695968151092529, 0.005703680038452149, 0.005754879951477051, 0.0056748161315917965, 0.005736991882324218, 0.005680799961090088, 0.005742591857910156, 0.005672959804534912, 0.005718016147613526, 0.005473919868469238, 0.0057883839607238766, 0.0057560958862304685, 0.00571673583984375, 0.005752895832061768, 0.005750559806823731, 0.005762368202209473, 0.00570198392868042, 0.005734272003173828, 0.005717696189880371, 0.005755904197692871, 0.0057565121650695805, 0.005726592063903808, 0.005741600036621094, 0.005725024223327636, 0.005791903972625733, 0.005721504211425781, 0.005760704040527344, 0.005763775825500488, 0.005714144229888916, 0.005818336009979248, 0.005834784030914307, 0.005761023998260498, 0.00576643180847168, 0.005845215797424317, 0.005724671840667724, 0.005754879951477051, 0.005736447811126709, 0.005710912227630615, 0.00577836799621582, 0.00578275203704834, 0.00576796817779541, 
0.005746943950653076, 0.0057300481796264645, 0.0057487359046936035, 0.005728256225585937, 0.005717887878417969, 0.005695807933807373, 0.005727615833282471, 0.005752511978149414, 0.00572211217880249, 0.005703936100006104, 0.005697440147399902, 0.005892704010009766, 0.00571014404296875, 0.005758656024932861, 0.0056910400390625, 0.005730495929718017, 0.005712031841278076, 0.005750336170196533, 0.005708223819732666, 0.0057274560928344724, 0.005700160026550293, 0.0056854720115661625, 0.005709824085235596, 0.0056640000343322755, 0.005678175926208496, 0.005701280117034912, 0.005695231914520264, 0.0057021121978759765, 0.005668255805969238, 0.0056917757987976075, 0.005716063976287842, 0.0054921917915344235, 0.005706463813781738, 0.005684671878814697, 0.005749343872070312, 0.005680191993713379, 0.005716928005218506, 0.005681151866912842, 0.005711391925811767, 0.005691679954528808, 0.005681344032287598, 0.00573641586303711, 0.005665984153747559, 0.00571017599105835, 0.005714431762695312, 0.005713024139404297, 0.00569871997833252, 0.005684031963348389, 0.005741471767425537, 0.00566476821899414, 0.005799935817718506, 0.005808000087738037, 0.00569923210144043, 0.0056795840263366695, 0.005693439960479736, 0.005717504024505615, 0.005675519943237305, 0.005703680038452149, 0.0056763520240783695, 0.005804448127746582, 0.005674943923950195, 0.005703743934631348, 0.005749023914337158, 0.005735424041748047, 0.005747392177581787, 0.005676640033721924, 0.005698272228240967, 0.005717343807220459, 0.0056900157928466795, 0.005760191917419433, 0.005716800212860107, 0.0057118721008300784, 0.0057094721794128415, 0.005747039794921875, 0.005715968132019043, 0.005689343929290771, 0.005742591857910156, 0.005669951915740967, 0.005731264114379883, 0.005709375858306885, 0.005751232147216797, 0.005715968132019043, 0.005928895950317383, 0.0057569918632507325, 0.005707071781158447, 0.005732960224151611, 0.005703775882720947, 0.005734399795532226, 0.00570908784866333, 0.005704544067382813, 0.005710976123809814, 0.005689888000488281, 0.005773344039916992, 0.0057079682350158695, 0.00544217586517334, 0.005723487854003906, 0.005735136032104492, 0.005699647903442383, 0.005718111991882324, 0.005699488162994384, 0.005726208209991455, 0.0057337918281555175, 0.006023295879364014, 0.006027743816375732, 0.007187935829162597, 0.00656441593170166, 0.00647983980178833, 0.005775360107421875, 0.0057785921096801755, 0.0057803201675415035, 0.0058017921447753905, 0.00573635196685791, 0.00579145622253418, 0.005752448081970215, 0.0057942080497741695, 0.005758975982666016, 0.005722655773162842, 0.005808127880096436, 0.0056852478981018065, 0.0057766718864440915, 0.005685984134674072, 0.005685120105743408, 0.005763199806213379, 0.005689343929290771, 0.0057580161094665525, 0.0057099518775939944, 0.005774144172668457, 0.0056828479766845704, 0.005758336067199707, 0.0057223038673400875, 0.005692192077636719, 0.005738336086273194, 0.00569974422454834, 0.005728256225585937, 0.0056852478981018065, 0.0057012162208557125, 0.005723872184753418, 0.005685344219207764, 0.005775263786315918, 0.005718239784240723, 0.005730783939361573, 0.005668863773345947, 0.005734399795532226, 0.005713920116424561, 0.005698592185974121, 0.005732416152954101, 0.005694176197052002, 0.005754271984100342, 0.005669343948364258, 0.005700992107391357, 0.005721024036407471, 0.00566806411743164, 0.005710624217987061, 0.005675007820129394, 0.005763072013854981, 0.005682943820953369, 0.005677631855010986, 0.005991583824157715, 0.005774208068847656, 0.006531136035919189, 0.006038784027099609, 
0.006216383934020996, 0.005761151790618897, 0.005717951774597168, 0.005751967906951904, 0.005763936042785645, 0.005740543842315674, 0.005735807895660401, 0.005706592082977295, 0.005756703853607178, 0.00571343994140625, 0.005761504173278809, 0.005823584079742432, 0.005774367809295655, 0.005758431911468506, 0.005735136032104492, 0.005723904132843018, 0.0056995201110839845, 0.005758975982666016, 0.005724063873291016, 0.006043744087219238, 0.005709824085235596, 0.005744639873504639, 0.005726208209991455, 0.005728256225585937, 0.005746687889099121, 0.005726016044616699, 0.0057502717971801755, 0.0056798081398010255, 0.005717440128326416, 0.005698112010955811, 0.005711647987365722, 0.005772928237915039, 0.005742591857910156, 0.0057227201461791995, 0.005687295913696289, 0.005705408096313477, 0.005697855949401856, 0.005721920013427735, 0.005719840049743652, 0.005732511997222901, 0.005742623805999756, 0.005673024177551269, 0.005765279769897461, 0.005683199882507324, 0.005681056022644043, 0.005715456008911133, 0.005681407928466797, 0.005730656147003174, 0.00608460807800293, 0.005769087791442871, 0.005752768039703369, 0.005754784107208252, 0.005705984115600586, 0.005703711986541748, 0.005738463878631592, 0.005668384075164795, 0.005731904029846192, 0.005690303802490235, 0.005697535991668701, 0.005433343887329102, 0.006315104007720947, 0.005829408168792725, 0.005861504077911377, 0.005688576221466064, 0.005698048114776612, 0.005717343807220459, 0.005670944213867188, 0.005733471870422363, 0.005700543880462646, 0.005774176120758057, 0.005748095989227295, 0.005777472019195556, 0.005720640182495117, 0.0057118721008300784, 0.005750495910644532, 0.0057012481689453125, 0.005744639873504639, 0.005728928089141846, 0.005746687889099121, 0.005768511772155762, 0.005677760124206543, 0.005731679916381836, 0.005689856052398682, 0.005734560012817383, 0.005666463851928711, 0.00570198392868042, 0.005713791847229004, 0.005654592037200928, 0.005793856143951416, 0.005674655914306641, 0.0057563199996948245, 0.005696447849273681, 0.005695680141448975, 0.00571347188949585, 0.00566707181930542, 0.005744703769683838, 0.005693056106567383, 0.005773791790008545, 0.005736288070678711, 0.005791744232177734, 0.005703807830810547, 0.005678976058959961, 0.005758975982666016, 0.005674880027770996, 0.005756800174713135, 0.005748415946960449, 0.005749279975891113, 0.005774847984313965, 0.0057247037887573245, 0.005723487854003906, 0.005734591960906982, 0.005742368221282959, 0.005683584213256836, 0.005742303848266602, 0.006082399845123291, 0.0058821120262146, 0.006649631977081299, 0.006640351772308349, 0.005734399795532226, 0.005756351947784424, 0.008097696304321288, 0.006175392150878907, 0.0055073280334472655, 0.005753312110900879, 0.005818367958068848, 0.005761023998260498, 0.005777696132659912, 0.005743648052215576, 0.00609772777557373, 0.005731455802917481, 0.005863584041595459, 0.0057862081527709965, 0.005742271900177002, 0.005754975795745849, 0.005748223781585693, 0.005747424125671387, 0.005700895786285401, 0.005748896121978759, 0.005712416172027588, 0.005719583988189697, 0.005700096130371094, 0.005692895889282227, 0.005746463775634766, 0.005772031784057617, 0.00580515193939209, 0.0058213438987731935, 0.005814271926879883, 0.005752031803131104, 0.005753407955169678, 0.005758175849914551, 0.005696352005004883, 0.005774943828582763, 0.0057346558570861815, 0.005852960109710694, 0.005810400009155273, 0.005833024024963379, 0.005754240036010742, 0.005732960224151611, 0.00575820779800415, 0.005720320224761963, 0.005795711994171142, 
0.005724832057952881, 0.005832704067230224, 0.005767168045043946, 0.0057712640762329105, 0.005752831935882568, 0.005730656147003174, 0.0058958401679992676, 0.005781504154205322, 0.005836800098419189, 0.005894015789031983, 0.00581440019607544, 0.005821856021881103, 0.005812831878662109, 0.005758975982666016, 0.005753119945526123, 0.0057586879730224605, 0.0057094721794128415, 0.005795616149902344, 0.005716544151306152, 0.005773312091827393, 0.005827968120574951, 0.005802112102508545, 0.0057281599044799805, 0.005761023998260498, 0.0054906878471374515, 0.005774432182312011, 0.0057225279808044436, 0.006019584178924561, 0.005799359798431396, 0.005709824085235596, 0.005759552001953125, 0.005726208209991455, 0.005746719837188721, 0.005699552059173584, 0.005730303764343261, 0.005750783920288086, 0.005688799858093262, 0.005765664100646973, 0.005767168045043946, 0.00575270414352417, 0.005734432220458985, 0.005752927780151367, 0.005742623805999756, 0.00571398401260376, 0.005767072200775146, 0.005732351779937744, 0.005834176063537598, 0.005759552001953125, 0.005785600185394287, 0.005746687889099121, 0.005764512062072754, 0.0057309122085571285, 0.0057118721008300784, 0.0058241281509399416, 0.005740928173065186, 0.005766816139221191, 0.00570198392868042, 0.005811488151550293, 0.005704192161560059, 0.005708000183105469, 0.0057662720680236815, 0.005716127872467041, 0.005745728015899659, 0.00577507209777832, 0.005771200180053711, 0.005740543842315674, 0.005787648200988769, 0.005762688159942627, 0.0057181758880615234, 0.005765632152557373, 0.005770976066589355, 0.00588595199584961, 0.005754879951477051, 0.0058197760581970214, 0.0057452797889709474, 0.005868576049804688, 0.005943808078765869, 0.005845471858978272, 0.005795839786529541, 0.005730303764343261, 0.005801119804382324, 0.005720928192138672, 0.0057671360969543456, 0.005879327774047851, 0.005816832065582276, 0.005783552169799804, 0.005762976169586181, 0.005456160068511963, 0.005768256187438965, 0.005735360145568848, 0.00570576000213623, 0.005740863800048828, 0.005717408180236816, 0.0058616318702697755, 0.005715968132019043, 0.0057662720680236815, 0.005738719940185547, 0.005767424106597901, 0.005764800071716309, 0.005710432052612305, 0.005763167858123779, 0.005703104019165039, 0.005797791957855225, 0.006018815994262695, 0.005874623775482177, 0.005793471813201904, 0.005787360191345215, 0.0057636799812316895, 0.0057379198074340824, 0.005761600017547607, 0.005756768226623535, 0.005816480159759521, 0.005789279937744141, 0.005820000171661377, 0.005761375904083252, 0.005755328178405762, 0.0057569599151611325, 0.005761023998260498, 0.005754784107208252, 0.005699679851531982, 0.005761023998260498, 0.005713920116424561, 0.0058570241928100585, 0.005808383941650391, 0.005856383800506592, 0.005799808025360107, 0.005805344104766846, 0.005811935901641846, 0.005865471839904785, 0.005801760196685791, 0.005746912002563476, 0.005904672145843506, 0.005748447895050049, 0.005795423984527588, 0.005718560218811035, 0.005726079940795898, 0.005777408123016357, 0.005725279808044433, 0.005800159931182861, 0.005740543842315674, 0.005731296062469482, 0.005764224052429199, 0.005779776096343994, 0.005756864070892334, 0.005761375904083252, 0.005756192207336426, 0.005757728099822998, 0.005752768039703369, 0.005726208209991455, 0.005773312091827393, 0.005451776027679443, 0.005785600185394287, 0.005896224021911621, 0.0058080959320068355, 0.005746687889099121, 0.0057381119728088376, 0.005756735801696777, 0.005745215892791748, 0.005900479793548584, 0.00576691198348999, 
0.005802048206329346, 0.005867519855499268, 0.0058206400871276855, 0.0057608962059021, 0.005800127983093262, 0.005773151874542236, 0.005795711994171142, 0.005791264057159424, 0.005790175914764405, 0.005795839786529541, 0.005707007884979248, 0.005761792182922363, 0.0057118721008300784, 0.005772928237915039, 0.00573199987411499, 0.005706272125244141, 0.005740255832672119, 0.005750304222106934, 0.0065342397689819335, 0.006590015888214111, 0.006962912082672119, 0.006301375865936279, 0.005790143966674805, 0.005843391895294189, 0.005773312091827393, 0.005830656051635743, 0.005833792209625244, 0.005819104194641113, 0.005785823822021485, 0.0057487359046936035, 0.005756063938140869, 0.00571068811416626, 0.005777408123016357, 0.005703616142272949, 0.00577132797241211, 0.005752831935882568, 0.005866975784301758, 0.005900832176208496, 0.005770976066589355, 0.005775904178619385, 0.005734144210815429, 0.005750751972198486, 0.005724192142486572, 0.005750144004821777, 0.005687327861785889, 0.005769824028015137, 0.005773312091827393, 0.005720064163208007, 0.005801824092864991, 0.005826879978179931, 0.005796832084655762, 0.005797760009765625, 0.005802112102508545, 0.005476319789886474, 0.0059004158973693845, 0.005716383934020996, 0.005797311782836914, 0.005741119861602783, 0.005695551872253418, 0.0057363839149475095, 0.005740543842315674, 0.005746079921722412, 0.00569814395904541, 0.005806272029876709, 0.005734208106994629, 0.005793791770935058, 0.00574780797958374, 0.005712800025939942, 0.005737823963165283, 0.005732031822204589, 0.005776351928710938, 0.0057259840965271, 0.0057530560493469236, 0.005795839786529541, 0.0057511358261108396, 0.005748223781585693, 0.0057366080284118656, 0.005773312091827393, 0.0057487359046936035, 0.005791359901428222, 0.005718400001525879, 0.005763072013854981, 0.005816319942474365, 0.005738495826721191, 0.005787583827972412, 0.0057325439453125, 0.005809792041778564, 0.005815743923187256, 0.005812928199768066, 0.00578163194656372, 0.005777408123016357, 0.005924575805664063, 0.005832223892211914, 0.005786367893218994, 0.005791744232177734, 0.00581334400177002, 0.005745567798614502, 0.005804128170013427, 0.005754784107208252, 0.005754879951477051, 0.005859327793121338, 0.005859327793121338, 0.006033311843872071, 0.0058269758224487305, 0.005806975841522217, 0.005753439903259277, 0.005846496105194092, 0.005784319877624512, 0.005885248184204102, 0.005831200122833252, 0.0059333438873291015, 0.005836671829223633, 0.005849088191986084, 0.00586956787109375, 0.005869088172912598, 0.005818880081176757, 0.005672959804534912, 0.005990399837493897, 0.006049088001251221, 0.005909183979034424, 0.005934463977813721, 0.005891744136810303, 0.005896671772003174, 0.005884416103363037, 0.005971968173980713, 0.005910208225250244, 0.005935423851013184, 0.005898240089416504, 0.005899424076080322, 0.005906879901885986, 0.005976672172546387, 0.005990208148956299, 0.006030623912811279, 0.005968607902526855, 0.005924863815307617, 0.005924863815307617, 0.005857279777526855, 0.005947391986846923, 0.0059136319160461425, 0.005951583862304688, 0.005913760185241699, 0.006010591983795166, 0.005860991954803467, 0.005916351795196533, 0.005851840019226074, 0.005891392230987549, 0.006072319984436035, 0.006269120216369629, 0.005933887958526611, 0.00598521614074707, 0.005870336055755615, 0.00591161584854126, 0.005874656200408936, 0.0059268479347229005, 0.0059220480918884275, 0.0059992637634277346, 0.005867648124694824, 0.005951648235321045, 0.005858719825744629, 0.005904863834381104, 0.005842368125915528, 
0.005976607799530029, 0.00593830394744873, 0.005964608192443848, 0.006017087936401367, 0.005977248191833496, 0.005954080104827881, 0.006000800132751465, 0.005980095863342285, 0.006113056182861328, 0.005943136215209961, 0.005943903923034668, 0.005984255790710449, 0.005894144058227539, 0.006031360149383545, 0.005899903774261475, 0.006019584178924561, 0.005965695858001709, 0.005990047931671142, 0.0056423358917236325, 0.006110239982604981, 0.006104032039642334, 0.0059649600982666015, 0.006138720035552978, 0.005887263774871826, 0.00597270393371582, 0.005980160236358643, 0.005957312107086181, 0.005962048053741455, 0.00598960018157959, 0.005944096088409424, 0.006078656196594238, 0.005918528079986572, 0.006009056091308594, 0.005934463977813721, 0.005992544174194336, 0.005979648113250732, 0.006048319816589355, 0.006013216018676758, 0.006043583869934082, 0.006000671863555908, 0.006008831977844238, 0.005947008132934571, 0.006175104141235351, 0.005965280055999756, 0.006033952236175537, 0.005947391986846923, 0.005967455863952637, 0.005896607875823975, 0.005963679790496826, 0.005927008152008056, 0.005951295852661133, 0.005908991813659668, 0.0059409279823303225, 0.005908480167388916, 0.00591161584854126, 0.005944255828857422, 0.005985631942749024, 0.0059992637634277346, 0.005999680042266846, 0.005968832015991211, 0.0059433279037475585, 0.005971936225891114, 0.005997632026672363, 0.005981279850006104, 0.005941152095794678, 0.006051167964935302, 0.006041984081268311, 0.005959904193878174, 0.005963456153869629, 0.005988671779632569, 0.005918240070343018, 0.0059683837890625, 0.005904096126556397, 0.006011136054992676, 0.005936895847320557, 0.005982463836669922, 0.005901440143585205, 0.005927807807922363, 0.005900288105010986, 0.005938943862915039, 0.0058529281616210935, 0.005595136165618897, 0.005918784141540527, 0.005994559764862061, 0.005916543960571289, 0.0059304962158203125, 0.005863935947418213, 0.00594323205947876, 0.0058566398620605465, 0.006099904060363769, 0.0059366078376770015, 0.006000383853912354, 0.0058969597816467285, 0.005983551979064941, 0.006028831958770752, 0.005952447891235352, 0.005846176147460937, 0.005937952041625977, 0.005937215805053711, 0.005951488018035888, 0.00590880012512207, 0.005963456153869629, 0.00590553617477417, 0.005929312229156494, 0.005877280235290527, 0.00596227216720581, 0.005931551933288574, 0.005990464210510254, 0.005945216178894043, 0.005936319828033447, 0.005887008190155029, 0.00591871976852417, 0.005854432106018066, 0.00587337589263916, 0.006013055801391601, 0.006044384002685547, 0.00605398416519165, 0.006051743984222412, 0.006022175788879394, 0.0060150399208068845, 0.005979072093963623, 0.005985631942749024, 0.005972608089447022, 0.006041600227355957, 0.0060063362121582035, 0.005919167995452881, 0.005893856048583984, 0.0058657598495483396, 0.0059269118309021, 0.005937151908874512, 0.005959680080413818, 0.005935200214385986, 0.005946368217468262, 0.005894303798675537, 0.006145088195800781, 0.00598803186416626, 0.006022143840789795, 0.005909471988677978, 0.0059656319618225095, 0.005925087928771972, 0.00591871976852417, 0.005924799919128418, 0.006000256061553955, 0.005931456089019776, 0.005665120124816895, 0.005917727947235107, 0.006003967761993408, 0.0059060478210449215, 0.005885600090026855, 0.005851776123046875, 0.005880703926086426, 0.0058659520149230955, 0.005945055961608887, 0.005872384071350097, 0.005947391986846923, 0.005890111923217773, 0.005944479942321777, 0.005850207805633545, 0.005928639888763428, 0.005912576198577881, 0.0059634242057800295, 
0.00593065595626831, 0.005969888210296631, 0.005904704093933105, 0.005976480007171631, 0.0058849921226501465, 0.005859744071960449, 0.005908736228942871, 0.005873951911926269, 0.0059269118309021, 0.0058940801620483395, 0.006006847858428955, 0.005893152236938476, 0.005964735984802246, 0.005934720039367676, 0.005976480007171631, 0.005912576198577881, 0.006026815891265869, 0.0058986878395080565, 0.005967872142791748, 0.006030655860900879, 0.005972832202911377, 0.005902175903320313, 0.005940415859222412, 0.005870336055755615, 0.005904416084289551, 0.0058798398971557616, 0.0059269118309021, 0.005873472213745117, 0.005913983821868896, 0.0058990721702575685, 0.005970208168029785, 0.005932352066040039, 0.005986720085144043, 0.005879104137420654, 0.005890111923217773, 0.00589785623550415, 0.005912735939025879, 0.005960544109344482, 0.005908063888549805, 0.005947807788848877, 0.005918496131896973, 0.005986527919769287, 0.0059552001953125, 0.005937119960784912, 0.005875807762145996, 0.006055424213409424, 0.005798208236694336, 0.005969183921813965, 0.006015711784362793, 0.005953695774078369, 0.005924704074859619, 0.005912703990936279, 0.0059220800399780275, 0.0059585280418396, 0.005967584133148194, 0.0059060478210449215, 0.005917056083679199, 0.005930719852447509, 0.00589353609085083, 0.005962624073028565, 0.0059064321517944334, 0.005922815799713135, 0.005873856067657471, 0.006032639980316162, 0.005886176109313965, 0.005919072151184082, 0.005884191989898682, 0.006075679779052734, 0.005996672153472901, 0.005975776195526123, 0.005910431861877442, 0.005956448078155517, 0.005867231845855713, 0.005908256053924561, 0.005877855777740478, 0.006060287952423095, 0.005930399894714356, 0.005923423767089844, 0.005822463989257813, 0.005914720058441162, 0.0058607678413391115, 0.005943808078765869, 0.005903776168823242, 0.005937759876251221, 0.005881375789642334, 0.005824063777923584, 0.005862239837646484, 0.0057898879051208494, 0.005815455913543701, 0.005898975849151612, 0.005912831783294678, 0.005845056056976318, 0.005871295928955078, 0.005830304145812988, 0.005816832065582276, 0.0058215041160583495, 0.005782464027404785, 0.005898079872131348, 0.0063569917678833006, 0.006510560035705566, 0.005928991794586181, 0.0058716158866882326, 0.005912576198577881, 0.00582860803604126, 0.005908480167388916, 0.005852767944335938, 0.005818592071533203, 0.005853375911712646, 0.0058014078140258786, 0.005668992042541504, 0.006050303936004638, 0.005926752090454101, 0.005927072048187256, 0.00591871976852417, 0.005904160022735596, 0.005868800163269043, 0.005823423862457275, 0.005899839878082276, 0.005876192092895508, 0.005908415794372558, 0.005838912010192871, 0.005852320194244385, 0.005853248119354248, 0.005820352077484131, 0.005802847862243652, 0.005896192073822021, 0.005840127944946289, 0.005899007797241211, 0.005869088172912598, 0.0058427839279174805, 0.005851295948028565, 0.0058304319381713866, 0.005850111961364746, 0.005826240062713623, 0.005840896129608154, 0.005832704067230224, 0.005823935985565186, 0.005835552215576172, 0.005928671836853027, 0.0059146881103515625, 0.006047423839569092, 0.005847072124481201, 0.005898335933685303, 0.005914015769958496, 0.00594927978515625, 0.005936063766479492, 0.005930784225463867, 0.005959072113037109, 0.005882751941680908, 0.005975039958953857, 0.005892320156097412, 0.005869344234466552, 0.005858240127563477, 0.005900288105010986, 0.005974016189575195, 0.005885216236114502, 0.005796576023101806, 0.00585433578491211, 0.005865600109100342, 0.005941855907440186, 0.005856448173522949, 
0.005860320091247559, 0.005809919834136963, 0.00586572790145874, 0.005854879856109619, 0.005853536128997802, 0.00586137580871582, 0.0058388481140136715, 0.005934624195098877, 0.005855584144592285, 0.005877088069915772, 0.005869472026824951, 0.00553984022140503, 0.005840095996856689, 0.005902304172515869, 0.005851136207580566, 0.005925024032592773, 0.005914527893066407, 0.005988319873809814, 0.005913375854492187, 0.0058853440284729, 0.005908351898193359, 0.005868256092071533, 0.005855231761932373, 0.005917759895324707, 0.005911488056182862, 0.005904384136199951, 0.005896192073822021, 0.006028704166412354, 0.005851744174957275, 0.005830656051635743, 0.006140031814575195, 0.005879903793334961, 0.005984032154083252, 0.005879903793334961, 0.0059780158996582036, 0.005928959846496582, 0.005879039764404297, 0.005831424236297607, 0.005894400119781494, 0.0058406081199646, 0.005881887912750244, 0.005793791770935058, 0.005829696178436279, 0.005827648162841797, 0.005829855918884277, 0.005909152030944824, 0.005857279777526855, 0.005881887912750244, 0.005854944229125977, 0.005849343776702881, 0.005787424087524414, 0.006061535835266113, 0.005899007797241211, 0.005978271961212158, 0.005795519828796386, 0.0058698558807373045, 0.005828479766845703, 0.0058525438308715825, 0.005832992076873779, 0.005830751895904541, 0.0058288640975952145, 0.005758975982666016, 0.0058504958152771, 0.005732384204864502, 0.005789984226226807, 0.005738624095916748, 0.005791935920715332, 0.005753920078277588, 0.005868735790252685, 0.005818048000335693, 0.005787775993347168, 0.006483903884887695, 0.006823359966278076, 0.007193056106567383]",tokens/s,172.3244545942689,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 316.12 MiB is free. Process 238996 has 14.43 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 13.04 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 609, in __init__ self.dense_4h_to_h = 
nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 312.12 MiB is free. Process 218046 has 14.43 GiB memory in use. Of the allocated memory 14.31 GiB is allocated by PyTorch, and 12.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", 
line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 634, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 402, in __init__ super().__init__(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 102, in __init__ self.query_key_value = nn.Linear(config.hidden_size, 3 * config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 150.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 132.12 MiB is free. Process 218792 has 14.61 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 21.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.093568,6230.50752,0.0,5827.985408,5712.718848,s,1,7.2236201171875,7.2236201171875,0.0,7.2236201171875,7.2236201171875,7.2236201171875,7.2236201171875,[7.2236201171875],,kWh,6.782987458344299e-06,7.26916744342984e-07,3.9919476379907115e-06,1.1501851840677995e-05,,MB,1215.172608,6459.097088,0.0,6046.089216,5989.425664,s,10,4.9416988220214835,0.49416988220214836,0.0025247974744322587,0.4944638519287109,0.49621497192382813,0.49626993103027345,0.4963138983154297,"[0.4869715881347656, 0.49414266967773435, 0.49427615356445315, 0.49408099365234376, 0.49444406127929685, 0.494483642578125, 0.495351806640625, 0.4954202575683594, 0.4962027587890625, 0.49632489013671877]",tokens/s,518.0404739746542,kWh,1.4443688445835e-05,1.5924289621377066e-06,9.625404525714392e-06,2.5661521933687097e-05,tokens/kWh,9976025.609920535,MB,1245.26592,6522.011648,0.0,6109.003776,6090.851328,s,10,20.711378906250005,2.071137890625,0.004732079397948168,2.0697491455078127,2.0778911376953126,2.0796721313476563,2.0810969262695314,"[2.065811767578125, 2.06989501953125, 2.073414306640625, 2.077495361328125, 2.081453125, 2.066280029296875, 2.069603271484375, 2.0687841796875, 2.0707197265625, 2.067922119140625]",tokens/s,30.4180616293919,kWh,6.06090390825001e-05,6.6856696587655205e-06,4.011182970848678e-05,0.00010740653844975242,tokens/kWh,586556.4695530435,,s,630,20.7085668296814,0.032870740999494275,0.00036255934816945993,0.03282468795776367,0.03315680122375488,0.03333550148010254,0.03423234722137451,"[0.03409305572509766, 0.033337345123291014, 0.03273513412475586, 0.032575584411621096, 0.0324815673828125, 0.03251171112060547, 0.03250486373901367, 0.03270134353637695, 0.03264723205566406, 0.03256086349487305, 0.03253427124023438, 0.03252892684936524, 0.03260006332397461, 0.032681377410888675, 0.03260041427612305, 0.03261260986328125, 0.032570816040039065, 0.032505760192871096, 0.032502433776855466, 0.03244416046142578, 0.03244854354858399, 0.03250198364257813, 0.03252220916748047, 0.03266563034057617, 0.032645118713378905, 0.032634849548339846, 0.0325898551940918, 0.03269152069091797, 0.032973472595214846, 0.03297241592407227, 0.03296912002563476, 0.033199966430664064, 0.0329186897277832, 0.03285488128662109, 0.03278351974487305, 0.03272192001342773, 0.03274342346191406, 0.03276153564453125, 0.032805183410644534, 0.0327720947265625, 0.03282534408569336, 0.03275161743164062, 0.0327720947265625, 0.03283148956298828, 0.03280486297607422, 0.032747520446777346, 0.03270057678222656, 0.03278185653686523, 0.032755455017089846, 0.03282492828369141, 0.032806110382080075, 0.03288396835327148, 0.03284617614746094, 0.032820384979248045, 0.032824321746826174, 
0.03287039947509766, 0.0328458251953125, 0.032966655731201173, 0.03368259048461914, 0.03326003265380859, 0.033105728149414065, 0.03296435165405273, 0.032975326538085936, 0.03412582397460937, 0.03336191940307617, 0.032868350982666016, 0.03264022445678711, 0.032612831115722656, 0.032460735321044924, 0.03249599838256836, 0.032573184967041015, 0.032662815093994144, 0.032543712615966794, 0.032953857421875, 0.037322593688964845, 0.03275843048095703, 0.03280486297607422, 0.03263868713378906, 0.03246518325805664, 0.0324771842956543, 0.03249296188354492, 0.032463455200195314, 0.03249903869628906, 0.03278435134887695, 0.03260691070556641, 0.03260156631469727, 0.03278448104858398, 0.03271491241455078, 0.032626976013183595, 0.03272687911987305, 0.0327125129699707, 0.032792926788330075, 0.03282329559326172, 0.03287587356567383, 0.03295305633544922, 0.0328166389465332, 0.03278041458129883, 0.03271238327026367, 0.03271744155883789, 0.032702495574951175, 0.03269833755493164, 0.032892929077148435, 0.03277139282226563, 0.0327625617980957, 0.032876544952392575, 0.03273052978515625, 0.03273379135131836, 0.03277142333984375, 0.03274614334106445, 0.03275980758666992, 0.03285321426391601, 0.03288553619384765, 0.03272428894042969, 0.03273183822631836, 0.03279209518432617, 0.032843647003173826, 0.03284028625488281, 0.032844959259033205, 0.032938846588134764, 0.03287039947509766, 0.03285971069335938, 0.03290156936645508, 0.03296041488647461, 0.03296876907348633, 0.03296851348876953, 0.03294844818115234, 0.03401043319702148, 0.03329929733276367, 0.03277996826171875, 0.03285449600219727, 0.03247907257080078, 0.0325582389831543, 0.03304470443725586, 0.03259423828125, 0.032523902893066406, 0.03271955108642578, 0.032615711212158206, 0.032504543304443356, 0.03254272079467774, 0.03253408050537109, 0.032616897583007814, 0.03257958221435547, 0.032677696228027346, 0.0325203857421875, 0.032530433654785154, 0.03264223861694336, 0.032664352416992185, 0.03261648178100586, 0.03266089630126953, 0.03250412750244141, 0.03261459350585937, 0.032650558471679685, 0.03258003234863281, 0.03272943878173828, 0.03284787368774414, 0.03291545486450195, 0.032985088348388675, 0.032947425842285154, 0.03295312118530273, 0.03284172821044922, 0.032851966857910156, 0.0328007698059082, 0.032884735107421875, 0.03279257583618164, 0.032871456146240235, 0.03288508987426758, 0.03314751815795899, 0.033053825378417966, 0.03307164764404297, 0.03304073715209961, 0.03303014373779297, 0.033023998260498046, 0.03293552017211914, 0.033055137634277344, 0.03304153442382812, 0.03304742431640625, 0.033017856597900394, 0.033103328704833984, 0.033169952392578125, 0.033240577697753904, 0.03324364852905273, 0.03312556838989258, 0.03316313552856445, 0.033397697448730466, 0.033352798461914065, 0.03328441619873047, 0.03336822509765625, 0.033503265380859376, 0.033414623260498044, 0.0343633918762207, 0.03349094390869141, 0.03312332916259766, 0.03290576171875, 0.032814720153808596, 0.03282563018798828, 0.03285878372192383, 0.0328306884765625, 0.032760063171386716, 0.03258617782592774, 0.03257513427734375, 0.032540672302246096, 0.03262617492675781, 0.032596832275390626, 0.032538623809814454, 0.03250915145874023, 0.03260496139526367, 0.032511425018310544, 0.03253510284423828, 0.032661022186279295, 0.03263235092163086, 0.03284473419189453, 0.03298099136352539, 0.032984737396240235, 0.03314031982421875, 0.03313046264648437, 0.03299203109741211, 0.03311945724487304, 0.03332995223999023, 0.033322494506835935, 0.03329894256591797, 0.03329228973388672, 0.03322880172729492, 
0.03315609741210938, 0.03318272018432617, 0.03451903915405274, 0.03386163330078125, 0.03311363220214844, 0.033054878234863284, 0.032995647430419925, 0.033122303009033204, 0.03288630294799805, 0.0327512321472168, 0.03285091018676758, 0.032823169708251956, 0.03282444763183594, 0.032889728546142576, 0.032800800323486326, 0.03283145523071289, 0.032813152313232424, 0.03278969573974609, 0.032809696197509765, 0.03274137496948242, 0.032794624328613284, 0.032866336822509765, 0.032962528228759766, 0.03299327850341797, 0.032917247772216794, 0.033089790344238285, 0.03305472183227539, 0.0330599365234375, 0.033001472473144534, 0.033051231384277346, 0.035007488250732424, 0.03366502380371094, 0.03323241424560547, 0.032874622344970704, 0.032819679260253906, 0.032839168548583986, 0.032806495666503906, 0.0329911994934082, 0.03289785766601563, 0.03292364883422851, 0.03286982345581055, 0.0328699836730957, 0.03284476852416992, 0.032839038848876956, 0.03290176010131836, 0.03284787368774414, 0.032985088348388675, 0.03277414321899414, 0.03273523330688476, 0.03269836807250977, 0.032634849548339846, 0.03256937789916992, 0.03273651123046875, 0.03284454345703125, 0.03282329559326172, 0.032849918365478514, 0.032941761016845705, 0.03309804916381836, 0.033142784118652346, 0.0332861442565918, 0.03333324813842774, 0.033255424499511715, 0.033199649810791015, 0.033116863250732424, 0.03320297622680664, 0.03327651214599609, 0.033207008361816406, 0.03307487869262695, 0.033039806365966796, 0.032925697326660154, 0.032987712860107425, 0.032973953247070316, 0.03291635131835938, 0.032919551849365236, 0.03289907073974609, 0.03281305694580078, 0.032863712310791014, 0.03283564758300781, 0.0328067855834961, 0.03320073699951172, 0.03336908721923828, 0.03299430465698242, 0.032849918365478514, 0.03289907073974609, 0.03288063812255859, 0.03290521621704102, 0.03291475296020508, 0.03298569488525391, 0.03305254364013672, 0.033898529052734376, 0.034033214569091794, 0.03309017562866211, 0.03310992050170898, 0.034229278564453125, 0.03326047897338867, 0.03319619369506836, 0.03450668716430664, 0.03257398223876953, 0.03271903991699219, 0.032624351501464845, 0.03252156829833985, 0.03253958511352539, 0.03241743850708008, 0.032530784606933594, 0.0328007698059082, 0.03258678436279297, 0.032506687164306644, 0.03249142456054688, 0.0326822395324707, 0.032458110809326174, 0.0324450569152832, 0.03252838516235351, 0.032608158111572264, 0.03261859130859375, 0.03258572769165039, 0.032577438354492186, 0.032467041015625, 0.03259392166137695, 0.03269327926635742, 0.0328458251953125, 0.03266249465942383, 0.032779808044433596, 0.03296815872192383, 0.03287756729125976, 0.03295641708374023, 0.03280486297607422, 0.032778240203857424, 0.032779296875, 0.03270947265625, 0.0327470703125, 0.03268428802490234, 0.032650718688964846, 0.03265398406982422, 0.03268159866333008, 0.03267846298217773, 0.032923583984375, 0.03267942428588867, 0.03278041458129883, 0.032857982635498044, 0.0326866569519043, 0.03275161743164062, 0.03278211212158203, 0.03281638336181641, 0.032819774627685545, 0.032799137115478515, 0.032863807678222653, 0.03281740951538086, 0.03286441421508789, 0.032825374603271486, 0.03280681610107422, 0.0328966064453125, 0.032993377685546874, 0.03298281478881836, 0.03296723175048828, 0.03297081756591797, 0.03309878540039062, 0.033923072814941405, 0.03335139083862305, 0.03287273788452148, 0.03265468978881836, 0.032730976104736326, 0.03268860626220703, 0.032639328002929686, 0.03263286590576172, 0.03251180648803711, 0.03247647857666015, 0.032529247283935546, 
0.032643070220947264, 0.032739551544189456, 0.032651039123535154, 0.03261644744873047, 0.032632190704345705, 0.03256383895874024, 0.03258911895751953, 0.03283014297485352, 0.032720542907714846, 0.032718624114990234, 0.03270035171508789, 0.03259065628051758, 0.03266950225830078, 0.03269404983520508, 0.032686302185058594, 0.03265769577026367, 0.032670913696289064, 0.03292009735107422, 0.03285606384277344, 0.03297494506835937, 0.03281430435180664, 0.03316831970214844, 0.032874080657958986, 0.03299369430541992, 0.032930816650390625, 0.03306367874145508, 0.03293584060668946, 0.03285776138305664, 0.03284819030761719, 0.03282662582397461, 0.03284204864501953, 0.03286454391479492, 0.03298537445068359, 0.032901119232177735, 0.03299299240112305, 0.03307548904418945, 0.03311820983886719, 0.03299123382568359, 0.03303567886352539, 0.032836063385009766, 0.03284534454345703, 0.03291120147705078, 0.032976638793945315, 0.03289948654174805, 0.03286886215209961, 0.03288076782226562, 0.03293795013427735, 0.03288502502441406, 0.033027809143066404, 0.03300556945800781, 0.03302956771850586, 0.0329733772277832, 0.0339787826538086, 0.03332499313354492, 0.03266511917114258, 0.0325552978515625, 0.03250735855102539, 0.03252339172363281, 0.03260979080200195, 0.032492000579833986, 0.03259142303466797, 0.03254288101196289, 0.0324865608215332, 0.03252716827392578, 0.03256320190429687, 0.032487422943115234, 0.03252822494506836, 0.032425537109375, 0.03248803329467773, 0.03252633666992188, 0.03254006576538086, 0.032621150970458986, 0.03274956893920898, 0.032655361175537106, 0.03256934356689453, 0.0325591049194336, 0.03250790405273438, 0.03259801483154297, 0.03263078308105469, 0.03274339294433594, 0.03423360061645508, 0.033116958618164063, 0.03303628921508789, 0.032849472045898435, 0.03298070526123047, 0.03276873779296875, 0.03277107238769531, 0.03273625564575195, 0.03269577789306641, 0.032739871978759764, 0.0327756462097168, 0.03278287887573242, 0.03277113723754883, 0.032736190795898436, 0.03279254531860352, 0.032659488677978514, 0.033327041625976564, 0.03473936080932617, 0.03286518478393555, 0.032923809051513674, 0.032847713470458985, 0.03284726333618164, 0.032979488372802734, 0.033003585815429684, 0.0328675537109375, 0.03281180953979492, 0.032882240295410155, 0.0328974723815918, 0.03294950485229492, 0.03295308685302734, 0.033048446655273435, 0.033013473510742186, 0.033099937438964847, 0.03306931304931641, 0.03295641708374023, 0.034163040161132814, 0.03318355178833008, 0.0328809928894043, 0.03272531127929688, 0.03262611389160156, 0.03285395050048828, 0.03255974578857422, 0.032595649719238284, 0.03316870498657227, 0.03257379150390625, 0.03259174346923828, 0.03259267044067383, 0.03273513412475586, 0.03266944122314453, 0.03274582290649414, 0.032645118713378905, 0.03261030578613281, 0.03260416030883789, 0.03263078308105469, 0.032628734588623046, 0.032669952392578125, 0.03273401641845703, 0.03272566223144531, 0.03273712158203125, 0.032688575744628905, 0.03256278228759766, 0.03264505767822266, 0.03276028823852539, 0.03294822311401367, 0.03286748886108398, 0.03288300704956055, 0.032938526153564456, 0.032812095642089846, 0.03285702514648438, 0.032796031951904295, 0.032860801696777346, 0.03283967971801758, 0.03303424072265625, 0.03276800155639648, 0.03279990386962891, 0.03285897445678711, 0.033067008972167966, 0.03336726379394531, 0.03283635330200195, 0.03297078323364258, 0.03292931365966797, 0.0328070068359375, 0.03276784133911133, 0.03274115371704102, 0.0327891845703125, 0.032782176971435546, 0.03292566299438476, 
0.03375513458251953, 0.03295161437988281, 0.03298316955566406, 0.033067840576171875, 0.03291532897949219, 0.03307872009277344, 0.03298144149780274, 0.033048831939697265, 0.03306291198730469, 0.03308367919921875, 0.03296636962890625, 0.03392921447753906, 0.03328195190429688, 0.03274137496948242, 0.03260153579711914, 0.032636959075927736, 0.03256934356689453, 0.03257408142089844, 0.0327127685546875, 0.03253446578979492, 0.03257459259033203, 0.032561630249023435, 0.0325634880065918, 0.03254079818725586, 0.03255295944213867, 0.03252838516235351, 0.03251398468017578, 0.032745471954345705, 0.03268819046020508, 0.032612350463867186, 0.03256934356689453, 0.03255295944213867, 0.032655361175537106, 0.03262054443359375, 0.0326712646484375, 0.032660224914550784, 0.03274931335449219, 0.03269424057006836, 0.032802078247070314, 0.03302268981933594, 0.03298054504394531, 0.032919422149658206, 0.032979137420654295, 0.0329005126953125, 0.03290620803833008, 0.03294521713256836, 0.03279667282104492, 0.032795585632324216, 0.032761856079101564, 0.03283967971801758, 0.032849918365478514, 0.03276800155639648, 0.03278438568115234, 0.03278140640258789, 0.03283871841430664, 0.032898303985595706, 0.03286259078979492, 0.03283785629272461, 0.032898529052734375, 0.03300201416015625, 0.03280620956420898, 0.032936256408691404, 0.032893310546875, 0.03287859344482422, 0.03295792007446289, 0.03288323211669922, 0.03291731262207031, 0.03303200149536133, 0.03307763290405273, 0.033142047882080077, 0.03302880096435547, 0.03311209487915039, 0.033117790222167966, 0.03306665420532227]",tokens/s,30.422192186521908,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 14.12 MiB is free. Process 277187 has 14.72 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 4.70 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = 
nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 216194 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,809.013248,6201.147392,0.0,5798.62528,5774.685184,s,1,7.1340859375,7.1340859375,0.0,7.1340859375,7.1340859375,7.1340859375,7.1340859375,[7.1340859375],,kWh,4.298634316667933e-06,4.6705230915268055e-07,2.186946193999395e-06,6.952632819820009e-06,,MB,1146.114048,6213.730304,0.0,5800.722432,5525.738496,s,10,2.158624084472656,0.21586240844726565,0.009251771323434807,0.2186098403930664,0.22281395111083985,0.22396814041137694,0.22489149185180665,"[0.19428317260742187, 0.2170152587890625, 0.22152642822265625, 0.2165257568359375, 0.21810272216796875, 0.21911695861816408, 0.22255746459960937, 0.22175885009765625, 0.22512232971191407, 0.20261514282226561]",tokens/s,1185.9406269088292,kWh,5.9609857500000935e-06,6.573576222943118e-07,3.9579056606530595e-06,1.0576249032947464e-05,tokens/kWh,24205178.90629284,MB,1151.65184,6215.827456,0.0,5802.819584,5623.660544,s,10,17.597815795898438,1.7597815795898437,0.0033620464557288167,1.75954541015625,1.7614556274414064,1.7651989440917968,1.7681935974121092,"[1.75586669921875, 1.757310546875, 1.7600311279296874, 1.759990966796875, 1.7582982177734374, 1.760623779296875, 1.757661376953125, 1.759371337890625, 1.759719482421875, 1.7689422607421874]",tokens/s,35.79989740242852,kWh,5.189208038916636e-05,5.7235318634949734e-06,3.440026391474734e-05,9.201587616740865e-05,tokens/kWh,684664.4581787304,,s,630,17.595448888778673,0.027929283950442356,0.00033168920885367576,0.02788568019866943,0.02808982696533203,0.028216232109069823,0.029514529418945314,"[0.029409311294555665, 0.028448511123657226, 0.02797113609313965, 0.027777696609497072, 0.02771664047241211, 0.027696096420288085, 0.027604352951049804, 0.02764044761657715, 0.027669599533081055, 0.027714464187622072, 0.027994367599487306, 0.027653312683105467, 0.027642431259155272, 0.027620351791381836, 0.02766041564941406, 0.027671360015869142, 0.027729984283447265, 
0.02774630355834961, 0.027727296829223633, 0.02773049545288086, 0.027684160232543945, 0.027739904403686524, 0.027749216079711914, 0.02775657653808594, 0.027756607055664063, 0.02777292823791504, 0.02773948860168457, 0.02774880027770996, 0.027718080520629882, 0.02793891143798828, 0.02779916763305664, 0.027709503173828125, 0.027780384063720704, 0.027906784057617186, 0.02799407958984375, 0.028028671264648437, 0.02796988868713379, 0.027977407455444334, 0.028012800216674804, 0.027891359329223632, 0.027885311126708983, 0.027889312744140624, 0.027859903335571288, 0.028250112533569335, 0.027807743072509765, 0.027836416244506838, 0.027824127197265625, 0.027875328063964845, 0.027903392791748048, 0.027912607192993166, 0.027862720489501953, 0.02793462371826172, 0.027908031463623046, 0.027865760803222655, 0.02799168014526367, 0.027891199111938478, 0.027937215805053713, 0.027916736602783203, 0.027825632095336915, 0.027971872329711912, 0.027942752838134764, 0.027975168228149414, 0.028070783615112303, 0.029515392303466798, 0.028664831161499024, 0.028192768096923827, 0.028071935653686524, 0.027907487869262695, 0.027718496322631837, 0.027817920684814455, 0.027675872802734376, 0.027625951766967773, 0.027672576904296874, 0.02764793586730957, 0.02763795280456543, 0.027723039627075195, 0.027736799240112305, 0.027721376419067384, 0.027746368408203125, 0.027730207443237304, 0.027726112365722658, 0.027780832290649413, 0.027799360275268553, 0.027713151931762697, 0.02773593521118164, 0.02772243118286133, 0.02773753547668457, 0.027750335693359374, 0.027822944641113283, 0.02774608039855957, 0.027780799865722655, 0.027740480422973633, 0.027811967849731445, 0.02783014488220215, 0.028025920867919923, 0.027970495223999022, 0.027940799713134765, 0.028059520721435548, 0.028019168853759765, 0.02795084762573242, 0.028132863998413086, 0.027930463790893555, 0.027953792572021484, 0.027903072357177733, 0.027818496704101563, 0.027826591491699217, 0.02788761520385742, 0.027788415908813476, 0.027857791900634764, 0.027833824157714845, 0.027860767364501954, 0.02786751937866211, 0.02783843231201172, 0.027868959426879884, 0.027796096801757812, 0.027920703887939453, 0.02788528060913086, 0.027902015686035155, 0.027948768615722656, 0.02804512023925781, 0.028035455703735352, 0.027943967819213867, 0.027935712814331055, 0.027874879837036133, 0.027865535736083986, 0.028090431213378907, 0.029546367645263673, 0.028573535919189454, 0.028075008392333983, 0.02809663963317871, 0.02826793670654297, 0.02766281509399414, 0.027635648727416993, 0.027697216033935546, 0.027635040283203124, 0.027650047302246093, 0.027703968048095703, 0.02767033576965332, 0.02761952018737793, 0.027684640884399415, 0.02767791938781738, 0.02765488052368164, 0.027646240234375, 0.027615232467651366, 0.02781920051574707, 0.028780479431152344, 0.02777814483642578, 0.02766217613220215, 0.027727807998657227, 0.027745983123779298, 0.027660064697265625, 0.027722272872924805, 0.027792703628540038, 0.02784227180480957, 0.028414464950561522, 0.02864588737487793, 0.027976863861083983, 0.028010463714599608, 0.028085088729858397, 0.027918527603149414, 0.02798681640625, 0.028062656402587892, 0.027993600845336915, 0.027978208541870116, 0.02809187126159668, 0.028045888900756835, 0.027876575469970702, 0.027892799377441407, 0.02780067253112793, 0.02792678451538086, 0.027797887802124024, 0.02790550422668457, 0.027873823165893555, 0.02796134376525879, 0.027948448181152344, 0.028011327743530275, 0.0279017276763916, 0.02789990425109863, 0.027899103164672853, 0.027824928283691406, 
0.027952287673950197, 0.02794713592529297, 0.02784739112854004, 0.027918367385864257, 0.027839839935302733, 0.027947967529296874, 0.028063423156738283, 0.027982879638671875, 0.027930816650390624, 0.02947318458557129, 0.028526847839355468, 0.028069087982177734, 0.02787817573547363, 0.02783750343322754, 0.027775936126708985, 0.027715776443481447, 0.027815807342529298, 0.027786495208740235, 0.027695039749145507, 0.027738176345825195, 0.02770604705810547, 0.027682815551757813, 0.027639808654785155, 0.027673887252807616, 0.02771334457397461, 0.02777177619934082, 0.027654495239257813, 0.027724672317504882, 0.027675487518310546, 0.027740127563476564, 0.027854848861694335, 0.027749599456787108, 0.02772867202758789, 0.027720735549926757, 0.027826784133911132, 0.027877248764038087, 0.02782259178161621, 0.027796607971191406, 0.027734560012817384, 0.02786924743652344, 0.027769119262695312, 0.02779136085510254, 0.02807606315612793, 0.02791164779663086, 0.027995840072631836, 0.027977695465087892, 0.028056415557861328, 0.028149696350097658, 0.028120864868164064, 0.028089759826660156, 0.02799807929992676, 0.027937215805053713, 0.02795155143737793, 0.027921695709228516, 0.028043615341186524, 0.028078527450561525, 0.02801055908203125, 0.027947008132934572, 0.027975616455078126, 0.027947200775146484, 0.02800444793701172, 0.0279683837890625, 0.028021888732910158, 0.02816592025756836, 0.028092575073242188, 0.028060735702514647, 0.028014976501464842, 0.02802729606628418, 0.028052959442138672, 0.028041759490966798, 0.028094112396240236, 0.028163711547851564, 0.02965340805053711, 0.028702943801879884, 0.028280416488647462, 0.02789606475830078, 0.027891647338867186, 0.027798751831054687, 0.027714847564697265, 0.0278768310546875, 0.027827615737915038, 0.027898719787597656, 0.0277258243560791, 0.027760032653808595, 0.027721952438354493, 0.02776019287109375, 0.027701183319091795, 0.02800860786437988, 0.027801599502563477, 0.027674463272094725, 0.027839359283447267, 0.027682111740112304, 0.02773062324523926, 0.027745855331420897, 0.027674144744873046, 0.027779136657714844, 0.027754655838012697, 0.027833023071289063, 0.027836063385009765, 0.027756895065307617, 0.02774835205078125, 0.027768447875976564, 0.027799295425415038, 0.027844608306884764, 0.0278022403717041, 0.027962591171264647, 0.028015104293823243, 0.027889951705932618, 0.027938528060913084, 0.02794918441772461, 0.02805571174621582, 0.0279881591796875, 0.028237632751464844, 0.027987968444824218, 0.027854848861694335, 0.027848703384399414, 0.02776038360595703, 0.027813663482666017, 0.02784332847595215, 0.02783817672729492, 0.02788547134399414, 0.027871328353881834, 0.027867040634155273, 0.027858720779418946, 0.02784492874145508, 0.027804800033569336, 0.027852895736694337, 0.027877952575683592, 0.027885791778564453, 0.028065792083740236, 0.028127328872680664, 0.027936832427978515, 0.02794438362121582, 0.027885984420776368, 0.028069471359252928, 0.029972192764282226, 0.028492063522338868, 0.028114015579223633, 0.027794303894042968, 0.027818016052246094, 0.027799583435058593, 0.02777494430541992, 0.027846656799316406, 0.027775039672851564, 0.027776575088500975, 0.027714048385620117, 0.027688224792480468, 0.027855167388916014, 0.027813919067382814, 0.028225088119506837, 0.02769923210144043, 0.02767657661437988, 0.027702016830444334, 0.027785215377807617, 0.02771567916870117, 0.02772166442871094, 0.027713504791259766, 0.02774620819091797, 0.027815135955810547, 0.027769727706909178, 0.027786815643310547, 0.027770591735839845, 0.02793449592590332, 
0.02779641532897949, 0.027836416244506838, 0.0279564151763916, 0.02781692886352539, 0.027808895111083986, 0.027894304275512694, 0.02813884735107422, 0.02811939239501953, 0.027998720169067383, 0.02798591995239258, 0.028033023834228517, 0.027958944320678712, 0.028049375534057615, 0.02808460807800293, 0.028024831771850587, 0.027929920196533203, 0.027947711944580077, 0.027928415298461913, 0.02787548828125, 0.028017919540405275, 0.02796739196777344, 0.0280216007232666, 0.02801043128967285, 0.02811212730407715, 0.02792697525024414, 0.02792073631286621, 0.027932607650756835, 0.027902175903320312, 0.02799603271484375, 0.02793267250061035, 0.027997343063354493, 0.02802979278564453, 0.027971424102783204, 0.027986080169677734, 0.02818992042541504, 0.029346176147460937, 0.028390623092651366, 0.027982624053955078, 0.027815391540527343, 0.02771776008605957, 0.0277193603515625, 0.02767945671081543, 0.02796988868713379, 0.027763679504394533, 0.02790675163269043, 0.027659360885620116, 0.02769193649291992, 0.02773196792602539, 0.02772787284851074, 0.02771353530883789, 0.02775433540344238, 0.027698944091796875, 0.0277509765625, 0.02765807914733887, 0.027847904205322266, 0.02771164894104004, 0.02778995132446289, 0.027805055618286133, 0.02779404830932617, 0.027801599502563477, 0.027794784545898437, 0.027797407150268554, 0.027849472045898438, 0.027795520782470703, 0.027770784378051756, 0.027888704299926757, 0.02777766418457031, 0.027816415786743164, 0.027862911224365235, 0.02789948844909668, 0.02819113540649414, 0.027914239883422853, 0.02784604835510254, 0.028023263931274415, 0.027986047744750976, 0.02795724868774414, 0.027934207916259765, 0.02808678436279297, 0.02793471908569336, 0.02790774345397949, 0.0279268798828125, 0.02801430320739746, 0.027903903961181642, 0.027965824127197267, 0.027885568618774413, 0.0279256649017334, 0.027922431945800782, 0.02780041694641113, 0.027883712768554687, 0.027898975372314453, 0.027962080001831056, 0.028006719589233398, 0.027952512741088866, 0.0279815673828125, 0.02797203254699707, 0.02787139129638672, 0.028049600601196288, 0.028029760360717772, 0.029366207122802735, 0.028413280487060547, 0.02803727912902832, 0.02780182456970215, 0.027791999816894532, 0.027731712341308595, 0.0276810245513916, 0.028025856018066408, 0.028137792587280275, 0.02774496078491211, 0.027698591232299806, 0.027724000930786134, 0.02765884780883789, 0.027676448822021485, 0.027645055770874023, 0.027708511352539062, 0.02775014305114746, 0.02771945571899414, 0.027826431274414063, 0.02778121566772461, 0.027745664596557616, 0.027793439865112304, 0.02768118476867676, 0.027778463363647463, 0.02769990348815918, 0.027756256103515627, 0.027748640060424806, 0.02780134391784668, 0.027906272888183595, 0.027797344207763672, 0.027682559967041016, 0.027799232482910156, 0.02827510452270508, 0.028205408096313476, 0.027917600631713866, 0.02815670394897461, 0.0279549446105957, 0.02785875129699707, 0.027865728378295897, 0.027833663940429687, 0.027952896118164063, 0.02788528060913086, 0.027876447677612305, 0.028010656356811523, 0.02789913558959961, 0.02793110466003418, 0.02798521614074707, 0.028017343521118163, 0.028107776641845703, 0.028061792373657225, 0.028171295166015624, 0.02805062484741211, 0.02811564826965332, 0.027971584320068358, 0.027983583450317383, 0.027906335830688477, 0.027903263092041015, 0.02809929656982422, 0.028028928756713867, 0.02800377655029297, 0.02795782470703125, 0.027965120315551758, 0.028069471359252928, 0.029512416839599608, 0.028545183181762697, 0.02805881690979004, 0.027815967559814452, 
0.02771766471862793, 0.027773183822631838, 0.0277521915435791, 0.027777151107788087, 0.027744735717773437, 0.027717632293701173, 0.027715648651123047, 0.027750560760498047, 0.02781737518310547, 0.02773206329345703, 0.02782441520690918, 0.02780486488342285, 0.027798528671264647, 0.02775196838378906, 0.027771167755126953, 0.027823455810546877, 0.02770636749267578, 0.027814592361450195, 0.027886463165283204, 0.027951135635375976, 0.02811296081542969, 0.027897247314453123, 0.027831008911132812, 0.027917984008789063, 0.027991455078125, 0.02789580726623535, 0.027821088790893556, 0.027905824661254883, 0.027830272674560546, 0.027920576095581056, 0.028024255752563478, 0.02805379295349121, 0.02793027114868164, 0.02798431968688965, 0.027991840362548828, 0.02793494415283203, 0.027914239883422853, 0.02798080062866211, 0.027887935638427733, 0.027941152572631835, 0.027873695373535155, 0.02790399932861328, 0.02805743980407715, 0.028035232543945313, 0.02800979232788086, 0.027976383209228517, 0.027931936264038087, 0.027951839447021485, 0.02786265563964844, 0.02790179252624512, 0.02795484733581543, 0.027956096649169922, 0.02792835235595703, 0.027938112258911133, 0.027903968811035157, 0.02810335922241211, 0.027932928085327147, 0.02794313621520996, 0.027989408493041993, 0.03198550415039063, 0.030364095687866213, 0.029232767105102538, 0.028629375457763673, 0.028184576034545897, 0.02796544075012207, 0.02779916763305664, 0.027762496948242187, 0.027726400375366212, 0.027764671325683592, 0.027656511306762697, 0.027810848236083985, 0.027710176467895507, 0.027905632019042968, 0.027754047393798827, 0.0278067512512207, 0.027746112823486328, 0.02780294418334961, 0.02768499183654785, 0.02773200035095215, 0.02784659194946289, 0.02781398391723633, 0.02776700782775879, 0.027807199478149414, 0.027752447128295898, 0.027980192184448242, 0.02778291130065918, 0.027809919357299803, 0.027771167755126953, 0.027814144134521483, 0.027997503280639647, 0.028000255584716797, 0.027947999954223632, 0.02787843132019043, 0.027888320922851564, 0.027819904327392578, 0.02791200065612793, 0.028040767669677735, 0.028058368682861327, 0.028053184509277344, 0.028045631408691405, 0.02798748779296875, 0.028063552856445313, 0.028038015365600587, 0.02798601531982422, 0.028038848876953126, 0.02790809631347656, 0.02795929527282715, 0.027892864227294922, 0.028138368606567384, 0.028102655410766602, 0.027983871459960938, 0.027962848663330077, 0.027944576263427733, 0.028079263687133788, 0.02983500862121582, 0.028073984146118162, 0.027992063522338868, 0.028063743591308594, 0.02791200065612793, 0.027912384033203126, 0.027899551391601562, 0.02811529541015625]",tokens/s,35.804713138166996,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = 
Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 865, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 732, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 459, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 411, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 358, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 920, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 747, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 635, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 608, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 182.12 MiB is free. Process 215458 has 14.56 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.08 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,801.890304,811.466752,0.0,408.94464,387.119104,s,1,7.09846923828125,7.09846923828125,0.0,7.09846923828125,7.09846923828125,7.09846923828125,7.09846923828125,[7.09846923828125],,kWh,2.6929475999925973e-06,2.9000547748840967e-07,8.92222935998388e-07,3.875176013479395e-06,,MB,1139.851264,834.535424,0.0,421.527552,354.085376,s,16,0.41372899436950694,0.025858062148094177,0.0005013926799469183,0.02567673587799072,0.026142239570617676,0.026762559890747068,0.027466202163696288,"[0.027642112731933594, 0.025729888916015624, 0.02575129508972168, 0.025629600524902343, 0.0256759033203125, 0.02581510353088379, 0.025661312103271484, 0.02561712074279785, 0.025658208847045897, 0.025677568435668947, 0.025632352828979493, 0.02559715270996094, 0.02572604751586914, 0.02578816032409668, 0.02646937561035156, 0.025657791137695313]",tokens/s,9900.200507440888,kWh,8.458901749519039e-07,9.325260591893439e-08,5.576754621965501e-07,1.4968182430673883e-06,tokens/kWh,171029449.4242576,MB,1171.849216,849.215488,0.0,436.207616,354.087936,s,16,9.634624572753905,0.6021640357971192,0.0028927834401368097,0.6015382995605469,0.605951416015625,0.6070446624755859,0.6085907684326172,"[0.60153271484375, 0.6043211059570313, 0.602462158203125, 0.6010297241210938, 0.6042994384765625, 0.608977294921875, 0.6015438842773437, 0.6005047607421875, 0.6064004516601562, 0.600072265625, 0.5991370849609375, 0.5993372192382812, 0.6055023803710937, 0.5982477416992188, 0.6017836303710937, 0.5994727172851563]",tokens/s,104.62265471667247,kWh,1.7301799559362203e-05,1.908149115999114e-06,7.122788198509382e-06,2.6332736873870703e-05,tokens/kWh,2392459.253352935,,s,1008,9.626732781410217,0.009550330140287914,0.00024770641605905314,0.0095109281539917,0.00968121271133423,0.009779137372970582,0.010242977085113522,"[0.009271136283874512, 0.009737088203430175, 0.00953654384613037, 0.009710559844970703, 0.009460927963256836, 0.009653056144714355, 0.009572352409362793, 0.009738240242004394, 0.009539584159851074, 0.009418911933898926, 0.009480031967163086, 0.009531359672546386, 0.009472031593322754, 0.009477727890014649, 0.009834912300109864, 0.009717439651489258, 0.00957267189025879, 0.009666624069213868, 0.009674688339233399, 0.009424896240234374, 0.009455327987670898, 0.00949276828765869, 0.00945462417602539, 0.009486847877502442, 0.009453568458557129, 0.009445631980895996, 0.009471967697143554, 0.00952451229095459, 0.009475040435791015, 0.009492480278015136, 0.00951260757446289, 0.009531999588012695, 0.009557663917541504, 0.00958080005645752, 0.009819040298461914, 0.0096146240234375, 0.009619104385375976, 0.009502752304077149, 0.009571776390075684, 0.009474592208862305, 0.009442303657531738, 
0.009456735610961914, 0.009478143692016602, 0.00944547176361084, 0.009517120361328126, 0.00950656032562256, 0.009496607780456544, 0.009539551734924316, 0.009523232460021973, 0.009547743797302246, 0.009705471992492675, 0.009496576309204101, 0.009555968284606933, 0.009457663536071777, 0.00968835163116455, 0.009541888236999512, 0.009515487670898437, 0.009498623847961426, 0.009488384246826171, 0.009465312004089356, 0.009538368225097656, 0.009532671928405761, 0.009533887863159179, 0.009437376022338868, 0.009616607666015626, 0.0099202880859375, 0.00957868766784668, 0.009791744232177735, 0.009500415802001953, 0.009546367645263672, 0.009628928184509278, 0.009531935691833497, 0.009791647911071778, 0.00971395206451416, 0.009591839790344238, 0.009608160018920899, 0.01089846420288086, 0.009731167793273926, 0.009553183555603027, 0.009484671592712402, 0.009766912460327149, 0.009498271942138672, 0.009467424392700196, 0.009522015571594239, 0.009433055877685547, 0.009510880470275878, 0.009484319686889649, 0.009736384391784668, 0.009615167617797852, 0.009504768371582031, 0.00959609603881836, 0.009461631774902344, 0.009470720291137696, 0.009535679817199707, 0.009562015533447266, 0.00947212791442871, 0.009510080337524414, 0.00941055965423584, 0.00950556755065918, 0.009455967903137207, 0.009452256202697754, 0.009530303955078125, 0.009448896408081055, 0.009624128341674804, 0.009981951713562011, 0.009510144233703614, 0.009535776138305665, 0.009506784439086913, 0.009508799552917481, 0.009916064262390136, 0.009513888359069824, 0.009541631698608399, 0.00950870418548584, 0.009478303909301758, 0.00955942440032959, 0.00962934398651123, 0.009547840118408204, 0.009496928215026856, 0.009472000122070312, 0.009527456283569337, 0.009607680320739746, 0.009465760231018066, 0.00953932762145996, 0.009521087646484376, 0.009505087852478028, 0.009469471931457519, 0.009347743988037109, 0.009630016326904297, 0.009805824279785156, 0.009574432373046875, 0.009607135772705078, 0.00951904010772705, 0.009515071868896484, 0.00955344009399414, 0.009517536163330079, 0.009494527816772461, 0.009810144424438477, 0.009629376411437988, 0.009681183815002442, 0.009570112228393554, 0.009584639549255371, 0.009543680191040039, 0.009545503616333008, 0.009557503700256348, 0.00949135971069336, 0.009490367889404296, 0.009590399742126466, 0.009472448348999024, 0.00952899169921875, 0.00955408000946045, 0.009467103958129882, 0.00961411190032959, 0.009606240272521972, 0.009703455924987793, 0.00967686367034912, 0.009638079643249512, 0.00971168041229248, 0.009552351951599121, 0.009564064025878906, 0.00951315212249756, 0.009515007972717286, 0.00954736042022705, 0.009509280204772949, 0.009484255790710449, 0.009535519599914551, 0.009631296157836915, 0.009585087776184081, 0.009639936447143555, 0.009538687705993653, 0.009549759864807128, 0.009484959602355956, 0.009512672424316406, 0.009501248359680177, 0.009549471855163574, 0.009595168113708497, 0.009508864402770996, 0.009537599563598632, 0.009557951927185058, 0.00947593593597412, 0.0095250244140625, 0.009496416091918945, 0.009510944366455078, 0.009549759864807128, 0.009464127540588379, 0.00956982421875, 0.009478943824768066, 0.009469280242919922, 0.009515680313110352, 0.009477824211120605, 0.009186367988586425, 0.009493439674377442, 0.0095131196975708, 0.00948412799835205, 0.009536864280700684, 0.009482912063598633, 0.009453568458557129, 0.009530752182006837, 0.00949516773223877, 0.00946934413909912, 0.009556672096252442, 0.009498175621032714, 0.009465248107910156, 0.009532223701477051, 0.009488351821899414, 
0.009590720176696777, 0.009619680404663086, 0.009479680061340333, 0.009736031532287597, 0.00956278419494629, 0.00951910400390625, 0.009492480278015136, 0.00943887996673584, 0.009523200035095216, 0.009468544006347656, 0.009440640449523925, 0.00947824001312256, 0.009445631980895996, 0.009467904090881347, 0.009570240020751953, 0.009633855819702148, 0.009768480300903321, 0.009504799842834472, 0.009472448348999024, 0.00959488010406494, 0.009526432037353516, 0.009741151809692383, 0.009528639793395997, 0.00952956771850586, 0.009567968368530274, 0.009488703727722168, 0.009490880012512207, 0.009451423645019531, 0.009635711669921875, 0.00957472038269043, 0.009502304077148438, 0.009558208465576172, 0.00955561637878418, 0.009480799674987793, 0.00955174446105957, 0.009473216056823731, 0.00948310375213623, 0.009562239646911622, 0.009699487686157226, 0.009564160346984863, 0.009519935607910156, 0.009492992401123047, 0.00955020809173584, 0.009553919792175293, 0.009633088111877441, 0.009684831619262695, 0.009612128257751465, 0.009537535667419434, 0.0093635196685791, 0.010248288154602051, 0.009604288101196288, 0.009611167907714845, 0.009485088348388671, 0.009588895797729492, 0.009487711906433105, 0.009582655906677246, 0.009513407707214356, 0.00945462417602539, 0.009509280204772949, 0.009486175537109376, 0.009503168106079101, 0.00960745620727539, 0.009637887954711915, 0.009854016304016113, 0.009636639595031738, 0.009541567802429199, 0.009591008186340332, 0.009496576309204101, 0.00953987216949463, 0.009504544258117675, 0.009484543800354003, 0.009535167694091797, 0.009510911941528321, 0.009461759567260742, 0.00951296043395996, 0.009465375900268554, 0.009509407997131347, 0.009504704475402833, 0.009563712120056153, 0.009490880012512207, 0.009441120147705078, 0.009434592247009278, 0.00950096035003662, 0.009476479530334472, 0.009752608299255372, 0.009518272399902343, 0.009478143692016602, 0.009519935607910156, 0.009476096153259277, 0.009454848289489746, 0.0094934720993042, 0.00949020767211914, 0.009848447799682617, 0.009547871589660644, 0.00949071979522705, 0.009543359756469727, 0.009485919952392579, 0.009571200370788574, 0.009502559661865235, 0.009500927925109863, 0.009648127555847168, 0.009514240264892579, 0.010086912155151367, 0.009523520469665528, 0.009500127792358398, 0.009760992050170898, 0.00978054428100586, 0.009591744422912598, 0.009541215896606446, 0.010777055740356445, 0.009664447784423828, 0.009500672340393066, 0.009621376037597657, 0.009735487937927246, 0.00963257598876953, 0.009873408317565918, 0.009680895805358887, 0.009769184112548828, 0.00975222396850586, 0.00969321632385254, 0.00957759952545166, 0.009583680152893067, 0.009764960289001465, 0.0098220157623291, 0.009567296028137207, 0.009542079925537109, 0.009531071662902832, 0.009562848091125488, 0.009495776176452636, 0.009575167655944824, 0.009570431709289551, 0.00958191967010498, 0.009607839584350585, 0.009652223587036133, 0.009582592010498046, 0.009539584159851074, 0.009525247573852539, 0.009563872337341309, 0.009562399864196778, 0.009562111854553223, 0.009597151756286622, 0.009625472068786622, 0.009737152099609375, 0.009743328094482423, 0.009699647903442382, 0.00969491195678711, 0.009671775817871094, 0.009593376159667969, 0.0096812801361084, 0.0096494722366333, 0.00961196804046631, 0.009564160346984863, 0.009477791786193847, 0.009612992286682128, 0.009579104423522949, 0.009631360054016113, 0.009697312355041504, 0.00966697597503662, 0.009613375663757324, 0.009601152420043946, 0.009590399742126466, 0.009674943923950196, 0.009810943603515625, 
0.009743264198303223, 0.009848928451538086, 0.009790847778320313, 0.009747167587280274, 0.00961961555480957, 0.00958233642578125, 0.009571743965148925, 0.009613920211791992, 0.009835552215576172, 0.010439711570739746, 0.009817824363708497, 0.009317567825317383, 0.009834272384643555, 0.009575807571411132, 0.00971225643157959, 0.009579808235168456, 0.009780223846435548, 0.009524127960205079, 0.009696063995361328, 0.009551615715026855, 0.009498623847961426, 0.009545984268188477, 0.009516736030578614, 0.009504511833190918, 0.00949276828765869, 0.009486623764038086, 0.009712672233581543, 0.009777119636535644, 0.009589823722839355, 0.009549280166625977, 0.009685791969299317, 0.00950825595855713, 0.009447711944580078, 0.009455007553100586, 0.009464384078979492, 0.009433216094970703, 0.00946985626220703, 0.009445311546325684, 0.009436639785766601, 0.009488991737365723, 0.009475104331970215, 0.009528287887573242, 0.00965392017364502, 0.0095065279006958, 0.009562751770019532, 0.009477984428405762, 0.00951084804534912, 0.00951523208618164, 0.009435168266296387, 0.00960099220275879, 0.009445695877075196, 0.009465632438659667, 0.009477120399475097, 0.009460512161254883, 0.009407679557800292, 0.009501376152038575, 0.009410847663879394, 0.009471967697143554, 0.009486335754394531, 0.009442975997924805, 0.009482336044311524, 0.009470175743103028, 0.00948147201538086, 0.009552127838134765, 0.009466400146484375, 0.00942512035369873, 0.009672672271728515, 0.00946076774597168, 0.00946886444091797, 0.009532480239868165, 0.009433888435363769, 0.010567232131958008, 0.00950543975830078, 0.00962559986114502, 0.009530816078186035, 0.00955459213256836, 0.009654175758361817, 0.009566207885742188, 0.009668224334716797, 0.009516703605651855, 0.009916671752929688, 0.009892319679260254, 0.00973964786529541, 0.009663104057312012, 0.009568256378173828, 0.009500608444213868, 0.009543487548828125, 0.009502976417541505, 0.00944547176361084, 0.009637791633605957, 0.009541024208068847, 0.00952556800842285, 0.009482527732849121, 0.009451519966125489, 0.00950707244873047, 0.009512767791748047, 0.009496128082275391, 0.009522560119628907, 0.009417728424072265, 0.009443327903747559, 0.009471199989318847, 0.009446175575256347, 0.00948851203918457, 0.009498496055603028, 0.009571359634399415, 0.00950985622406006, 0.009477120399475097, 0.009495039939880372, 0.009524831771850586, 0.009478431701660156, 0.009507136344909668, 0.009423104286193848, 0.009431136131286622, 0.009545920372009277, 0.009483488082885742, 0.00949839973449707, 0.009493280410766601, 0.009461759567260742, 0.009574399948120118, 0.009539999961853028, 0.009500479698181153, 0.009683072090148926, 0.009499936103820801, 0.009475616455078125, 0.009504704475402833, 0.009403648376464844, 0.009481439590454102, 0.009456064224243164, 0.009445376396179199, 0.00953376007080078, 0.009426624298095702, 0.009524928092956544, 0.009489824295043945, 0.009437408447265625, 0.009565279960632325, 0.009449055671691894, 0.00942899227142334, 0.009244671821594238, 0.009499839782714844, 0.009495360374450684, 0.009447039604187012, 0.009787551879882813, 0.009437408447265625, 0.009497983932495117, 0.009463775634765624, 0.009433759689331054, 0.00944876766204834, 0.009434816360473632, 0.009421343803405762, 0.009606816291809083, 0.009562944412231445, 0.009443327903747559, 0.009706815719604493, 0.009461471557617188, 0.009448448181152343, 0.009411711692810059, 0.009414624214172364, 0.009449983596801758, 0.00938809585571289, 0.009392448425292969, 0.009560064315795898, 0.009442943572998047, 0.00946934413909912, 
0.009501952171325684, 0.009434720039367676, 0.009430496215820313, 0.009433279991149903, 0.009423040390014648, 0.009451456069946289, 0.009427295684814454, 0.009457663536071777, 0.009539615631103516, 0.00942627239227295, 0.009425536155700683, 0.009484288215637206, 0.00944752025604248, 0.009373600006103516, 0.009449760437011718, 0.009416128158569336, 0.009453856468200683, 0.010434304237365723, 0.010269184112548829, 0.015894368171691896, 0.010077823638916015, 0.009580575942993164, 0.009553919792175293, 0.00952345561981201, 0.009619199752807617, 0.009512288093566895, 0.009499199867248535, 0.009441632270812989, 0.009606911659240722, 0.0095250244140625, 0.009595359802246094, 0.009447392463684081, 0.009615391731262206, 0.009435104370117188, 0.009486080169677735, 0.009419039726257324, 0.009436479568481445, 0.009173024177551269, 0.009629599571228028, 0.009477343559265137, 0.009492992401123047, 0.009509504318237305, 0.009438976287841797, 0.009488384246826171, 0.009486528396606445, 0.009459551811218261, 0.00953705596923828, 0.009576895713806152, 0.009494527816772461, 0.009537535667419434, 0.009517151832580567, 0.00949407958984375, 0.009529184341430665, 0.009478272438049317, 0.009523072242736817, 0.009460000038146973, 0.009466079711914063, 0.009574399948120118, 0.009451519966125489, 0.00955504035949707, 0.009546943664550781, 0.00943894386291504, 0.0094835844039917, 0.009454272270202636, 0.009470303535461426, 0.009471776008605957, 0.009756544113159179, 0.0094835844039917, 0.009490336418151855, 0.009468544006347656, 0.009584799766540527, 0.00942182445526123, 0.009542048454284668, 0.009459872245788575, 0.009429280281066894, 0.009443488121032715, 0.00962716770172119, 0.009495008468627929, 0.009542752265930175, 0.009487263679504395, 0.009651424407958985, 0.009961728096008302, 0.009545472145080567, 0.009659423828125, 0.009557760238647461, 0.009532416343688965, 0.009556991577148437, 0.009498559951782226, 0.009529408454895019, 0.009500672340393066, 0.009477888107299805, 0.009635616302490234, 0.009492064476013183, 0.009423744201660157, 0.009650176048278808, 0.009457663536071777, 0.009502719879150391, 0.009455615997314454, 0.009532768249511719, 0.00951363182067871, 0.00923033618927002, 0.009644031524658203, 0.009433343887329101, 0.00958028793334961, 0.009482239723205567, 0.009553919792175293, 0.00948419189453125, 0.009436287879943848, 0.009486592292785645, 0.009531776428222656, 0.00940681552886963, 0.009496576309204101, 0.009410240173339844, 0.009505279541015625, 0.009481727600097656, 0.009595199584960938, 0.009858304023742676, 0.009535296440124512, 0.009434207916259766, 0.009501983642578125, 0.009505215644836426, 0.009471360206604005, 0.009509696006774902, 0.009476032257080078, 0.009533120155334472, 0.009422975540161133, 0.009484479904174805, 0.009527423858642577, 0.009486207962036133, 0.009512224197387696, 0.009525919914245605, 0.00951916790008545, 0.009512991905212402, 0.009442655563354493, 0.009484928131103515, 0.009504768371582031, 0.009490431785583496, 0.009511008262634277, 0.00943068790435791, 0.009439807891845703, 0.009469632148742677, 0.009430944442749023, 0.009426207542419433, 0.009442272186279297, 0.00940220832824707, 0.009406463623046875, 0.00962390422821045, 0.00943887996673584, 0.009517056465148926, 0.009473888397216797, 0.009421216011047364, 0.009502464294433594, 0.009518688201904296, 0.009486751556396484, 0.009717151641845704, 0.00943734359741211, 0.009480704307556152, 0.00946886444091797, 0.009782272338867188, 0.009574463844299316, 0.009699423789978028, 0.009534496307373047, 
0.009525823593139648, 0.009199872016906738, 0.00955180835723877, 0.009594240188598634, 0.009580991744995116, 0.009535679817199707, 0.00958240032196045, 0.009471615791320801, 0.009529888153076172, 0.009502016067504882, 0.009451583862304687, 0.009513407707214356, 0.009448512077331543, 0.009540351867675781, 0.009519328117370605, 0.009458815574645996, 0.009552607536315918, 0.009596768379211426, 0.009447744369506836, 0.009566207885742188, 0.009542880058288574, 0.009533535957336426, 0.009542240142822265, 0.00945798397064209, 0.009504416465759277, 0.009611071586608887, 0.009466176033020019, 0.009510656356811524, 0.009486175537109376, 0.009478528022766114, 0.009484543800354003, 0.009462656021118164, 0.00948521614074707, 0.009481823921203614, 0.00958016014099121, 0.009574912071228027, 0.009609184265136718, 0.00952511978149414, 0.009494720458984375, 0.009477727890014649, 0.009522175788879395, 0.009488320350646973, 0.009481951713562012, 0.009503775596618653, 0.009482463836669923, 0.00948300838470459, 0.009601247787475587, 0.009518879890441895, 0.00951910400390625, 0.009463423728942872, 0.009481727600097656, 0.009535519599914551, 0.009496735572814942, 0.009447775840759277, 0.009526847839355469, 0.00947052764892578, 0.009552255630493164, 0.009471839904785156, 0.009488384246826171, 0.009467040061950684, 0.009501791954040528, 0.0094550724029541, 0.009511199951171875, 0.00943513584136963, 0.009265727996826172, 0.00951910400390625, 0.009605024337768555, 0.009561344146728516, 0.009683808326721192, 0.009484095573425293, 0.00951315212249756, 0.009592831611633301, 0.009486335754394531, 0.009576448440551758, 0.009502047538757325, 0.009456607818603515, 0.009547327995300293, 0.009506943702697754, 0.009508416175842286, 0.009560511589050294, 0.009552160263061523, 0.009568127632141113, 0.00947545623779297, 0.009461888313293457, 0.009693535804748535, 0.010358783721923828, 0.009789440155029297, 0.010153183937072754, 0.009540384292602538, 0.009534527778625488, 0.009583295822143555, 0.009501248359680177, 0.009486016273498534, 0.009481823921203614, 0.009457823753356933, 0.009492704391479492, 0.009512063980102538, 0.009517984390258789, 0.009455007553100586, 0.009507136344909668, 0.009494815826416016, 0.009523200035095216, 0.009485631942749024, 0.009494303703308105, 0.00968620777130127, 0.00952905559539795, 0.009916640281677247, 0.00949839973449707, 0.009522815704345704, 0.009528927803039551, 0.009468992233276368, 0.009515071868896484, 0.009493568420410155, 0.00955452823638916, 0.009537535667419434, 0.009450783729553222, 0.009511648178100586, 0.009510080337524414, 0.009443167686462403, 0.010524703979492187, 0.010535872459411622, 0.010131456375122071, 0.00987936019897461, 0.009674240112304687, 0.00953382396697998, 0.009566720008850099, 0.009516863822937012, 0.009199040412902832, 0.009548288345336914, 0.009598976135253906, 0.00953171157836914, 0.009443072319030762, 0.009508831977844238, 0.009402400016784667, 0.009500672340393066, 0.009491616249084473, 0.009440383911132812, 0.009570015907287597, 0.009496735572814942, 0.009426464080810546, 0.00950492763519287, 0.009461248397827148, 0.00944377613067627, 0.009580767631530762, 0.009451519966125489, 0.009453568458557129, 0.009460800170898438, 0.009632703781127929, 0.009494527816772461, 0.00946720027923584, 0.009484992027282715, 0.009447423934936524, 0.009468992233276368, 0.0095447998046875, 0.009459551811218261, 0.009453568458557129, 0.009443327903747559, 0.009426783561706543, 0.009444831848144531, 0.00956486415863037, 0.009489760398864747, 0.009471872329711913, 
0.009450240135192871, 0.009451871871948242, 0.009526399612426758, 0.009415231704711913, 0.009437184333801269, 0.009455615997314454, 0.009488160133361817, 0.00945695972442627, 0.009534432411193847, 0.009476032257080078, 0.009514399528503418, 0.009515904426574706, 0.009440159797668457, 0.009530176162719726, 0.009467904090881347, 0.00941260814666748, 0.009523072242736817, 0.00940822410583496, 0.009467424392700196, 0.00944979190826416, 0.009574080467224121, 0.009491328239440919, 0.009404416084289552, 0.00939136028289795, 0.009460384368896485, 0.00942089557647705, 0.009951359748840332, 0.009747679710388184, 0.009604800224304199, 0.009613632202148437, 0.009564448356628417, 0.009612640380859376, 0.009572735786437988, 0.010172415733337402, 0.009784704208374023, 0.009592960357666016, 0.009554752349853516, 0.009494208335876464, 0.009472000122070312, 0.009541760444641114, 0.009451199531555175, 0.009604928016662597, 0.009559552192687988, 0.009630623817443848, 0.009537504196166993, 0.009467904090881347, 0.009498623847961426, 0.009525247573852539, 0.009470303535461426, 0.009510560035705566, 0.009536831855773927, 0.009482943534851074, 0.009540927886962891, 0.009558912277221679, 0.009523008346557617, 0.009461440086364747, 0.009654623985290527, 0.009567359924316406, 0.009702207565307618, 0.00956214427947998, 0.009570303916931153, 0.009523200035095216, 0.009515007972717286, 0.009576448440551758, 0.009483743667602539, 0.009603424072265625, 0.00957049560546875, 0.009862719535827636, 0.00947209644317627, 0.009439552307128906, 0.009490240097045898, 0.009476544380187989, 0.009485183715820312, 0.009548864364624023, 0.009571776390075684, 0.00949276828765869, 0.009547904014587402, 0.009548959732055664, 0.009475968360900878, 0.009464768409729004, 0.009459744453430176, 0.009510016441345215, 0.009435775756835938, 0.009476351737976074, 0.009441280364990234, 0.009435327529907226, 0.009514816284179687, 0.009545184135437012, 0.009445343971252441, 0.009468640327453613, 0.00942636775970459, 0.009151968002319336, 0.009548576354980468, 0.009551615715026855, 0.00960540771484375, 0.009467904090881347, 0.009598655700683594, 0.009422368049621582, 0.009536160469055175, 0.00960848045349121, 0.009471615791320801, 0.009533439636230469, 0.009477055549621582, 0.00960313606262207, 0.009659744262695313, 0.009599583625793457, 0.010028800010681153, 0.009533568382263184, 0.009515328407287598, 0.009456831932067871, 0.009427359580993652, 0.009717280387878419, 0.009472255706787109, 0.009548224449157715, 0.009789440155029297, 0.00952560043334961, 0.009532159805297851, 0.00952131175994873, 0.009469023704528809, 0.009494144439697266, 0.00946720027923584, 0.009499360084533691, 0.00940544033050537, 0.009468799591064453, 0.009492575645446777, 0.00948799991607666, 0.00946134376525879, 0.00948639965057373, 0.009468671798706054, 0.009497599601745605, 0.009507519721984863, 0.0095665283203125, 0.009491744041442872, 0.009419360160827638, 0.009435263633728028, 0.009503968238830566, 0.009435711860656738, 0.009420736312866211, 0.009485983848571777, 0.009433440208435059, 0.009478591918945312, 0.009458815574645996, 0.009428704261779785, 0.0095098876953125, 0.009433088302612304, 0.00946326446533203, 0.009694047927856445, 0.009419872283935546, 0.00943984031677246, 0.009439264297485352, 0.009539551734924316, 0.009489919662475586, 0.009412096023559571, 0.009466367721557617]",tokens/s,104.70842214988109,, 
bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 989, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 880, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in 
_call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 647, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 369, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,804.470784,1155.39968,0.0,752.877568,713.93792,s,1,7.1009853515625,7.1009853515625,0.0,7.1009853515625,7.1009853515625,7.1009853515625,7.1009853515625,[7.1009853515625],,kWh,2.726066174985438e-06,2.9354288524902467e-07,8.819451500080566e-07,3.90155421024252e-06,,MB,1166.983168,1197.34272,0.0,784.334848,638.879744,s,17,0.316709602355957,0.018629976609173943,0.0004021875873324249,0.018524288177490234,0.018681337356567383,0.01901122512817383,0.0199721272277832,"[0.020212352752685545, 0.018417407989501953, 0.018407583236694336, 0.018498464584350584, 0.018490400314331055, 0.01866160011291504, 0.018524288177490234, 0.0187109432220459, 0.01851033592224121, 0.018500736236572266, 0.018552928924560546, 0.018580255508422853, 0.01855039978027344, 0.01853267288208008, 0.01849295997619629, 0.018494304656982423, 0.01857196807861328]",tokens/s,13741.294762224132,kWh,6.558377950858933e-07,7.229380220652484e-08,4.341937952870089e-07,1.162325392579427e-06,tokens/kWh,220248135.0182723,MB,1179.357184,1222.508544,0.0,809.500672,638.882304,s,17,9.767627563476562,0.5745663272633271,0.0020389105053403604,0.5741109619140625,0.5775568237304687,0.5783615234375,0.5790223046874999,"[0.5735211791992187, 
0.5751273803710938, 0.5771580200195312, 0.5735282592773437, 0.5741109619140625, 0.574290283203125, 0.572973876953125, 0.5791875, 0.578155029296875, 0.5732958984375, 0.5745399780273438, 0.5735796508789063, 0.5745567626953125, 0.571732177734375, 0.5715211181640625, 0.573799560546875, 0.5765499267578125]",tokens/s,109.64791532436381,kWh,1.6439590653754794e-05,1.8130352703744374e-06,7.319809321490908e-06,2.5572435245620142e-05,tokens/kWh,2463590.166321378,,s,1071,9.762833574295032,0.009115624252376326,0.00015834338447529624,0.009078944206237793,0.009219776153564454,0.009306064128875732,0.009774198341369627,"[0.009193216323852539, 0.009070591926574707, 0.009045087814331054, 0.009017663955688477, 0.009015904426574708, 0.00908902359008789, 0.009014975547790528, 0.00903609561920166, 0.009057727813720703, 0.009136704444885254, 0.009047904014587403, 0.009339039802551269, 0.009146368026733399, 0.009111712455749512, 0.009131839752197266, 0.009090720176696777, 0.009107423782348632, 0.009043968200683594, 0.009040543556213379, 0.009049856185913085, 0.00917404842376709, 0.009079104423522949, 0.0090546236038208, 0.009046272277832031, 0.009056256294250489, 0.009046015739440917, 0.009055968284606933, 0.009193535804748534, 0.009046239852905274, 0.009064224243164063, 0.00911996841430664, 0.009125887870788574, 0.009182463645935059, 0.009040639877319337, 0.00912384033203125, 0.009045920372009277, 0.00899897575378418, 0.009064127922058105, 0.009109855651855469, 0.009054176330566406, 0.009205823898315429, 0.009117631912231446, 0.009080287933349609, 0.00909984016418457, 0.009116928100585937, 0.009132736206054688, 0.009154560089111329, 0.009050111770629882, 0.009105055809020997, 0.00911184024810791, 0.009097344398498535, 0.009053983688354493, 0.009476320266723633, 0.009099264144897461, 0.009098272323608398, 0.00922755241394043, 0.009076607704162598, 0.009041728019714356, 0.009162752151489258, 0.00914851188659668, 0.009035679817199707, 0.009033727645874023, 0.009014880180358887, 0.00901039981842041, 0.009141023635864259, 0.009049856185913085, 0.009017567634582519, 0.009021471977233887, 0.009050368309020996, 0.009008895874023438, 0.009002976417541504, 0.009030847549438477, 0.009065312385559082, 0.009016544342041015, 0.009687071800231933, 0.0091342716217041, 0.009028160095214843, 0.00900476837158203, 0.009063872337341308, 0.009292320251464843, 0.009083200454711914, 0.009097344398498535, 0.009074560165405273, 0.008990880012512206, 0.010552831649780273, 0.00917948818206787, 0.00908627223968506, 0.009019871711730958, 0.009060671806335449, 0.009059616088867187, 0.009031328201293945, 0.00898534393310547, 0.009074912071228028, 0.009062399864196777, 0.009134079933166504, 0.009354944229125977, 0.009306431770324707, 0.009179136276245118, 0.009074687957763672, 0.009075743675231934, 0.009076895713806153, 0.009072511672973634, 0.009067456245422363, 0.009115648269653321, 0.009041760444641113, 0.009035231590270996, 0.009052864074707031, 0.009124128341674805, 0.009131744384765625, 0.009064448356628419, 0.009172863960266113, 0.009074463844299316, 0.009084735870361328, 0.009096735954284669, 0.00923750400543213, 0.009209055900573731, 0.009040672302246094, 0.009095168113708496, 0.009166848182678223, 0.00903542423248291, 0.00903775978088379, 0.009064255714416504, 0.009312543869018555, 0.009079104423522949, 0.009156607627868652, 0.009105536460876465, 0.008975199699401856, 0.009060352325439454, 0.009176608085632323, 0.009038304328918457, 0.009094176292419433, 0.009042911529541016, 0.009068511962890624, 0.009037152290344238, 
0.009062111854553223, 0.00905724811553955, 0.008982239723205566, 0.009038111686706544, 0.009164671897888183, 0.00909068775177002, 0.009621760368347169, 0.009109760284423828, 0.009074687957763672, 0.009059488296508789, 0.009059167861938477, 0.009050111770629882, 0.009022815704345703, 0.009021344184875489, 0.009042688369750976, 0.009147583961486816, 0.009150431632995606, 0.009143136024475097, 0.009328448295593261, 0.009050175666809082, 0.010664064407348633, 0.011040608406066894, 0.009268575668334961, 0.009130368232727051, 0.009080703735351563, 0.009110079765319825, 0.009057439804077149, 0.009156736373901367, 0.009153344154357911, 0.009197471618652343, 0.009097151756286621, 0.009109567642211915, 0.009107040405273438, 0.009040287971496582, 0.009043968200683594, 0.009074591636657714, 0.0090665922164917, 0.009155679702758789, 0.009099360466003417, 0.009173824310302734, 0.009168895721435547, 0.009135744094848634, 0.009091456413269043, 0.009143744468688965, 0.009072928428649903, 0.009007328033447266, 0.009060416221618652, 0.00903382396697998, 0.009061408042907716, 0.009054080009460449, 0.00922486400604248, 0.009058655738830567, 0.009067680358886719, 0.00901411247253418, 0.009086976051330567, 0.008980192184448243, 0.009065919876098633, 0.00904201602935791, 0.009057024002075195, 0.009084032058715821, 0.009008000373840332, 0.008986623764038085, 0.008989727973937989, 0.009110400199890137, 0.008994560241699218, 0.009127903938293457, 0.009081215858459472, 0.009051936149597168, 0.009062623977661133, 0.009072640419006347, 0.009129983901977539, 0.009051775932312012, 0.009025888442993164, 0.009486175537109376, 0.009148351669311524, 0.009117504119873047, 0.009194239616394043, 0.00910099220275879, 0.009046015739440917, 0.00908083152770996, 0.009053567886352538, 0.00928217601776123, 0.009035776138305664, 0.00909235191345215, 0.009056384086608887, 0.009073280334472656, 0.009060352325439454, 0.009025535583496093, 0.009084927558898925, 0.009058272361755371, 0.009359392166137695, 0.009136128425598144, 0.009119263648986816, 0.009060832023620605, 0.009250047683715821, 0.009172927856445313, 0.009245375633239745, 0.009097472190856933, 0.009070591926574707, 0.009083840370178222, 0.009098176002502442, 0.00926262378692627, 0.0092227201461792, 0.009063712120056152, 0.009076383590698242, 0.00906544017791748, 0.009072159767150879, 0.009068639755249023, 0.008995200157165527, 0.009114912033081055, 0.009036735534667968, 0.00904316806793213, 0.009171072006225587, 0.009156800270080567, 0.009045503616333007, 0.009120512008666991, 0.009051712036132812, 0.009082688331604003, 0.008992671966552734, 0.00902774429321289, 0.009185279846191406, 0.009179136276245118, 0.009062496185302735, 0.009023391723632812, 0.009009152412414552, 0.009024800300598145, 0.00903878402709961, 0.00905577564239502, 0.009033984184265136, 0.009068544387817384, 0.009034751892089844, 0.009032575607299805, 0.009293951988220216, 0.009275391578674316, 0.009256959915161133, 0.009244928359985352, 0.009721599578857421, 0.009196576118469238, 0.009249376296997071, 0.009157024383544921, 0.009074655532836914, 0.009048064231872559, 0.009072383880615235, 0.009122048377990723, 0.009279104232788086, 0.009075072288513184, 0.009058303833007812, 0.009038880348205566, 0.009010144233703613, 0.00901734447479248, 0.009104448318481445, 0.009290687561035156, 0.009293824195861817, 0.00912166404724121, 0.009275520324707031, 0.009183232307434081, 0.009020928382873536, 0.009118207931518555, 0.009025535583496093, 0.009029631614685058, 0.00909721565246582, 0.009084927558898925, 
0.00903987216949463, 0.009031807899475098, 0.009076191902160644, 0.009091487884521484, 0.009059552192687989, 0.009005855560302735, 0.009190496444702148, 0.009028639793395996, 0.009041791915893554, 0.0090316801071167, 0.009049920082092286, 0.009023648262023926, 0.009061920166015624, 0.009135871887207032, 0.00917193603515625, 0.009047840118408203, 0.009076736450195312, 0.00907804775238037, 0.008999648094177247, 0.008995327949523926, 0.009209856033325196, 0.009022687911987305, 0.009056320190429687, 0.009029760360717773, 0.009046496391296387, 0.009056384086608887, 0.00903987216949463, 0.009084927558898925, 0.009070591926574707, 0.009037823677062988, 0.00904582405090332, 0.009015007972717285, 0.008995295524597168, 0.009306079864501953, 0.009248736381530762, 0.009111616134643554, 0.009052160263061524, 0.009068544387817384, 0.009376992225646972, 0.009081631660461427, 0.00911087989807129, 0.00904252815246582, 0.009019455909729003, 0.009039615631103516, 0.009068096160888672, 0.009062527656555176, 0.009165087699890136, 0.00911302375793457, 0.009038687705993653, 0.009037823677062988, 0.00902291202545166, 0.009099424362182617, 0.009073056221008301, 0.009045472145080566, 0.009038368225097656, 0.00909235191345215, 0.009075008392333984, 0.009294272422790528, 0.009361344337463379, 0.009594623565673827, 0.00905065631866455, 0.00904905605316162, 0.009056351661682128, 0.009030367851257324, 0.009111552238464356, 0.009191424369812011, 0.00933801555633545, 0.009227104187011718, 0.009320735931396484, 0.00906595230102539, 0.009081088066101074, 0.009141951560974122, 0.009150112152099609, 0.009046655654907226, 0.009242655754089356, 0.009074687957763672, 0.009035776138305664, 0.009018431663513184, 0.00903878402709961, 0.009183263778686524, 0.009211872100830078, 0.009011199951171875, 0.009002016067504883, 0.009003999710083008, 0.009154560089111329, 0.009041376113891601, 0.009070207595825195, 0.00907356834411621, 0.009148415565490722, 0.009038080215454101, 0.00903286361694336, 0.009052767753601074, 0.009109503746032714, 0.00905180835723877, 0.009060383796691895, 0.009060671806335449, 0.009057536125183105, 0.009062784194946289, 0.009066880226135254, 0.00920150375366211, 0.009076160430908203, 0.009068608283996582, 0.00903337574005127, 0.009095199584960937, 0.009312928199768067, 0.009148736000061036, 0.009068448066711426, 0.009062496185302735, 0.009067584037780761, 0.009061152458190919, 0.009156767845153808, 0.009014271736145019, 0.009054271697998047, 0.009003968238830566, 0.009086976051330567, 0.00902790355682373, 0.009045439720153809, 0.009052288055419922, 0.009111167907714844, 0.009044320106506347, 0.00904412841796875, 0.00920355224609375, 0.009137344360351563, 0.009136704444885254, 0.009093536376953124, 0.009352736473083496, 0.009158880233764648, 0.009132287979125976, 0.009125951766967774, 0.009058015823364258, 0.009113535881042481, 0.00910547161102295, 0.00911177635192871, 0.009050047874450684, 0.00903987216949463, 0.009023872375488282, 0.009038816452026368, 0.009032416343688964, 0.009049632072448731, 0.009076416015625, 0.009085311889648437, 0.009277600288391114, 0.009093215942382812, 0.009183039665222168, 0.009101887702941895, 0.009086976051330567, 0.009125247955322265, 0.009237119674682617, 0.009052160263061524, 0.009825471878051759, 0.009486528396606445, 0.009941632270812989, 0.009109503746032714, 0.009140224456787109, 0.00911359977722168, 0.009054112434387206, 0.009107199668884278, 0.009070943832397461, 0.009048064231872559, 0.009054176330566406, 0.009086784362792968, 0.009115872383117675, 0.009048064231872559, 
0.009283359527587891, 0.009144543647766113, 0.00906777572631836, 0.009076992034912109, 0.009088543891906739, 0.009027999877929687, 0.009046591758728028, 0.009074272155761719, 0.009116064071655274, 0.009054207801818847, 0.009057920455932617, 0.009057951927185058, 0.009063136100769044, 0.009049568176269531, 0.00903222370147705, 0.009102944374084473, 0.009068960189819337, 0.009088383674621583, 0.009218688011169434, 0.0091278076171875, 0.00906777572631836, 0.009223199844360352, 0.010884960174560547, 0.00970956802368164, 0.009945055961608888, 0.009138015747070313, 0.009134528160095214, 0.009172736167907715, 0.00906396770477295, 0.009107935905456542, 0.009110976219177246, 0.009230751991271973, 0.009267200469970703, 0.009186752319335937, 0.009120479583740234, 0.009232480049133301, 0.009083999633789062, 0.00911228847503662, 0.009045184135437012, 0.009042207717895507, 0.00904640007019043, 0.009068639755249023, 0.009046239852905274, 0.009112640380859375, 0.009095840454101563, 0.008984224319458008, 0.009139743804931641, 0.009079615592956542, 0.00905622386932373, 0.00937782382965088, 0.009013248443603515, 0.009052255630493163, 0.009344927787780762, 0.009115648269653321, 0.00903987216949463, 0.009259008407592773, 0.009099488258361816, 0.009010784149169922, 0.00906668758392334, 0.00903987216949463, 0.009217344284057617, 0.009124480247497558, 0.009092384338378907, 0.009114399909973144, 0.009111552238464356, 0.009058303833007812, 0.009046015739440917, 0.009092255592346192, 0.009105440139770507, 0.009057087898254395, 0.009064448356628419, 0.00909721565246582, 0.009035776138305664, 0.0091627197265625, 0.009219776153564454, 0.009156448364257812, 0.009128447532653808, 0.009058303833007812, 0.00908902359008789, 0.00907430362701416, 0.009021504402160645, 0.009033184051513671, 0.009059167861938477, 0.009129983901977539, 0.009070879936218261, 0.009101344108581542, 0.00915014362335205, 0.009160191535949706, 0.009242815971374512, 0.009105952262878417, 0.00907686424255371, 0.009112704277038574, 0.009097503662109376, 0.009150303840637208, 0.009099488258361816, 0.010281056404113769, 0.009495967864990235, 0.009628352165222167, 0.009844736099243164, 0.00970729637145996, 0.00924899196624756, 0.009108511924743653, 0.009104351997375488, 0.009172991752624511, 0.009142271995544434, 0.009104415893554688, 0.009381983757019043, 0.009257984161376954, 0.009064448356628419, 0.009104512214660644, 0.009032575607299805, 0.00905577564239502, 0.009107935905456542, 0.009051712036132812, 0.009071040153503417, 0.00909721565246582, 0.00912559986114502, 0.00901302433013916, 0.009039775848388672, 0.009173824310302734, 0.009060128211975098, 0.009043968200683594, 0.009003007888793945, 0.009074687957763672, 0.009042143821716309, 0.009055359840393067, 0.00902182388305664, 0.00929206371307373, 0.00901036834716797, 0.00904435157775879, 0.009174592018127441, 0.009065343856811523, 0.00911359977722168, 0.009070591926574707, 0.009101311683654785, 0.00903987216949463, 0.009043968200683594, 0.00910268783569336, 0.009151071548461913, 0.009151840209960937, 0.009153247833251953, 0.009199456214904785, 0.009123104095458984, 0.00913702392578125, 0.00910153579711914, 0.00911740779876709, 0.009059904098510742, 0.009090944290161133, 0.009112192153930663, 0.009119744300842286, 0.009246720314025878, 0.009133472442626953, 0.009097760200500488, 0.009088704109191895, 0.009109888076782226, 0.009070943832397461, 0.009102368354797363, 0.009095808029174805, 0.009185312271118164, 0.00931123161315918, 0.009118687629699708, 0.009046015739440917, 0.0090250244140625, 
0.009064543724060058, 0.009081088066101074, 0.009046175956726073, 0.009037823677062988, 0.009123488426208496, 0.00908086395263672, 0.009022015571594239, 0.009123040199279784, 0.008983103752136231, 0.00904371166229248, 0.009021696090698242, 0.009052160263061524, 0.009072640419006347, 0.009055999755859375, 0.009061951637268067, 0.009131872177124023, 0.009149279594421387, 0.009104960441589356, 0.00909772777557373, 0.009076767921447754, 0.009041824340820313, 0.00908255958557129, 0.009052448272705078, 0.009127967834472657, 0.009002400398254394, 0.009009759902954101, 0.009211647987365722, 0.00926540756225586, 0.009125887870788574, 0.009086976051330567, 0.009107456207275391, 0.009091072082519532, 0.009070591926574707, 0.009031583786010742, 0.009099360466003417, 0.00903987216949463, 0.009063551902770996, 0.009025568008422852, 0.009064576148986817, 0.00913481616973877, 0.009127936363220214, 0.00912384033203125, 0.0091461763381958, 0.00921395206451416, 0.009138367652893066, 0.009224191665649414, 0.009349120140075684, 0.009185279846191406, 0.009170432090759278, 0.009183391571044922, 0.009144672393798828, 0.009179136276245118, 0.00913539218902588, 0.009099807739257813, 0.00910969638824463, 0.00910547161102295, 0.009129631996154785, 0.009090911865234376, 0.009099295616149902, 0.009128607749938965, 0.00906009578704834, 0.009080960273742675, 0.00916425609588623, 0.009099136352539063, 0.009232159614562988, 0.009107232093811035, 0.009441984176635743, 0.009117823600769043, 0.009037983894348145, 0.009137344360351563, 0.009116831779479981, 0.009106111526489259, 0.009162688255310058, 0.009021504402160645, 0.009070367813110351, 0.009026847839355468, 0.009034815788269044, 0.009014719963073731, 0.009095616340637207, 0.009043968200683594, 0.009009152412414552, 0.009072640419006347, 0.00908902359008789, 0.00906214427947998, 0.009052415847778321, 0.009498623847961426, 0.00935321617126465, 0.009211903572082519, 0.009168288230895997, 0.009071200370788575, 0.009119647979736328, 0.009336352348327637, 0.009128512382507325, 0.009093119621276855, 0.00901529598236084, 0.009154560089111329, 0.009137824058532714, 0.009037440299987792, 0.0090283203125, 0.009052160263061524, 0.009010623931884766, 0.009034527778625488, 0.009115424156188965, 0.009146368026733399, 0.009115648269653321, 0.009045472145080566, 0.009056256294250489, 0.009056351661682128, 0.009017951965332031, 0.00906169605255127, 0.00910313606262207, 0.009087776184082031, 0.009078911781311035, 0.009031519889831544, 0.009102368354797363, 0.009057184219360352, 0.009123871803283692, 0.009154208183288574, 0.009142656326293946, 0.00908083152770996, 0.00907472038269043, 0.009123807907104492, 0.009066464424133301, 0.009060383796691895, 0.00901734447479248, 0.009142111778259278, 0.009091232299804687, 0.009068544387817384, 0.009103103637695313, 0.00917692756652832, 0.009062111854553223, 0.009066880226135254, 0.009064831733703614, 0.009295807838439942, 0.009028767585754395, 0.009075551986694337, 0.009078495979309082, 0.009154848098754883, 0.009136128425598144, 0.009125887870788574, 0.009179136276245118, 0.009074560165405273, 0.009059807777404786, 0.009181247711181641, 0.009297951698303223, 0.009130399703979492, 0.009087008476257324, 0.009160832405090332, 0.009074687957763672, 0.009230367660522461, 0.009099231719970704, 0.009134079933166504, 0.009087039947509765, 0.00901318359375, 0.009313983917236328, 0.009295583724975586, 0.00909334373474121, 0.009070976257324219, 0.009277312278747559, 0.009134207725524902, 0.009028608322143555, 0.009034751892089844, 0.009043935775756837, 
0.009035807609558105, 0.009101311683654785, 0.009121184349060058, 0.009072832107543945, 0.009074432373046875, 0.009036064147949219, 0.009099455833435058, 0.009057632446289062, 0.00911855983734131, 0.0090513277053833, 0.009065279960632324, 0.009070112228393554, 0.009070976257324219, 0.009093215942382812, 0.009033760070800782, 0.009062591552734376, 0.009087072372436524, 0.009135807991027832, 0.009199935913085938, 0.009074591636657714, 0.009080479621887207, 0.009011039733886719, 0.009040255546569825, 0.009066399574279784, 0.010082592010498046, 0.00913379192352295, 0.009078783988952637, 0.00911359977722168, 0.009042176246643066, 0.009138112068176269, 0.009021247863769531, 0.009057791709899902, 0.009023551940917968, 0.00904032039642334, 0.009136672019958497, 0.00921782398223877, 0.009107680320739745, 0.009091072082519532, 0.009068448066711426, 0.009154047966003418, 0.009026144027709961, 0.009052160263061524, 0.009088000297546387, 0.009126208305358886, 0.009079071998596191, 0.009028160095214843, 0.009023327827453614, 0.009048031806945802, 0.009013279914855957, 0.009029631614685058, 0.009072640419006347, 0.00902348804473877, 0.009070591926574707, 0.00899891185760498, 0.009210975646972656, 0.009097663879394532, 0.009067232131958008, 0.009042752265930177, 0.009117792129516602, 0.009032032012939454, 0.008988736152648925, 0.009017472267150878, 0.009042271614074707, 0.009088992118835449, 0.009072640419006347, 0.009098719596862793, 0.009093600273132324, 0.009093024253845216, 0.009046175956726073, 0.009053215980529786, 0.0090797119140625, 0.009017727851867675, 0.009023167610168458, 0.009080927848815918, 0.009207424163818359, 0.009109791755676269, 0.009121343612670899, 0.009036224365234375, 0.009031807899475098, 0.009047327995300293, 0.009070240020751952, 0.009243583679199219, 0.009062399864196777, 0.009053791999816894, 0.009068960189819337, 0.009003328323364259, 0.008998592376708985, 0.008986399650573731, 0.00910153579711914, 0.009016608238220215, 0.00903446388244629, 0.00902143955230713, 0.009064448356628419, 0.009111552238464356, 0.009125408172607421, 0.009043840408325196, 0.009083488464355468, 0.009014271736145019, 0.009080191612243653, 0.009060992240905761, 0.009035776138305664, 0.009053824424743652, 0.009012736320495606, 0.009015263557434082, 0.009036704063415528, 0.009066304206848145, 0.009033920288085938, 0.00900710391998291, 0.00909721565246582, 0.0090000638961792, 0.009046591758728028, 0.009000927925109863, 0.00909552001953125, 0.009150303840637208, 0.009052319526672363, 0.00911302375793457, 0.009016223907470703, 0.009012895584106445, 0.009053215980529786, 0.009063360214233398, 0.009077919960021972, 0.009024095535278321, 0.008974559783935547, 0.009523008346557617, 0.009066752433776856, 0.009053759574890137, 0.008977120399475097, 0.009049823760986328, 0.008996064186096192, 0.009161343574523926, 0.009076224327087403, 0.009007776260375976, 0.009025279998779296, 0.009001215934753419, 0.009074399948120117, 0.009074975967407226, 0.009005056381225587, 0.009002495765686035, 0.009099007606506348, 0.008984800338745116, 0.009028127670288086, 0.009043519973754882, 0.009082528114318847, 0.009005855560302735, 0.009162367820739746, 0.009050496101379394, 0.009143391609191894, 0.009089887619018555, 0.009152576446533204, 0.009191424369812011, 0.0090862398147583, 0.009076992034912109, 0.009030112266540527, 0.00910099220275879, 0.009041279792785644, 0.00913708782196045, 0.009093119621276855, 0.00912384033203125, 0.009127936363220214, 0.009084575653076172, 0.00898252773284912, 0.00908083152770996, 
0.00908083152770996, 0.009027584075927735, 0.009026944160461426, 0.009116288185119628, 0.009074687957763672, 0.009059616088867187, 0.009023551940917968, 0.009115327835083007, 0.009044960021972656, 0.009026752471923829, 0.009038656234741212, 0.009174976348876953, 0.00907267189025879, 0.009126943588256836, 0.00922111988067627, 0.009155743598937989, 0.009199711799621582, 0.009074687957763672, 0.009125632286071777, 0.009021951675415038, 0.009110015869140625, 0.009074175834655761, 0.00906281566619873, 0.00928275203704834, 0.009556896209716797, 0.009254207611083984, 0.009181887626647948, 0.009060352325439454, 0.009074687957763672, 0.009020832061767578, 0.009077024459838868, 0.009010944366455078, 0.009112128257751465, 0.009080191612243653, 0.009070303916931153, 0.008991647720336914, 0.009046175956726073, 0.009014335632324219, 0.00902143955230713, 0.00915062427520752, 0.009161343574523926, 0.009111552238464356, 0.009125887870788574, 0.009230079650878907, 0.009165151596069336, 0.009074591636657714, 0.009306048393249512, 0.009596991539001464, 0.009136128425598144, 0.009091039657592774, 0.009027615547180175, 0.009009152412414552, 0.009043968200683594, 0.00909062385559082, 0.009003456115722657, 0.009025535583496093, 0.009025535583496093, 0.009035776138305664, 0.009029631614685058, 0.009038080215454101, 0.00903756809234619, 0.008980704307556151, 0.009115967750549316, 0.009093119621276855, 0.009164799690246582, 0.009045632362365722, 0.009169280052185058, 0.009101311683654785, 0.009076736450195312, 0.009125887870788574, 0.009092960357666015, 0.009078944206237793, 0.00902348804473877, 0.009117695808410644, 0.009043456077575684, 0.009035584449768066, 0.009091360092163086, 0.009164480209350586, 0.009079104423522949, 0.009128352165222169, 0.009086367607116699, 0.009011648178100585, 0.009131327629089355, 0.009015647888183594, 0.009067008018493652, 0.009000960350036622, 0.009027392387390136, 0.009083071708679199, 0.009069952011108398, 0.009037471771240235, 0.009077631950378418, 0.009031776428222657, 0.009090975761413574, 0.00909443187713623, 0.009101183891296386, 0.009214912414550781, 0.009124959945678711, 0.009024064064025878, 0.009021120071411133, 0.009112223625183106, 0.009302335739135742, 0.00918716812133789, 0.009246080398559571, 0.009102080345153809, 0.009046879768371582, 0.009026432037353515, 0.009098336219787598, 0.009115648269653321, 0.00908790397644043, 0.009373696327209472, 0.009555968284606933, 0.009897983551025391, 0.00975222396850586, 0.009135775566101074, 0.009144031524658203, 0.009221088409423829, 0.009350943565368652, 0.009355487823486329, 0.009196767807006836, 0.00921065616607666, 0.009080191612243653, 0.009321087837219239, 0.009181183815002441, 0.009142271995544434]",tokens/s,109.70175736887278,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,890.351616,6230.50752,0.0,5827.985408,5712.718848,s,1,7.15709033203125,7.15709033203125,0.0,7.15709033203125,7.15709033203125,7.15709033203125,7.15709033203125,[7.15709033203125],,kWh,6.625194041703253e-06,7.191388086573149e-07,2.117779472010639e-06,9.462112322371207e-06,,MB,1231.507456,6438.125568,0.0,6025.117696,5988.31104,s,10,4.911230285644531,0.4911230285644531,0.0029008336856568306,0.4917835998535156,0.49330024719238286,0.49345879364013673,0.49358563079833984,"[0.4830303955078125, 0.4903384094238281, 0.4905237121582031, 0.49138363647460936, 0.49135403442382813, 0.49218356323242185, 0.49309970092773436, 0.49243447875976565, 0.4932650146484375, 0.4936173400878906]",tokens/s,521.254319408082,kWh,1.4252323701389793e-05,1.5717865166524396e-06,9.447745653428943e-06,2.5271855871471177e-05,tokens/kWh,10129845.679002648,MB,1256.259584,6522.011648,0.0,6109.003776,6092.423168,s,10,18.294562988281253,1.8294562988281249,0.0021146853360502925,1.8288551025390625,1.8326095092773438,1.8332659973144532,1.8337911877441406,"[1.826995849609375, 1.82685693359375, 1.8283814697265626, 1.8292921142578125, 1.8287640380859376, 1.8339224853515625, 1.832463623046875, 1.83017626953125, 1.8287686767578124, 1.8289415283203125]",tokens/s,34.436460734457135,kWh,5.364253160777656e-05,5.9168048036461426e-06,3.552104032157082e-05,9.50803767329935e-05,tokens/kWh,662597.2904684399,,s,630,18.291681854248036,0.029034415641663567,0.0002547170058459476,0.029017871856689456,0.02925580253601074,0.029345072650909427,0.030420022926330566,"[0.030507007598876954, 0.02950553512573242, 0.029058528900146485, 0.028807647705078127, 0.028712799072265625, 0.028517663955688475, 0.028593088150024416, 0.028902591705322264, 0.028807071685791014, 0.028855199813842772, 0.02877235221862793, 0.0287126407623291, 0.028881023406982422, 0.028762304306030273, 0.028874752044677734, 0.028657663345336915, 0.02885785675048828, 0.028866144180297853, 0.028881824493408204, 0.02874300765991211, 0.02881987190246582, 0.02886835289001465, 0.028883455276489257, 0.028850175857543944, 0.028948480606079102, 0.028850175857543944, 0.028909248352050783, 0.02885424041748047, 0.02911280059814453, 0.029036415100097655, 0.029058271408081055, 0.02912745666503906, 0.028985408782958983, 0.029250751495361327, 0.029113279342651368, 0.029185855865478515, 0.029095935821533202, 0.02901750373840332, 0.029008480072021486, 0.028961824417114257, 0.028893280029296874, 0.02901689529418945, 0.029009279251098634, 0.029075456619262696, 0.028977088928222657, 0.029032768249511717, 0.029038496017456054, 0.029049375534057616, 0.02897865676879883, 0.029090112686157226, 0.02899955177307129, 0.02909379196166992, 0.029104576110839844, 0.02917356872558594, 0.029122751235961915, 0.02905881690979004, 0.029020416259765626, 0.029081600189208984, 0.029173343658447266, 0.029018527984619142, 0.029018239974975588, 0.029224031448364256, 0.029262624740600585, 0.030441568374633788, 0.029398815155029297, 0.02900809669494629, 0.028700672149658202, 0.02876176071166992, 0.028731584548950195, 0.028612768173217774, 0.028839935302734376, 0.02870681571960449, 0.02873139190673828, 0.028876800537109375, 0.02869862365722656, 0.02885990333557129, 0.028750335693359375, 0.02884115219116211, 0.02871980857849121, 0.02874131202697754, 0.028870527267456054, 0.02875040054321289, 0.028813247680664063, 0.0287314567565918, 0.028898527145385742, 0.02893276786804199, 0.02894054412841797, 0.028847999572753906, 0.02893929672241211, 0.028871488571166993, 
0.028971391677856444, 0.02895849609375, 0.028876319885253906, 0.028985824584960938, 0.029085695266723634, 0.029091392517089844, 0.029167808532714844, 0.02929280090332031, 0.029191999435424804, 0.029208768844604494, 0.029075456619262696, 0.029114368438720704, 0.029001728057861328, 0.028919456481933593, 0.029049184799194334, 0.02900377655029297, 0.028923904418945313, 0.029001728057861328, 0.02903628730773926, 0.029092096328735353, 0.0290119686126709, 0.02904473686218262, 0.029048831939697265, 0.029065120697021486, 0.02894857597351074, 0.029054975509643553, 0.029130752563476563, 0.029041919708251953, 0.029092607498168947, 0.029109760284423827, 0.02916534423828125, 0.029075712203979493, 0.029045215606689455, 0.029146495819091796, 0.029309280395507814, 0.029226303100585938, 0.03057868766784668, 0.0295295352935791, 0.029053056716918945, 0.028817983627319337, 0.028613759994506837, 0.028856351852416993, 0.028801887512207032, 0.028786527633666993, 0.028823551177978517, 0.028745887756347656, 0.028806720733642578, 0.028616992950439454, 0.028848127365112306, 0.02887222480773926, 0.028847904205322267, 0.028810943603515625, 0.028817855834960937, 0.028826240539550782, 0.028815135955810547, 0.028903583526611328, 0.02879283142089844, 0.028888959884643555, 0.028823104858398438, 0.028918432235717773, 0.028921791076660156, 0.028798688888549806, 0.028922111511230468, 0.028891136169433593, 0.02897715187072754, 0.028889087677001952, 0.029015039443969725, 0.02899046325683594, 0.028984928131103517, 0.029302848815917968, 0.0291495361328125, 0.029241247177124022, 0.029110368728637696, 0.02916691207885742, 0.02914374351501465, 0.029124607086181642, 0.029040639877319335, 0.02902835273742676, 0.0290447998046875, 0.02906502342224121, 0.02897430419921875, 0.029012960433959963, 0.02919001579284668, 0.029058143615722655, 0.0289781436920166, 0.02909388732910156, 0.029121952056884767, 0.029131359100341796, 0.029048511505126953, 0.029061439514160157, 0.029050880432128907, 0.02925574493408203, 0.029099872589111328, 0.02921072006225586, 0.029122560501098634, 0.02907279968261719, 0.029176416397094725, 0.02915657615661621, 0.029276191711425783, 0.030654239654541015, 0.02951379203796387, 0.028972768783569337, 0.028868831634521485, 0.028687999725341796, 0.02886249542236328, 0.028649663925170897, 0.02882473564147949, 0.028867071151733398, 0.028785152435302733, 0.028902816772460937, 0.02872969627380371, 0.028887296676635744, 0.02874367904663086, 0.028856351852416993, 0.028760032653808595, 0.02878879928588867, 0.028743423461914063, 0.028790975570678713, 0.028894464492797853, 0.02885840034484863, 0.028862752914428712, 0.028883264541625975, 0.028831872940063476, 0.028937440872192383, 0.0289531192779541, 0.02900111961364746, 0.028963327407836914, 0.028950944900512695, 0.029267295837402344, 0.029137247085571288, 0.029038848876953124, 0.029054975509643553, 0.02915123176574707, 0.029280256271362305, 0.029435903549194335, 0.02934988784790039, 0.02918592071533203, 0.029046911239624024, 0.029040319442749023, 0.02905683135986328, 0.029012479782104493, 0.02898124885559082, 0.029089536666870117, 0.0290402889251709, 0.0290164794921875, 0.029038623809814455, 0.029052192687988282, 0.02894118309020996, 0.02902751922607422, 0.028988224029541015, 0.029070943832397462, 0.029305248260498046, 0.029073408126831055, 0.029073152542114258, 0.029065311431884764, 0.0291146240234375, 0.029099327087402344, 0.029087839126586915, 0.029158975601196287, 0.029184383392333986, 0.029290752410888674, 0.0292293758392334, 0.03042745590209961, 0.029378496170043945, 
0.028953887939453124, 0.028777023315429688, 0.028770240783691406, 0.0285882568359375, 0.02887833595275879, 0.02891827201843262, 0.028933439254760742, 0.028781248092651368, 0.028893024444580077, 0.02876416015625, 0.028867807388305664, 0.028752479553222656, 0.028854303359985352, 0.028727199554443358, 0.02867036819458008, 0.02895462417602539, 0.028882848739624024, 0.028932159423828124, 0.028870975494384766, 0.028888736724853516, 0.028911680221557618, 0.028889087677001952, 0.028958112716674804, 0.028844127655029295, 0.028953088760375976, 0.028891136169433593, 0.02891747283935547, 0.028954912185668945, 0.028919263839721678, 0.029131296157836915, 0.02909494400024414, 0.029217567443847656, 0.029173952102661133, 0.029218656539916992, 0.02912886428833008, 0.029048831939697265, 0.029081024169921876, 0.02904934310913086, 0.028944448471069337, 0.029030399322509767, 0.02907369613647461, 0.029058784484863282, 0.02898739242553711, 0.029093536376953125, 0.029046592712402345, 0.02905881690979004, 0.028971136093139647, 0.02908172798156738, 0.029024768829345703, 0.02904473686218262, 0.02906319999694824, 0.029081600189208984, 0.029181024551391602, 0.02908457565307617, 0.029068864822387696, 0.02923289680480957, 0.029252256393432617, 0.029177888870239258, 0.02933065605163574, 0.02936502456665039, 0.0292903995513916, 0.030401824951171875, 0.029473215103149413, 0.02910236740112305, 0.028884992599487305, 0.028803071975708007, 0.028670080184936525, 0.02863814353942871, 0.028803936004638674, 0.028837984085083007, 0.028786464691162108, 0.028821056365966796, 0.028754592895507813, 0.028854272842407228, 0.02875366401672363, 0.02884147262573242, 0.0286844482421875, 0.028801727294921874, 0.028866464614868165, 0.028983295440673826, 0.028804288864135743, 0.02891152000427246, 0.028848735809326172, 0.0290184326171875, 0.028982912063598633, 0.028837440490722656, 0.02897926330566406, 0.028956672668457032, 0.029068031311035157, 0.02904473686218262, 0.02897920036315918, 0.028976512908935548, 0.02925632095336914, 0.02920243263244629, 0.029372415542602538, 0.02936777687072754, 0.029311872482299803, 0.029434944152832033, 0.029370975494384766, 0.029075136184692384, 0.029066560745239257, 0.029258752822875978, 0.02940438461303711, 0.029276544570922852, 0.029323680877685547, 0.029167072296142578, 0.029214975357055663, 0.029208864212036133, 0.029296096801757814, 0.029269567489624025, 0.02927305603027344, 0.029327264785766603, 0.02925372886657715, 0.029302783966064453, 0.029268159866333007, 0.029202144622802736, 0.029130847930908203, 0.029345792770385744, 0.029298688888549803, 0.02931679916381836, 0.02937628746032715, 0.029170272827148437, 0.029142976760864258, 0.02916864013671875, 0.030511455535888674, 0.029472768783569334, 0.02905027198791504, 0.028868864059448242, 0.028760288238525392, 0.02888479995727539, 0.028660255432128905, 0.028948287963867187, 0.028968608856201172, 0.028968576431274415, 0.029040672302246093, 0.028873376846313477, 0.02906937599182129, 0.028997568130493163, 0.02885990333557129, 0.0289465274810791, 0.028893600463867186, 0.029080799102783203, 0.029053152084350584, 0.029036832809448243, 0.028952768325805664, 0.02911199951171875, 0.029075872421264647, 0.028935808181762696, 0.028961151123046876, 0.029003263473510742, 0.029061632156372072, 0.028913408279418944, 0.029125152587890626, 0.02903011131286621, 0.028953760147094727, 0.0290230712890625, 0.029085504531860353, 0.029139135360717775, 0.0291778564453125, 0.029112319946289062, 0.029214111328125, 0.029344192504882814, 0.02920902442932129, 0.0291378231048584, 
0.029096767425537108, 0.029030080795288085, 0.028963136672973632, 0.029111520767211914, 0.029207231521606446, 0.029112415313720705, 0.02914227294921875, 0.029072128295898437, 0.028937728881835937, 0.029184511184692383, 0.029033792495727538, 0.029039295196533203, 0.029009920120239258, 0.029123968124389648, 0.029090240478515626, 0.029098175048828126, 0.029150623321533203, 0.029190303802490235, 0.02917625617980957, 0.02912371253967285, 0.02923948860168457, 0.029237695693969726, 0.029315200805664063, 0.030638080596923828, 0.029524192810058594, 0.029053951263427736, 0.028883071899414064, 0.028820127487182618, 0.02869209671020508, 0.028780319213867187, 0.028897727966308594, 0.028836000442504884, 0.02878995132446289, 0.028733407974243164, 0.02882032012939453, 0.028712575912475585, 0.028853824615478516, 0.02883180809020996, 0.028875328063964843, 0.028698720932006837, 0.028936288833618165, 0.02893619155883789, 0.029013248443603517, 0.028870784759521484, 0.028872800827026368, 0.028910303115844728, 0.028890335083007812, 0.029016511917114258, 0.02892406463623047, 0.029106176376342774, 0.028997631072998048, 0.028984960556030274, 0.029010143280029297, 0.02904863929748535, 0.02900351905822754, 0.028999807357788086, 0.02930940818786621, 0.029214719772338867, 0.029335552215576172, 0.02924492835998535, 0.02923091125488281, 0.029214559555053712, 0.029119104385375977, 0.029108352661132812, 0.02897315216064453, 0.028964351654052735, 0.02906470489501953, 0.02910643196105957, 0.02902911949157715, 0.02898918342590332, 0.029056703567504883, 0.029012544631958008, 0.028966976165771485, 0.028979135513305665, 0.029122560501098634, 0.0291059513092041, 0.029515264511108398, 0.02905571174621582, 0.029069311141967775, 0.02913689613342285, 0.029130752563476563, 0.029214719772338867, 0.029148927688598635, 0.029161727905273438, 0.029130176544189455, 0.029230880737304687, 0.030392704010009767, 0.02944223976135254, 0.029079616546630858, 0.028852224349975586, 0.028846080780029298, 0.02861392021179199, 0.028743520736694336, 0.028816255569458007, 0.028731552124023438, 0.02881123161315918, 0.02873740768432617, 0.02877644729614258, 0.0289300479888916, 0.028825599670410155, 0.028923423767089843, 0.028862943649291994, 0.028765632629394532, 0.02889081573486328, 0.028934944152832032, 0.02900601577758789, 0.028884479522705078, 0.028848543167114257, 0.028837888717651368, 0.02895257568359375, 0.028913503646850587, 0.028887199401855468, 0.028956672668457032, 0.028958303451538086, 0.02890096092224121, 0.02893440055847168, 0.028926528930664063, 0.029083295822143553, 0.02910857582092285, 0.029179904937744142, 0.02920857620239258, 0.029222911834716796, 0.029290496826171877, 0.029171712875366212, 0.029149152755737304, 0.029074623107910157, 0.02903536033630371, 0.02899718475341797, 0.029007488250732422, 0.02906172752380371, 0.02896918487548828, 0.02896691131591797, 0.029042688369750977, 0.029095808029174806, 0.029024383544921876, 0.02905619239807129, 0.029110240936279296, 0.029025184631347657, 0.029003711700439454, 0.029045951843261718, 0.029170623779296877, 0.02908051109313965, 0.02911664009094238, 0.029182144165039062, 0.0291190071105957, 0.029218624114990235, 0.029143232345581055, 0.02929871940612793, 0.02921379280090332, 0.030367616653442384, 0.02945859146118164, 0.029083711624145508, 0.028739871978759764, 0.02877235221862793, 0.028810623168945313, 0.02882419204711914, 0.02878361511230469, 0.02877337646484375, 0.028833791732788085, 0.0287127685546875, 0.028900672912597656, 0.02882009506225586, 0.028864160537719726, 0.028742240905761718, 
0.02885593605041504, 0.028913951873779296, 0.028938335418701173, 0.02887068748474121, 0.028964864730834962, 0.028927967071533202, 0.028827423095703124, 0.02897327995300293, 0.028948448181152345, 0.028960800170898436, 0.028938175201416016, 0.02893779182434082, 0.028955135345458984, 0.028909408569335937, 0.029014240264892577, 0.028970239639282226, 0.028928672790527344, 0.02912073516845703, 0.029390655517578124, 0.029302560806274414, 0.029130912780761718, 0.029046848297119142, 0.028983295440673826, 0.028953760147094727, 0.02901024055480957, 0.02907804870605469, 0.028993343353271483, 0.02902035140991211, 0.029069311141967775, 0.029095104217529297, 0.02895510482788086, 0.029097536087036132, 0.02911235237121582, 0.02902092742919922, 0.029075456619262696, 0.029109823226928712, 0.02916531181335449, 0.029092031478881834, 0.029059488296508788, 0.02903830337524414, 0.02923353576660156, 0.029179904937744142, 0.029179744720458985, 0.029247007369995116, 0.029165855407714845, 0.02909833526611328, 0.02913484764099121, 0.029191871643066407]",tokens/s,34.44188484251869,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = 
GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 718, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 414.12 MiB is free. Process 193662 has 14.33 GiB memory in use. Of the allocated memory 14.22 GiB is allocated by PyTorch, and 1.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained 
config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 150.12 MiB is free. Process 161422 has 14.59 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.43 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1089, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.14 GiB is free. Process 65151 has 13.60 GiB memory in use. Of the allocated memory 13.48 GiB is allocated by PyTorch, and 16.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained 
cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,884.224,11792.154624,0.0,11389.632512,11388.883968,s,1,7.8313125,7.8313125,0.0,7.8313125,7.8313125,7.8313125,7.8313125,[7.8313125],,kWh,8.06959539163472e-06,8.824581394069759e-07,4.33750347000883e-06,1.3289557001050525e-05,,MB,1224.54016,12089.950208,0.0,11676.942336,11620.241408,s,10,3.490746551513672,0.34907465515136715,0.0055193712063493034,0.34980799865722656,0.3546589813232422,0.35579297637939455,0.3567001724243164,"[0.33511196899414064, 0.35692697143554686, 0.354406982421875, 0.3505115966796875, 0.3467518310546875, 0.3485571899414063, 0.34954595947265626, 0.35171115112304685, 0.35007003784179686, 0.3471528625488281]",tokens/s,733.3674794837003,kWh,9.9898313958349e-06,1.101693544512086e-06,6.6090238057333234e-06,1.770054874608031e-05,tokens/kWh,14462828.450823583,MB,1229.873152,12089.950208,0.0,11676.942336,11620.243968,s,10,30.791531494140628,3.0791531494140623,0.0013443508625656396,3.0793005371093747,3.0805783935546875,3.0810168579101562,3.0813676293945313,"[3.0769521484375, 3.0769287109375, 3.078533935546875, 3.079562744140625, 3.079083251953125, 3.079933349609375, 3.079367431640625, 3.079233642578125, 3.081455322265625, 3.08048095703125]",tokens/s,20.46017100902837,kWh,9.01476874812483e-05,9.943435284196548e-06,5.996016833846688e-05,0.00016005129110391176,tokens/kWh,393623.8162496162,,s,630,30.787739788055415,0.04886942823500861,0.0002978430679788184,0.04882472038269043,0.049090335464477546,0.049156426620483404,0.050685764045715336,"[0.05087500762939453, 0.049391616821289064, 0.04866870498657227, 0.048529376983642576, 0.04857440185546875, 0.04860115051269531, 0.0485662727355957, 0.04862300872802734, 0.04855868911743164, 0.04859904098510742, 0.048582431793212894, 0.048767200469970705, 0.04860825729370117, 0.04864921569824219, 0.048756511688232425, 0.048574016571044924, 0.04862553787231445, 0.048689281463623044, 0.04870415878295899, 0.04912348937988281, 0.0489568977355957, 0.04892268753051758, 0.0488656005859375, 0.04885913467407227, 0.048828414916992184, 0.04864409637451172, 0.04863123321533203, 0.048610912322998044, 0.04870652770996094, 0.04873420715332031, 0.04881203079223633, 0.04877107238769531, 0.04872185516357422, 0.04870560073852539, 0.04864815902709961, 0.04886710357666016, 0.04867712020874024, 0.04884288024902344, 0.04887932968139649, 0.0489535026550293, 0.04920115280151367, 0.04890332794189453, 0.048997150421142575, 0.04890188980102539, 0.048922943115234374, 0.04880374526977539, 0.04882032012939453, 0.04889395141601562, 
0.04879148864746094, 0.04879776000976563, 0.04890179061889648, 0.048933216094970707, 0.04876851272583008, 0.048793888092041014, 0.048680255889892575, 0.04886159896850586, 0.04884918212890625, 0.04888134384155273, 0.04893545532226563, 0.04904278564453125, 0.048967967987060546, 0.04916672134399414, 0.049070079803466796, 0.050841598510742186, 0.04908342361450195, 0.04871267318725586, 0.048584671020507814, 0.04858617782592774, 0.04859145736694336, 0.0485478401184082, 0.048584705352783204, 0.04864409637451172, 0.048656383514404294, 0.048601089477539064, 0.04861337661743164, 0.048611328125, 0.048551937103271485, 0.048641696929931644, 0.04878166580200195, 0.04859699249267578, 0.04875459289550781, 0.04876502227783203, 0.04912947082519531, 0.04908595275878906, 0.04915027236938477, 0.04880556869506836, 0.04873859024047852, 0.048767200469970705, 0.04875212860107422, 0.048652702331542966, 0.04864419174194336, 0.04869529724121094, 0.048645729064941405, 0.048775489807128904, 0.048783454895019535, 0.04875251388549805, 0.04866668701171875, 0.04868307113647461, 0.04880166244506836, 0.04873638534545898, 0.04872192001342773, 0.04887551879882813, 0.048973087310791016, 0.04895759963989258, 0.04899862289428711, 0.04898441696166992, 0.04890828704833984, 0.04898918533325195, 0.04888835144042969, 0.04903334426879883, 0.04881238555908203, 0.048739551544189456, 0.048825119018554686, 0.04882134246826172, 0.04874739074707031, 0.048877601623535154, 0.048775135040283205, 0.04890982437133789, 0.04886172866821289, 0.04894287872314453, 0.048776798248291016, 0.048919166564941406, 0.04907417678833008, 0.04900783920288086, 0.049076160430908206, 0.04904022216796875, 0.05099302291870117, 0.049122207641601565, 0.04876083374023438, 0.04858060836791992, 0.04862179183959961, 0.04878688049316406, 0.048623966217041015, 0.04857651138305664, 0.04853744125366211, 0.04859689712524414, 0.0485931510925293, 0.04853110504150391, 0.04860950469970703, 0.048593025207519534, 0.04859904098510742, 0.04857062530517578, 0.04860460662841797, 0.04868947219848633, 0.04880284881591797, 0.049025310516357425, 0.049097408294677736, 0.04907958221435547, 0.048995201110839846, 0.04886307144165039, 0.04874233627319336, 0.04869116973876953, 0.04876240158081055, 0.04875107192993164, 0.049024478912353515, 0.04868159866333008, 0.04865228652954102, 0.04873958587646485, 0.04879548645019531, 0.04871260833740235, 0.04867628860473633, 0.048740127563476565, 0.04876911926269531, 0.0489048957824707, 0.048911937713623045, 0.049014881134033204, 0.048979934692382814, 0.049197025299072265, 0.04903977584838867, 0.0490041618347168, 0.048802207946777344, 0.04886700820922851, 0.04883894348144531, 0.04886732864379883, 0.04888800048828125, 0.04876822280883789, 0.04883516693115234, 0.04878745651245117, 0.048879390716552736, 0.04884201431274414, 0.048756736755371094, 0.04889475250244141, 0.048861343383789065, 0.04896768188476563, 0.049086463928222655, 0.04912947082519531, 0.04914281463623047, 0.04915708923339844, 0.04913049697875976, 0.051071041107177736, 0.04940595245361328, 0.048828128814697266, 0.04865475082397461, 0.04855401611328125, 0.04859107208251953, 0.048624481201171875, 0.04860982513427734, 0.04868342590332031, 0.0488421745300293, 0.04865039825439453, 0.048655902862548825, 0.04862860870361328, 0.048611167907714845, 0.04869955062866211, 0.04870553588867187, 0.048694496154785154, 0.04879644775390625, 0.04879974365234375, 0.049071872711181644, 0.049111297607421875, 0.048928768157958984, 0.048901153564453126, 0.048796607971191404, 0.04872380828857422, 0.04864182281494141, 
0.04873161697387695, 0.04873107147216797, 0.048747745513916016, 0.04886985778808594, 0.048740318298339844, 0.048734046936035155, 0.04870953750610352, 0.04879625701904297, 0.0488304328918457, 0.048748577117919925, 0.048758785247802736, 0.04876287841796875, 0.04900790405273438, 0.049060577392578124, 0.04908355331420899, 0.04901564788818359, 0.04899001693725586, 0.0489854736328125, 0.0488741455078125, 0.04887363052368164, 0.04885811233520508, 0.048763648986816406, 0.04884262466430664, 0.04888409423828125, 0.04877926254272461, 0.04887113571166992, 0.04885942459106445, 0.048906238555908206, 0.04881203079223633, 0.04892435073852539, 0.04892816162109375, 0.04898015975952148, 0.049001182556152344, 0.0491253776550293, 0.049111038208007815, 0.0491124153137207, 0.049076702117919924, 0.05068851089477539, 0.04925263977050781, 0.048787136077880856, 0.04866867065429688, 0.048635265350341794, 0.048731998443603514, 0.04867766571044922, 0.048451583862304685, 0.048601089477539064, 0.0485928955078125, 0.04859632110595703, 0.04862223815917969, 0.04858224105834961, 0.04865475082397461, 0.04861663818359375, 0.048710464477539066, 0.04869340896606445, 0.04878905487060547, 0.048834110260009764, 0.048995006561279295, 0.04905372619628906, 0.048982017517089846, 0.048879615783691405, 0.04881987380981445, 0.048699710845947264, 0.048730144500732424, 0.048729248046875, 0.04878969573974609, 0.04872220611572266, 0.048718143463134765, 0.048743553161621093, 0.04871878433227539, 0.04871782302856445, 0.04876867294311524, 0.048818336486816404, 0.048768798828125, 0.04884076690673828, 0.04886972808837891, 0.048928001403808596, 0.049009407043457034, 0.049040512084960936, 0.04918544006347656, 0.04900883102416992, 0.04900566482543945, 0.048843296051025394, 0.04884435272216797, 0.04912009429931641, 0.04889968109130859, 0.048834976196289064, 0.04883660888671875, 0.04890358352661133, 0.048847457885742185, 0.04879974365234375, 0.0489615364074707, 0.04883769607543945, 0.04891664123535156, 0.04892956924438477, 0.04893900680541992, 0.04898313522338867, 0.049150463104248046, 0.04906639862060547, 0.04919091033935547, 0.04905984115600586, 0.050657920837402344, 0.049247936248779295, 0.048628032684326174, 0.048836063385009766, 0.04858319854736328, 0.04856399917602539, 0.04864332962036133, 0.048593856811523437, 0.04870966339111328, 0.048715328216552736, 0.048648094177246096, 0.048614017486572264, 0.048721729278564455, 0.048724063873291014, 0.04855984115600586, 0.048812320709228516, 0.04880179214477539, 0.04873625564575195, 0.048844799041748044, 0.04906585693359375, 0.049021022796630856, 0.048971614837646484, 0.04904288101196289, 0.04883020782470703, 0.04872496032714844, 0.04876838302612305, 0.04875516891479492, 0.04868320083618164, 0.04879494476318359, 0.04876563262939453, 0.048680126190185545, 0.04876985549926758, 0.0488653450012207, 0.048790977478027346, 0.04875420761108398, 0.048799713134765624, 0.04875094223022461, 0.04895401763916016, 0.04891852951049805, 0.049073150634765625, 0.04906086349487305, 0.04899225616455078, 0.049078144073486325, 0.04896185684204102, 0.048967487335205076, 0.04881375885009766, 0.048773342132568356, 0.04876911926269531, 0.048842079162597654, 0.04892086410522461, 0.04883267211914062, 0.04903708648681641, 0.04904390335083008, 0.0488611831665039, 0.04886092758178711, 0.04893926239013672, 0.048912384033203124, 0.04891347122192383, 0.04908736038208008, 0.04921145629882812, 0.048998401641845706, 0.04905574417114258, 0.049186336517333985, 0.050679039001464844, 0.04909542465209961, 0.0488138542175293, 0.048724193572998044, 
0.04863318252563477, 0.048644065856933594, 0.04868985748291016, 0.04866243362426758, 0.04866630554199219, 0.04860355377197265, 0.04867820739746094, 0.048631584167480466, 0.048511905670166014, 0.0486003189086914, 0.048589569091796875, 0.04862928009033203, 0.048726497650146486, 0.04863564682006836, 0.048891521453857424, 0.04899903869628906, 0.049121280670166016, 0.04910079956054687, 0.04900864028930664, 0.048846847534179685, 0.04876287841796875, 0.04879500961303711, 0.04866831970214844, 0.048689537048339844, 0.04876118469238281, 0.048804096221923825, 0.048840705871582034, 0.048824321746826174, 0.04868019104003906, 0.04878793716430664, 0.04873244857788086, 0.04875468826293945, 0.048756736755371094, 0.04887062454223633, 0.04892752075195313, 0.0490250244140625, 0.0490898551940918, 0.04908921432495117, 0.0490885124206543, 0.04909465789794922, 0.04895948791503906, 0.04879564666748047, 0.04877417755126953, 0.04892111968994141, 0.048939456939697264, 0.048898048400878906, 0.04879056167602539, 0.048880126953125, 0.04879792022705078, 0.048828670501708984, 0.048879039764404296, 0.04887385559082031, 0.04886073684692383, 0.048976158142089846, 0.04903567886352539, 0.049108928680419925, 0.04902054214477539, 0.04921177673339844, 0.04920729446411133, 0.050668033599853515, 0.049169857025146486, 0.04868044662475586, 0.048642974853515625, 0.048594558715820316, 0.04862582397460938, 0.048626144409179686, 0.04863750457763672, 0.048656063079833986, 0.048581119537353515, 0.048604991912841795, 0.04860128021240234, 0.04858665466308594, 0.0486769905090332, 0.04863961410522461, 0.048606784820556644, 0.048618175506591796, 0.0486420783996582, 0.04879520034790039, 0.04902937698364258, 0.049086719512939456, 0.04899020767211914, 0.048947200775146485, 0.0488221435546875, 0.0487589111328125, 0.04877907180786133, 0.04870905685424805, 0.04873900985717773, 0.04879183959960937, 0.04869049453735352, 0.04880006408691406, 0.04867638397216797, 0.04877782440185547, 0.048755870819091794, 0.04883292770385742, 0.04880636978149414, 0.048979969024658204, 0.048965633392333986, 0.0489246711730957, 0.0489697265625, 0.04910614395141601, 0.04912828826904297, 0.04907759857177734, 0.04893552017211914, 0.048846622467041016, 0.04901500701904297, 0.04887305450439453, 0.04899881744384765, 0.04878950500488281, 0.048830463409423826, 0.04886262512207031, 0.048857406616210936, 0.04887305450439453, 0.04894960021972656, 0.04881647872924805, 0.04915561676025391, 0.04892099380493164, 0.048911712646484376, 0.04904595184326172, 0.0490777587890625, 0.04905657577514649, 0.04913865661621094, 0.04911206436157227, 0.05077657699584961, 0.0491313591003418, 0.048688480377197266, 0.04865075302124024, 0.04863212966918945, 0.04869734573364258, 0.04862713623046875, 0.048685630798339846, 0.048713024139404294, 0.0487305908203125, 0.04862527847290039, 0.048804447174072264, 0.0485662727355957, 0.048666622161865236, 0.048647262573242187, 0.048806720733642575, 0.048936416625976566, 0.048812671661376955, 0.04887347030639649, 0.049053695678710936, 0.049258495330810545, 0.04898320007324219, 0.04889833450317383, 0.04880783843994141, 0.048777664184570316, 0.048734046936035155, 0.048664958953857425, 0.04879359817504883, 0.04880953598022461, 0.048773120880126954, 0.04884108734130859, 0.04874393463134766, 0.04876758575439453, 0.0487459831237793, 0.048963134765625, 0.04883958435058594, 0.04893804931640625, 0.04884326553344726, 0.0489128303527832, 0.04910489654541016, 0.0490164794921875, 0.04901286315917969, 0.049006816864013675, 0.04902092742919922, 0.048873214721679686, 
0.04893312072753906, 0.048910049438476565, 0.048828704833984375, 0.04880998229980469, 0.04889395141601562, 0.04890003204345703, 0.04887324905395508, 0.048852577209472656, 0.04927695846557617, 0.048865951538085935, 0.04895129776000977, 0.048931041717529294, 0.049062847137451175, 0.04906070327758789, 0.049102848052978515, 0.04912073516845703, 0.049146400451660154, 0.049293312072753906, 0.0506932487487793, 0.049121822357177734, 0.048857440948486326, 0.04867830276489258, 0.048634464263916016, 0.048801502227783206, 0.04862736129760742, 0.048695934295654296, 0.04865577697753906, 0.04868067169189453, 0.04862860870361328, 0.04870348739624023, 0.04867071914672851, 0.04871372985839844, 0.04864179229736328, 0.04864985656738281, 0.04864473724365234, 0.04873011016845703, 0.0487918701171875, 0.04905126571655274, 0.04917206573486328, 0.049002975463867185, 0.04893286514282227, 0.04888371276855469, 0.048775169372558595, 0.04875465774536133, 0.04866361618041992, 0.04874544143676758, 0.04875263977050781, 0.048750465393066406, 0.048765056610107424, 0.04874448013305664, 0.048838817596435546, 0.04893062210083008, 0.048827999114990236, 0.04878992080688477, 0.04879974365234375, 0.048928001403808596, 0.048922496795654295, 0.0490807991027832, 0.04902444839477539, 0.049178848266601564, 0.04908518218994141, 0.04903247833251953, 0.04895308685302734, 0.04886627197265625, 0.04888576126098633, 0.04883456039428711, 0.048936958312988284, 0.048807937622070315, 0.04879884719848633, 0.04888198471069336, 0.048898624420166015, 0.04887551879882813, 0.048870849609375, 0.04886995315551758, 0.04893286514282227, 0.049057247161865235, 0.04905353546142578, 0.04906975936889649, 0.049180992126464845, 0.04917523193359375, 0.0491071662902832]",tokens/s,20.46269080929475,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 270.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 98.12 MiB is free. Process 153478 has 14.64 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 1.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,883.63008,6089.998336,0.0,5687.476224,5685.679104,s,1,7.78438037109375,7.78438037109375,0.0,7.78438037109375,7.78438037109375,7.78438037109375,7.78438037109375,[7.78438037109375],,kWh,6.576947270840112e-06,7.000934223621233e-07,2.1116683559946337e-06,9.388709049196868e-06,,MB,1189.875712,6324.87936,0.0,5911.871488,5850.451456,s,10,2.056777145385742,0.2056777145385742,0.004448626608134176,0.20690196990966797,0.2102283950805664,0.2110288246154785,0.2116691682434082,"[0.1957662353515625, 0.20794924926757813, 0.2118292541503906, 0.20141807556152344, 0.2053102111816406, 0.21005052185058593, 0.20729306030273437, 0.20252210998535156, 0.20651087951660158, 0.20812754821777343]",tokens/s,1244.6657168197385,kWh,5.83880704108348e-06,6.439140566128552e-07,3.907203125760039e-06,1.0389924223456374e-05,tokens/kWh,24639255.734132536,MB,1195.4176,6324.87936,0.0,5911.871488,5850.454016,s,10,16.527911621093747,1.652791162109375,0.0011412461285353196,1.6527720336914062,1.6541783691406249,1.6546173828125,1.65496859375,"[1.6519759521484374, 1.6507183837890624, 1.6531632080078125, 1.65209814453125, 1.653014892578125, 1.65312939453125, 1.652145263671875, 1.654080810546875, 1.6525291748046875, 1.655056396484375]",tokens/s,38.11733838145422,kWh,4.846597837433516e-05,5.345557104455242e-06,3.206760343183956e-05,8.587913891062994e-05,tokens/kWh,733589.0974123634,,s,630,16.524775184631373,0.026229801880367214,0.00039806712816051144,0.02614680004119873,0.02639257678985596,0.026601132583618164,0.02876951681137085,"[0.02908582305908203, 0.02750464057922363, 0.026714080810546874, 0.026361312866210938, 0.026237503051757812, 0.026074655532836916, 0.025956672668457033, 0.025931936264038086, 0.025921152114868163, 0.025937824249267577, 0.025993663787841795, 0.02604854393005371, 0.02604649543762207, 0.02606390380859375, 0.02610246467590332, 0.026009632110595704, 0.026018016815185546, 0.026053951263427733, 0.02600934410095215, 0.02602079963684082, 0.026299583435058595, 0.02609446334838867, 0.026077280044555663, 0.026002656936645507, 0.026143360137939452, 0.026161151885986327, 0.026105951309204102, 0.026224576950073242, 0.02614255905151367, 0.026087615966796877, 0.026091552734375, 0.026128288269042968, 0.026177600860595705, 0.026077280044555663, 0.026160991668701172, 0.026396032333374023, 0.026305152893066407, 0.026381599426269532, 0.026375999450683595, 0.026299392700195313, 0.026300319671630858, 0.026243072509765625, 0.02625267219543457, 0.02626576042175293, 0.026159584045410158, 0.026189504623413087, 0.02611369514465332, 0.026220447540283204, 0.02602579116821289, 0.026092191696166993, 0.026028383255004884, 0.02674403190612793, 0.026112831115722657, 0.026058591842651368, 
0.02604038429260254, 0.02612428855895996, 0.026025983810424806, 0.026089536666870118, 0.026091455459594726, 0.02626505661010742, 0.026153152465820312, 0.026101696014404298, 0.026163616180419923, 0.029014015197753908, 0.027404287338256835, 0.026523839950561522, 0.026414911270141603, 0.026066591262817383, 0.025868640899658204, 0.02589286422729492, 0.025948160171508788, 0.02600873565673828, 0.02601251220703125, 0.02609459114074707, 0.02590332794189453, 0.0260677433013916, 0.026040319442749024, 0.026062719345092772, 0.02606729507446289, 0.026484512329101564, 0.026181184768676757, 0.026202144622802733, 0.02618409538269043, 0.026166656494140624, 0.026102624893188476, 0.026095039367675783, 0.02607904052734375, 0.02605500793457031, 0.026114240646362304, 0.02601683235168457, 0.02608016014099121, 0.025964576721191405, 0.026109952926635743, 0.02595020866394043, 0.026009599685668947, 0.025956352233886718, 0.02612633514404297, 0.026281984329223632, 0.026241024017333983, 0.026331327438354493, 0.026535551071166993, 0.02639072036743164, 0.02630860710144043, 0.026204160690307617, 0.026201215744018555, 0.026215295791625976, 0.026243072509765625, 0.026176895141601562, 0.02618841552734375, 0.026064895629882814, 0.02610348892211914, 0.026203680038452148, 0.026175935745239257, 0.026126272201538087, 0.02626806449890137, 0.02608742332458496, 0.026048511505126954, 0.026175487518310548, 0.02612019157409668, 0.026074367523193358, 0.026019935607910157, 0.026067615509033203, 0.026003007888793946, 0.02611859130859375, 0.026025983810424806, 0.02613657569885254, 0.028803647994995116, 0.027336063385009764, 0.026589727401733397, 0.026375808715820313, 0.02618351936340332, 0.026090015411376954, 0.02610518455505371, 0.026058752059936522, 0.02603411293029785, 0.02604105567932129, 0.026161151885986327, 0.026077184677124023, 0.026111936569213866, 0.026074911117553713, 0.026228832244873046, 0.02609689521789551, 0.02612944030761719, 0.026127647399902344, 0.026116735458374025, 0.026040319442749024, 0.026036319732666017, 0.025989023208618164, 0.02606879997253418, 0.02602412796020508, 0.026071039199829102, 0.026071039199829102, 0.02609766387939453, 0.026040319442749024, 0.026066944122314452, 0.026181631088256836, 0.026262624740600586, 0.026237503051757812, 0.02617788887023926, 0.02616435241699219, 0.026256256103515625, 0.026078208923339844, 0.026284736633300783, 0.026503488540649413, 0.026384384155273437, 0.026241024017333983, 0.02631046485900879, 0.02616339111328125, 0.02624025535583496, 0.02619603157043457, 0.026255680084228517, 0.026132928848266603, 0.026183616638183593, 0.026208255767822267, 0.026187776565551758, 0.0262139835357666, 0.026101184844970704, 0.02622768020629883, 0.02617500877380371, 0.026159584045410158, 0.026167295455932618, 0.02631648063659668, 0.02615449523925781, 0.02621113586425781, 0.026230783462524415, 0.0262509765625, 0.02623855972290039, 0.026286176681518555, 0.02621091270446777, 0.028979391098022462, 0.027429183959960936, 0.026789247512817384, 0.026254112243652344, 0.02607244873046875, 0.02591401672363281, 0.025976736068725585, 0.026021984100341795, 0.026046464920043946, 0.026125343322753906, 0.02616009521484375, 0.026126367568969727, 0.026097440719604494, 0.02618592071533203, 0.02612224006652832, 0.026097471237182618, 0.02591948890686035, 0.026046655654907228, 0.025937248229980468, 0.025999488830566405, 0.025943935394287108, 0.02605504035949707, 0.025962783813476564, 0.02631475257873535, 0.026001407623291017, 0.02607267189025879, 0.02605673599243164, 0.026173824310302733, 0.026075328826904297, 
0.02602560043334961, 0.026062559127807618, 0.02607302474975586, 0.026042911529541017, 0.026187776565551758, 0.02609494400024414, 0.026038944244384767, 0.02622198486328125, 0.0265133113861084, 0.026496864318847655, 0.02644406318664551, 0.026419776916503906, 0.02637968063354492, 0.026405376434326173, 0.026375839233398438, 0.02631056022644043, 0.02615078353881836, 0.02611881637573242, 0.026183679580688478, 0.026144607543945313, 0.0260916805267334, 0.026033664703369142, 0.026110464096069336, 0.02609766387939453, 0.026136640548706055, 0.026150848388671873, 0.026175487518310548, 0.026187776565551758, 0.026111007690429688, 0.02621526336669922, 0.026230016708374024, 0.02618867111206055, 0.026209407806396485, 0.026193824768066407, 0.02877788734436035, 0.027269535064697266, 0.026574655532836913, 0.02625369644165039, 0.02607923126220703, 0.025978271484375, 0.026099967956542968, 0.026040672302246094, 0.02604960060119629, 0.026206720352172853, 0.025977407455444336, 0.02611910438537598, 0.026022848129272462, 0.026082752227783203, 0.026063520431518553, 0.026059776306152343, 0.02613337516784668, 0.026072128295898438, 0.026131423950195312, 0.026015743255615235, 0.02609951972961426, 0.02614396858215332, 0.026074079513549803, 0.02612393569946289, 0.02615110397338867, 0.02611199951171875, 0.026089183807373045, 0.02615340805053711, 0.026138463973999024, 0.026165407180786134, 0.026184768676757814, 0.026155967712402344, 0.026105344772338866, 0.026183839797973632, 0.02622323226928711, 0.02626527976989746, 0.02635100746154785, 0.026368383407592774, 0.026278144836425783, 0.026291711807250977, 0.02733235168457031, 0.02627577590942383, 0.02606368064880371, 0.02618742370605469, 0.02616364860534668, 0.02611935997009277, 0.026145503997802733, 0.0261345272064209, 0.02614681625366211, 0.026174816131591797, 0.026079391479492186, 0.026136287689208983, 0.026118911743164063, 0.026228448867797852, 0.026161472320556642, 0.0263272647857666, 0.02626304054260254, 0.02619830322265625, 0.02615648078918457, 0.02614963150024414, 0.026150976181030273, 0.026184768676757814, 0.026147520065307617, 0.028950944900512695, 0.02754102325439453, 0.02664703941345215, 0.026487775802612305, 0.02612326431274414, 0.02601296043395996, 0.026026815414428712, 0.026028160095214845, 0.026062623977661133, 0.026081279754638673, 0.026040319442749024, 0.026085376739501953, 0.026073183059692383, 0.026023168563842774, 0.02610243225097656, 0.026113536834716795, 0.02620022392272949, 0.026136159896850586, 0.026077184677124023, 0.026036991119384765, 0.026113887786865235, 0.026056863784790038, 0.026187776565551758, 0.026089471817016603, 0.02612633514404297, 0.026119327545166014, 0.026104671478271484, 0.026089471817016603, 0.026062847137451172, 0.02607923126220703, 0.026115840911865234, 0.025987104415893556, 0.02609312057495117, 0.02618025588989258, 0.026318143844604493, 0.026413951873779297, 0.026451711654663087, 0.026554431915283203, 0.026388479232788087, 0.026554079055786134, 0.02643494415283203, 0.02629315185546875, 0.026266752243041994, 0.026213247299194335, 0.026287519454956054, 0.026185983657836913, 0.026147071838378905, 0.026097728729248048, 0.026083391189575197, 0.02608236885070801, 0.02611712074279785, 0.026128288269042968, 0.02612633514404297, 0.026184703826904295, 0.02612022399902344, 0.02616819190979004, 0.026130752563476564, 0.02610918426513672, 0.026056224822998048, 0.026113023757934572, 0.026195968627929687, 0.02614476776123047, 0.026206207275390626, 0.02864633560180664, 0.02746691131591797, 0.02661667251586914, 0.026299808502197267, 
0.026088031768798828, 0.026023935317993165, 0.02615737533569336, 0.025958080291748047, 0.02599238395690918, 0.025977664947509766, 0.026034368515014648, 0.026026880264282227, 0.025964544296264647, 0.026043327331542968, 0.025956352233886718, 0.026060480117797852, 0.02604198455810547, 0.026047168731689455, 0.026034175872802736, 0.025956352233886718, 0.026073087692260744, 0.026046464920043946, 0.026162336349487305, 0.02604102325439453, 0.02604047966003418, 0.02609971237182617, 0.02610380744934082, 0.02604595184326172, 0.026122432708740234, 0.026294591903686524, 0.026056703567504884, 0.026074848175048827, 0.026102048873901367, 0.026128063201904295, 0.026150880813598634, 0.026405216217041016, 0.026504800796508788, 0.0264136962890625, 0.026425119400024413, 0.026730495452880858, 0.02632089614868164, 0.02628118324279785, 0.026286880493164064, 0.026207263946533204, 0.02619487953186035, 0.026243104934692382, 0.02620022392272949, 0.026179424285888674, 0.026095264434814452, 0.026175840377807617, 0.02615091133117676, 0.026189792633056642, 0.026130464553833006, 0.026228031158447265, 0.026106271743774414, 0.02621401596069336, 0.02615772819519043, 0.026112127304077148, 0.02615283203125, 0.02614463996887207, 0.02614694404602051, 0.026220640182495116, 0.026259199142456054, 0.0287490234375, 0.027416736602783202, 0.026610464096069337, 0.02627993583679199, 0.026183040618896484, 0.026068735122680663, 0.026051456451416016, 0.02590924835205078, 0.026040319442749024, 0.02607513618469238, 0.02595430374145508, 0.02599942398071289, 0.026081216812133788, 0.02612838363647461, 0.025987071990966795, 0.02599488067626953, 0.02601590347290039, 0.026042591094970702, 0.026097343444824218, 0.02608550453186035, 0.026068832397460936, 0.02610006332397461, 0.026052799224853516, 0.026027584075927736, 0.026144895553588867, 0.026095743179321288, 0.026122112274169922, 0.02613055992126465, 0.026179584503173828, 0.026171327590942383, 0.026197343826293944, 0.026188095092773436, 0.026230400085449218, 0.026151199340820313, 0.02620876884460449, 0.026332319259643553, 0.02655539131164551, 0.02662716865539551, 0.026835712432861328, 0.026390527725219725, 0.026269760131835938, 0.026351455688476563, 0.026326879501342774, 0.026349855422973634, 0.026243040084838867, 0.026265535354614258, 0.02624531173706055, 0.02617740821838379, 0.026179807662963867, 0.026094623565673828, 0.02621107292175293, 0.02618704032897949, 0.026188512802124024, 0.026158496856689452, 0.02617366409301758, 0.026160640716552733, 0.026235679626464843, 0.02617740821838379, 0.026196063995361327, 0.026255712509155274, 0.026207807540893555, 0.026271776199340822, 0.026239423751831054, 0.028657087326049803, 0.02729827117919922, 0.026654815673828124, 0.026273792266845702, 0.026127424240112305, 0.026085439682006835, 0.026022783279418947, 0.02614067268371582, 0.026076608657836915, 0.026028608322143553, 0.026083200454711915, 0.026054784774780272, 0.026007551193237305, 0.026023391723632813, 0.02611244773864746, 0.026136383056640625, 0.026078943252563477, 0.026208192825317382, 0.026019935607910157, 0.026118207931518554, 0.026071264266967775, 0.026016000747680665, 0.026046464920043946, 0.026021888732910156, 0.026109952926635743, 0.026179584503173828, 0.026171392440795898, 0.02606662368774414, 0.026072736740112304, 0.026091775894165038, 0.026123775482177734, 0.026065824508666992, 0.026111167907714845, 0.0260633602142334, 0.02616908836364746, 0.026368576049804686, 0.02645542335510254, 0.026507904052734375, 0.026392192840576173, 0.02643596839904785, 0.026257408142089843, 0.026226367950439453, 
0.026173791885375976, 0.026238943099975588, 0.026175487518310548, 0.02614067268371582, 0.02614886474609375, 0.026150400161743165, 0.026122751235961913, 0.026121759414672853, 0.02619817543029785, 0.02618400001525879, 0.026134239196777344, 0.02618601608276367, 0.02623251152038574, 0.026274112701416014, 0.02627299118041992, 0.026255615234375, 0.026195903778076172, 0.026147199630737306, 0.026251327514648436, 0.026203296661376954, 0.026174528121948242, 0.028991104125976563, 0.027527584075927734, 0.02672390365600586, 0.026355903625488283, 0.026128095626831056, 0.026053407669067382, 0.026044160842895507, 0.026058752059936522, 0.026109439849853516, 0.026097471237182618, 0.026138496398925782, 0.02608230400085449, 0.026090463638305663, 0.02609756851196289, 0.026138847351074218, 0.026127071380615235, 0.026056480407714844, 0.026196191787719727, 0.02614271926879883, 0.02611814308166504, 0.026174495697021485, 0.026112672805786132, 0.02614908790588379, 0.02605276870727539, 0.02612348747253418, 0.026141183853149414, 0.026194143295288085, 0.02617344093322754, 0.026121696472167968, 0.026130655288696288, 0.02617580795288086, 0.026187519073486328, 0.026116352081298828, 0.026154207229614257, 0.026215200424194337, 0.026396671295166017, 0.026537151336669923, 0.0265467529296875, 0.02655196762084961, 0.02637884712219238, 0.026367935180664062, 0.02629638481140137, 0.026265600204467773, 0.026201791763305664, 0.026208223342895506, 0.026201471328735352, 0.02621129608154297, 0.026197023391723633, 0.02624604797363281, 0.026187231063842773, 0.02614035224914551, 0.02614678382873535, 0.026168256759643554, 0.026165088653564452, 0.026169504165649414, 0.026181631088256836, 0.026228288650512695, 0.0261964168548584, 0.026278207778930664, 0.026191551208496092, 0.02619171142578125, 0.026191520690917968, 0.02619011116027832]",tokens/s,38.124573131011395,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,893.60384,6993.870848,0.0,6591.348736,6590.657536,s,1,7.77121240234375,7.77121240234375,0.0,7.77121240234375,7.77121240234375,7.77121240234375,7.77121240234375,[7.77121240234375],,kWh,6.3480529291609855e-06,6.927597937043735e-07,1.933890435998342e-06,8.974703158863701e-06,,MB,1201.840128,7258.112,0.0,6845.104128,6805.125632,s,10,2.2650821380615236,0.22650821380615235,0.008472289062471501,0.22889324951171874,0.23350831451416015,0.2335294563293457,0.23354636978149415,"[0.2033206329345703, 0.22952554321289062, 0.22568727111816406, 0.2335036163330078, 0.23355059814453125, 0.22826095581054687, 0.23148374938964844, 0.22461811828613282, 0.23193539428710938, 0.22319625854492187]",tokens/s,1130.2018399168826,kWh,6.0500819273805805e-06,6.669012736109231e-07,4.012928380408213e-06,1.0729911581399719e-05,tokens/kWh,23858537.701631717,MB,1207.382016,7260.209152,0.0,6847.20128,6805.128192,s,10,17.829971557617185,1.7829971557617186,0.0016620133987938857,1.7831369018554688,1.784479443359375,1.78551083984375,1.78633595703125,"[1.7804403076171875, 1.784250244140625, 1.781853759765625, 1.78102978515625, 1.783011962890625, 1.782234375, 1.7839918212890624, 1.786542236328125, 1.7832618408203125, 
1.783355224609375]",tokens/s,35.33376359935112,kWh,5.2520504258035046e-05,5.793125832932468e-06,3.47758248727912e-05,9.30894549637587e-05,tokens/kWh,676768.3839649396,,s,630,17.82668785095216,0.028296329922146268,0.0004041545234517871,0.02821990394592285,0.028482163047790526,0.02860131101608276,0.030974646682739256,"[0.03088345527648926, 0.029147520065307617, 0.028461055755615236, 0.02820863914489746, 0.028133888244628907, 0.028096511840820314, 0.02811903953552246, 0.02809814453125, 0.02803139114379883, 0.02802796745300293, 0.028078271865844728, 0.02801545524597168, 0.02813327980041504, 0.02811494445800781, 0.028159807205200196, 0.02809609603881836, 0.028211679458618164, 0.028248191833496094, 0.028067039489746093, 0.028220191955566406, 0.02807516860961914, 0.02808041572570801, 0.02815238380432129, 0.02811849594116211, 0.02810483169555664, 0.02818611145019531, 0.028086816787719727, 0.028037023544311524, 0.028151487350463866, 0.028164255142211915, 0.028123584747314453, 0.028116832733154296, 0.028313951492309572, 0.028484640121459962, 0.028543424606323243, 0.028747808456420897, 0.028541248321533205, 0.02845100784301758, 0.028383232116699218, 0.028341728210449217, 0.0282541446685791, 0.02825481605529785, 0.028291072845458985, 0.02814771270751953, 0.028219392776489258, 0.02824336051940918, 0.028150175094604494, 0.028120832443237306, 0.02816659164428711, 0.028196416854858398, 0.02825222396850586, 0.028193151473999024, 0.028167903900146483, 0.02812668800354004, 0.028136255264282227, 0.028073087692260742, 0.02804128074645996, 0.028242015838623048, 0.028213951110839845, 0.028193952560424805, 0.028251007080078126, 0.028239871978759764, 0.02819071960449219, 0.031158464431762695, 0.02950556755065918, 0.028646623611450196, 0.028467967987060548, 0.02813257598876953, 0.028119647979736328, 0.028106847763061524, 0.028144800186157226, 0.028191360473632812, 0.028089887619018556, 0.028047807693481447, 0.028073696136474608, 0.028183168411254882, 0.028225439071655273, 0.028185728073120118, 0.028238176345825195, 0.02822003173828125, 0.028250112533569335, 0.028243967056274414, 0.028239551544189452, 0.028201280593872072, 0.028165599822998048, 0.028201503753662108, 0.02818832015991211, 0.02812758445739746, 0.028254079818725585, 0.028223775863647462, 0.028176223754882813, 0.02818252754211426, 0.02814566421508789, 0.02818662452697754, 0.02811903953552246, 0.028434207916259766, 0.028524415969848633, 0.028494176864624025, 0.02855891227722168, 0.028463552474975586, 0.028441728591918944, 0.028345216751098634, 0.028263551712036133, 0.028297279357910158, 0.028310335159301758, 0.028276063919067382, 0.028305503845214845, 0.028203039169311522, 0.02821116828918457, 0.02823628807067871, 0.028141279220581055, 0.028178783416748048, 0.02833523178100586, 0.028191295623779297, 0.02806959915161133, 0.028246623992919922, 0.028243263244628905, 0.02829587173461914, 0.02816409683227539, 0.028223424911499023, 0.028315711975097656, 0.028310848236083985, 0.02832054328918457, 0.02829302406311035, 0.02826803207397461, 0.028307775497436523, 0.03097494316101074, 0.02935807991027832, 0.028685632705688476, 0.028316352844238283, 0.028077119827270507, 0.027946943283081054, 0.028030975341796875, 0.02794304084777832, 0.028058496475219727, 0.028065792083740236, 0.028185663223266603, 0.028095487594604493, 0.028022655487060545, 0.028184640884399415, 0.028117088317871092, 0.02798076820373535, 0.027982080459594726, 0.0281824951171875, 0.028068000793457032, 0.028090944290161134, 0.028026880264282225, 0.02818252754211426, 0.028301311492919923, 
0.028108287811279296, 0.028176319122314452, 0.02815376091003418, 0.028185247421264648, 0.028078079223632812, 0.02820217514038086, 0.028202816009521483, 0.028082784652709962, 0.028219871520996094, 0.02827667236328125, 0.028464384078979492, 0.028680959701538087, 0.028491775512695314, 0.028569343566894532, 0.028451072692871095, 0.028325887680053712, 0.02829516792297363, 0.028237855911254883, 0.02816771125793457, 0.028133119583129883, 0.028223743438720705, 0.028268991470336916, 0.028243967056274414, 0.028215551376342775, 0.028277599334716796, 0.02830019187927246, 0.028268287658691407, 0.028221696853637696, 0.02830335998535156, 0.028233184814453124, 0.028303903579711916, 0.028241567611694336, 0.02817840003967285, 0.02821164894104004, 0.02822662353515625, 0.02833020782470703, 0.028266496658325195, 0.02828678321838379, 0.028285024642944336, 0.028256223678588866, 0.03097596740722656, 0.029414976119995117, 0.028522655487060546, 0.028533023834228517, 0.02813724708557129, 0.02813577651977539, 0.028090240478515625, 0.02791334342956543, 0.028062047958374022, 0.02798441505432129, 0.02807526397705078, 0.028005119323730468, 0.02812451171875, 0.02822006416320801, 0.02824982452392578, 0.028129568099975587, 0.028217599868774413, 0.028220224380493163, 0.028277280807495118, 0.02815827178955078, 0.028117216110229493, 0.02818035125732422, 0.028192768096923827, 0.02813747215270996, 0.028205055236816406, 0.02816204833984375, 0.028184064865112303, 0.028197376251220704, 0.028188159942626953, 0.02813577651977539, 0.02813871955871582, 0.028160543441772462, 0.028230047225952147, 0.02839344024658203, 0.028505760192871092, 0.02848188781738281, 0.02843440055847168, 0.028282943725585936, 0.028216896057128907, 0.02825619125366211, 0.028241600036621094, 0.02817292785644531, 0.028261951446533203, 0.028229280471801756, 0.028241888046264648, 0.028304224014282228, 0.02807414436340332, 0.02817555236816406, 0.028116863250732423, 0.028156959533691406, 0.028266399383544923, 0.028196863174438477, 0.028250272750854493, 0.02823151969909668, 0.028221439361572266, 0.02813488006591797, 0.028176511764526367, 0.02823580741882324, 0.028191104888916015, 0.02815795135498047, 0.028116735458374023, 0.028186399459838866, 0.02811907196044922, 0.030925920486450195, 0.029234079360961913, 0.0285100154876709, 0.028225536346435546, 0.028153087615966795, 0.028179328918457033, 0.028075519561767577, 0.028041120529174804, 0.028097183227539062, 0.028063743591308594, 0.028231679916381838, 0.028188671112060547, 0.028194143295288087, 0.02812995147705078, 0.02815180778503418, 0.02812313652038574, 0.028207103729248048, 0.028248064041137694, 0.028196640014648437, 0.028145248413085938, 0.028090463638305665, 0.028254751205444337, 0.02820623970031738, 0.028148576736450194, 0.028092416763305664, 0.02816204833984375, 0.028217151641845704, 0.02813100814819336, 0.028121023178100585, 0.02823356819152832, 0.028167072296142577, 0.028179616928100587, 0.02837161636352539, 0.028460544586181642, 0.02852300834655762, 0.028487680435180664, 0.028493759155273437, 0.02843984031677246, 0.028427040100097656, 0.02836275291442871, 0.028276128768920897, 0.028293184280395508, 0.028289472579956055, 0.02823103904724121, 0.028215583801269532, 0.028213024139404297, 0.028239967346191407, 0.028265216827392577, 0.028138784408569335, 0.028251775741577147, 0.028247968673706055, 0.028251136779785156, 0.028218624114990234, 0.028240640640258788, 0.02830544090270996, 0.028173856735229492, 0.02831590461730957, 0.028294464111328126, 0.028297887802124024, 0.028354944229125975, 0.028186527252197266, 
0.02825004768371582, 0.028243904113769532, 0.03106732749938965, 0.029455167770385742, 0.028723392486572265, 0.02827449607849121, 0.028173919677734374, 0.028058015823364257, 0.028018592834472656, 0.02793654441833496, 0.02804972839355469, 0.028044544219970702, 0.02796771240234375, 0.028016639709472657, 0.028142112731933594, 0.028203327178955077, 0.02795644760131836, 0.027984352111816407, 0.02795724868774414, 0.028022783279418945, 0.028022335052490233, 0.028051904678344727, 0.028048799514770507, 0.028143455505371093, 0.028170528411865233, 0.028219551086425782, 0.028098880767822267, 0.028327680587768553, 0.028263904571533202, 0.028207456588745117, 0.02825657653808594, 0.028190528869628906, 0.02821561622619629, 0.02818662452697754, 0.02839567947387695, 0.028497215270996093, 0.028543231964111328, 0.028536479949951173, 0.028541568756103516, 0.02854707145690918, 0.028389280319213867, 0.02836479949951172, 0.02832329559326172, 0.02830384063720703, 0.028285375595092775, 0.028220191955566406, 0.028214208602905272, 0.028213247299194336, 0.028203231811523437, 0.02822915267944336, 0.028319007873535158, 0.028236223220825196, 0.028236095428466796, 0.028184799194335936, 0.02822310447692871, 0.02826892852783203, 0.028380512237548828, 0.02821993637084961, 0.028245471954345704, 0.028289632797241213, 0.028223552703857423, 0.028220895767211915, 0.02818239974975586, 0.028117536544799804, 0.028296768188476564, 0.030983871459960937, 0.0293885440826416, 0.028654144287109374, 0.028362272262573242, 0.028186943054199217, 0.028092544555664064, 0.02806172752380371, 0.02799407958984375, 0.0279836483001709, 0.028025087356567384, 0.028008447647094727, 0.027989023208618163, 0.028122079849243163, 0.02802895927429199, 0.028035039901733397, 0.028069759368896486, 0.02812940788269043, 0.02817024040222168, 0.02809836769104004, 0.02811836814880371, 0.028281312942504883, 0.02831920051574707, 0.028240352630615233, 0.028310976028442382, 0.02819756889343262, 0.028235456466674805, 0.028228223800659178, 0.028207103729248048, 0.02818217658996582, 0.028169984817504882, 0.028201568603515626, 0.02817228889465332, 0.028266496658325195, 0.028280351638793947, 0.02844905662536621, 0.02854265594482422, 0.02864899253845215, 0.02861555290222168, 0.028518495559692384, 0.028436128616333007, 0.028406112670898438, 0.028370943069458008, 0.02836070442199707, 0.02832115173339844, 0.028289663314819337, 0.02836400032043457, 0.02835536003112793, 0.028264448165893553, 0.028194559097290038, 0.0281944637298584, 0.02819526481628418, 0.028188831329345704, 0.028268543243408203, 0.028252159118652344, 0.028335615158081053, 0.028225439071655273, 0.028340063095092773, 0.028248832702636718, 0.028241119384765624, 0.02821567916870117, 0.028346784591674806, 0.028349920272827147, 0.028291616439819336, 0.031113216400146484, 0.02963046455383301, 0.028678144454956055, 0.028350303649902344, 0.028203168869018556, 0.028186399459838866, 0.028225759506225585, 0.028229440689086914, 0.02831100845336914, 0.02814963150024414, 0.028185440063476563, 0.028161312103271486, 0.028280927658081056, 0.028160383224487304, 0.028228927612304687, 0.028176319122314452, 0.028228607177734375, 0.02818604850769043, 0.02817286491394043, 0.028230688095092774, 0.028168895721435546, 0.028166431427001953, 0.028239871978759764, 0.028216991424560547, 0.02820470428466797, 0.0282303352355957, 0.02828886413574219, 0.028301088333129883, 0.02829350471496582, 0.028227584838867188, 0.028211200714111328, 0.028255584716796876, 0.028478111267089844, 0.028446367263793945, 0.02856380844116211, 0.02857779121398926, 
0.028538591384887697, 0.028471263885498047, 0.028344736099243165, 0.028271936416625978, 0.0283505916595459, 0.02829155158996582, 0.02834022331237793, 0.028284927368164063, 0.028260288238525392, 0.028307519912719726, 0.028286272048950196, 0.028301311492919923, 0.02832863998413086, 0.028317695617675782, 0.028245664596557616, 0.02830953598022461, 0.028231552124023437, 0.02821798324584961, 0.02824998474121094, 0.028202943801879883, 0.028217344284057616, 0.028223072052001953, 0.028312223434448242, 0.028339967727661133, 0.028293119430541993, 0.02836390495300293, 0.02833443260192871, 0.030973920822143553, 0.029417375564575195, 0.030160287857055663, 0.028219295501708985, 0.028242719650268554, 0.02815724754333496, 0.02811359977722168, 0.028176031112670898, 0.02815216064453125, 0.028266496658325195, 0.02820854377746582, 0.028351360321044922, 0.02818992042541504, 0.02817692756652832, 0.028129247665405272, 0.028340351104736327, 0.028180320739746093, 0.02812112045288086, 0.02812723159790039, 0.028073183059692385, 0.02819152069091797, 0.028036863327026366, 0.028061952590942383, 0.027999423980712892, 0.028113279342651367, 0.02823574447631836, 0.028237600326538086, 0.028156160354614258, 0.02810665512084961, 0.02819945526123047, 0.028229631423950196, 0.028081600189208984, 0.02839344024658203, 0.028404319763183594, 0.02855936050415039, 0.02855471992492676, 0.028583904266357422, 0.028453439712524415, 0.02837708854675293, 0.028338176727294922, 0.028291072845458985, 0.028258304595947265, 0.028298879623413088, 0.028238208770751953, 0.02822707176208496, 0.028166656494140626, 0.02835660743713379, 0.028210912704467773, 0.028141855239868164, 0.028180479049682617, 0.02814361572265625, 0.028062816619873046, 0.02810563278198242, 0.028085983276367188, 0.028012447357177735, 0.028156288146972658, 0.02819891166687012, 0.028246015548706056, 0.028106752395629882, 0.02809782409667969, 0.02806985664367676, 0.028106847763061524, 0.028093088150024415, 0.030982271194458007, 0.029081823348999024, 0.02852249526977539, 0.02823366355895996, 0.028166208267211914, 0.028106367111206055, 0.027926111221313478, 0.027988319396972657, 0.028348320007324217, 0.02811087989807129, 0.028252416610717774, 0.02817430305480957, 0.028153440475463868, 0.028117023468017577, 0.028149887084960936, 0.028209695816040038, 0.028196863174438477, 0.028177696228027342, 0.02825494384765625, 0.028211200714111328, 0.028229408264160157, 0.02822166442871094, 0.028194368362426756, 0.02824991989135742, 0.028236415863037108, 0.028280832290649413, 0.028194719314575196, 0.028210304260253907, 0.028154144287109373, 0.028209856033325195, 0.0281046085357666, 0.02894553565979004, 0.02820400047302246, 0.028470272064208983, 0.02861568069458008, 0.02853887939453125, 0.028521728515625, 0.028393983840942383, 0.028398080825805663, 0.02838297653198242, 0.0284628791809082, 0.028344032287597656, 0.028320192337036133, 0.028325952529907227, 0.028276512145996094, 0.028235519409179687, 0.02827107238769531, 0.028286495208740235, 0.02817084884643555, 0.02817420768737793, 0.0282903995513916, 0.028130016326904296, 0.028150751113891603, 0.028223648071289062, 0.02811292839050293, 0.028096479415893556, 0.028099231719970703, 0.028099872589111327, 0.028217727661132813, 0.028150272369384766, 0.028057600021362306, 0.02821084785461426, 0.028217695236206056]",tokens/s,35.340272139580364,, 
float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.73312,11766.9888,0.0,11364.466688,11346.760704,s,1,7.33246044921875,7.33246044921875,0.0,7.33246044921875,7.33246044921875,7.33246044921875,7.33246044921875,[7.33246044921875],,kWh,7.47033819588978e-06,8.1654833622551e-07,4.095558832017954e-06,1.2382445364133243e-05,,MB,1198.125056,12209.487872,0.0,11796.48,11746.412544,s,10,3.7185172729492186,0.3718517272949219,0.006461473483069043,0.3737565765380859,0.37697884826660155,0.3773531936645508,0.37765266998291014,"[0.3538907470703125, 0.3697769775390625, 0.3777275390625, 0.3751359558105469, 0.37063922119140624, 0.37192230224609374, 0.37301174926757813, 0.37450140380859376, 0.37689566040039063, 0.37501571655273436]",tokens/s,688.446445744118,kWh,1.0391857087498287e-05,1.1456491565947674e-06,6.935359954414993e-06,1.8472866198508047e-05,tokens/kWh,13858163.494990058,MB,1203.666944,12314.345472,0.0,11901.3376,11861.462528,s,10,29.90655712890625,2.990655712890625,0.001602449976219176,2.990564697265625,2.992509326171875,2.992895166015625,2.9932038378906247,"[2.99119287109375, 2.990176513671875, 2.990952880859375, 2.987164306640625, 2.99007763671875, 2.993281005859375, 2.991672607421875, 2.98997900390625, 2.98963671875, 2.992423583984375]",tokens/s,21.065614382976637,kWh,8.764680865500169e-05,9.667995484126776e-06,5.805966426378293e-05,0.0001553744684029114,tokens/kWh,405472.0228334471,,s,630,29.903308322906486,0.04746556876651825,0.00030393862297417704,0.047423313140869144,0.04770383567810059,0.04776728515625,0.04938013999938965,"[0.049531265258789064, 0.04775443267822266, 0.047350208282470704, 0.047147422790527346, 0.04717977523803711, 0.04716953659057617, 0.04721254348754883, 0.04719363021850586, 0.047161632537841794, 0.047263553619384766, 0.04717977523803711, 0.047390945434570314, 0.047201919555664065, 0.04719260787963867, 0.04730844879150391, 0.047287841796875, 0.04732320022583008, 0.04751436614990234, 0.04731289672851562, 0.04763852691650391, 0.04771811294555664, 0.04825526428222656, 0.04760166549682617, 0.047574718475341796, 0.047491710662841795, 0.04736991882324219, 0.04746851348876953, 0.04740099334716797, 0.047401119232177734, 0.04734672164916992, 0.0473526382446289, 0.04742115020751953, 0.04736438369750977, 0.04755267333984375, 0.04742947387695313, 0.04744192123413086, 0.04736745452880859, 0.047446849822998044, 0.04742339324951172, 0.04753830337524414, 0.04764044952392578, 0.047640544891357425, 0.047601696014404296, 0.047798431396484375, 0.04751103973388672, 0.04741360092163086, 0.047405055999755856, 0.04734531021118164, 0.04738896179199219, 0.047403072357177736, 0.0474337272644043, 0.047425376892089845, 0.04735919952392578, 0.04735644912719727, 0.0473392333984375, 0.04746105575561523, 0.04759545516967773, 0.047470718383789065, 0.0475032958984375, 0.047591487884521486, 0.04763616180419922, 0.047626495361328125, 
0.04767129516601563, 0.04958003234863281, 0.04782489776611328, 0.04731216049194336, 0.04717129516601563, 0.04710297775268555, 0.04718796920776367, 0.04716339111328125, 0.047228160858154296, 0.04717974472045899, 0.04716374588012695, 0.04717536163330078, 0.047182334899902346, 0.04717388916015625, 0.04715724945068359, 0.04720956802368164, 0.0472540168762207, 0.047341983795166014, 0.04735311889648437, 0.0472880630493164, 0.047532352447509765, 0.047655582427978516, 0.047656097412109376, 0.0475880012512207, 0.04742982482910156, 0.04740691375732422, 0.04736428833007812, 0.04734975814819336, 0.04735385513305664, 0.047359840393066406, 0.04747280120849609, 0.04750956726074219, 0.047396640777587894, 0.04734787368774414, 0.047398494720458983, 0.047428001403808595, 0.04737571334838867, 0.047334049224853514, 0.04729651260375976, 0.04754025650024414, 0.047564769744873045, 0.04761190414428711, 0.04762758255004883, 0.04761260986328125, 0.04759756851196289, 0.0475906867980957, 0.04751228713989258, 0.04744499206542969, 0.047424510955810545, 0.04733542251586914, 0.04749929428100586, 0.04742569732666015, 0.047451358795166015, 0.0474159049987793, 0.04742086410522461, 0.04749574279785156, 0.047490432739257814, 0.04746623992919922, 0.047522014617919925, 0.04756086349487305, 0.04762060928344727, 0.047720447540283206, 0.04780646514892578, 0.047742977142333984, 0.049427967071533206, 0.047829185485839844, 0.047368160247802736, 0.04734159851074219, 0.04732345581054687, 0.04732723236083984, 0.04720374298095703, 0.047328960418701174, 0.047227809906005856, 0.04731014251708984, 0.047291072845458984, 0.047241214752197266, 0.04722073745727539, 0.047288318634033204, 0.047210369110107425, 0.04730233764648437, 0.04738851165771484, 0.047347934722900394, 0.04742921447753906, 0.04755852890014649, 0.04764057540893555, 0.04767635345458984, 0.04753609466552734, 0.047497215270996096, 0.04742342376708984, 0.04735136032104492, 0.047299072265625, 0.04731679916381836, 0.04746054458618164, 0.04749052810668945, 0.04742367935180664, 0.04742998504638672, 0.0475074577331543, 0.047989856719970705, 0.047370784759521486, 0.04733116912841797, 0.047370784759521486, 0.047326335906982424, 0.047436511993408204, 0.047531391143798826, 0.04763692855834961, 0.04762249755859375, 0.04758937454223633, 0.04755244827270508, 0.047513729095458986, 0.047496479034423826, 0.04743228912353516, 0.04745529556274414, 0.047348064422607423, 0.04734838485717773, 0.04733468627929688, 0.047411937713623044, 0.04739219284057617, 0.04738515090942383, 0.047493057250976564, 0.047403072357177736, 0.04739686584472656, 0.04739891052246094, 0.047486976623535154, 0.04753561782836914, 0.04763052749633789, 0.04770358276367188, 0.04770896148681641, 0.049199264526367185, 0.04776959991455078, 0.04727132797241211, 0.04711475372314453, 0.047143009185791014, 0.04722415924072266, 0.04713081741333008, 0.047131103515625, 0.047124481201171874, 0.04715126419067383, 0.047185630798339845, 0.04714918518066406, 0.04712870407104492, 0.04716259384155273, 0.047157375335693356, 0.047153919219970704, 0.04714201736450195, 0.04725113677978516, 0.04730569458007813, 0.04732928085327148, 0.047546367645263675, 0.047626014709472655, 0.04755068969726563, 0.04744025421142578, 0.04745107269287109, 0.04742009735107422, 0.0473372802734375, 0.047376575469970705, 0.047382366180419924, 0.0473043212890625, 0.04732691192626953, 0.047290401458740236, 0.04728224182128906, 0.04731776046752929, 0.04736163330078125, 0.04732771301269531, 0.04730054473876953, 0.047301631927490234, 0.04730752182006836, 0.04735939025878906, 
0.047532608032226566, 0.047638145446777344, 0.0476635856628418, 0.04757737731933594, 0.047504447937011716, 0.04749808120727539, 0.047446014404296875, 0.04743167877197266, 0.04742300796508789, 0.0474090576171875, 0.04746406555175781, 0.04742444610595703, 0.04741939163208008, 0.04746640014648437, 0.047548511505126956, 0.047449695587158204, 0.047421344757080076, 0.04757279968261719, 0.047530689239501954, 0.047566913604736326, 0.047550399780273436, 0.04770611190795898, 0.047786113739013675, 0.049470592498779296, 0.047790687561035154, 0.047207839965820314, 0.04715599822998047, 0.047086624145507815, 0.04719724655151367, 0.04718582534790039, 0.04713894271850586, 0.04713049697875977, 0.04717145538330078, 0.04721881484985352, 0.0471770248413086, 0.04715302276611328, 0.047219009399414064, 0.04715327835083008, 0.04725139236450195, 0.047255199432373045, 0.04725360107421875, 0.04736070251464844, 0.04745011138916016, 0.04766707229614258, 0.04762022399902344, 0.04746649551391602, 0.047423103332519534, 0.04735628890991211, 0.04731289672851562, 0.04724697494506836, 0.047271839141845705, 0.04728675079345703, 0.04734790420532226, 0.04732294464111328, 0.04723932647705078, 0.047293697357177734, 0.047313503265380856, 0.04726169586181641, 0.04735212707519531, 0.04732691192626953, 0.04741923141479492, 0.04747689437866211, 0.04758512115478516, 0.04763251113891601, 0.047666305541992186, 0.04764089584350586, 0.047686241149902345, 0.04755830383300781, 0.04751139068603515, 0.047452320098876954, 0.047472991943359376, 0.04748492813110351, 0.04759542465209961, 0.047508705139160154, 0.04761385726928711, 0.04755984115600586, 0.04751043319702149, 0.04749404907226563, 0.04757884979248047, 0.047542560577392576, 0.04775052642822265, 0.04767750549316406, 0.04771206283569336, 0.04784000015258789, 0.0478105583190918, 0.04774121475219727, 0.0494694709777832, 0.04780550384521484, 0.04737891387939453, 0.04730633544921875, 0.04725040054321289, 0.04723862457275391, 0.04722524642944336, 0.04727155303955078, 0.04732976150512695, 0.04728188705444336, 0.04728582382202148, 0.04721939086914063, 0.04724319839477539, 0.047265792846679686, 0.04719001770019531, 0.04729212951660156, 0.0473744010925293, 0.04743529510498047, 0.0475079345703125, 0.047605152130126956, 0.04771923065185547, 0.04763199996948242, 0.04751398468017578, 0.04745759963989258, 0.04741190338134765, 0.047355903625488284, 0.047437824249267575, 0.04738671875, 0.04732844924926758, 0.047263519287109375, 0.04728041458129883, 0.04739920043945312, 0.0474156494140625, 0.04746630477905273, 0.04742780685424805, 0.0474071044921875, 0.0474337272644043, 0.047433120727539066, 0.04755516815185547, 0.04757078552246094, 0.04772614288330078, 0.047725151062011716, 0.047805919647216796, 0.04776547241210938, 0.04769635009765625, 0.04753529739379883, 0.04749203109741211, 0.0474477424621582, 0.047419105529785156, 0.04740972900390625, 0.04737014389038086, 0.047502880096435544, 0.047505985260009764, 0.04741129684448242, 0.04745820617675781, 0.04753408050537109, 0.047521984100341794, 0.04763833618164062, 0.04774911880493164, 0.04784854507446289, 0.04791183853149414, 0.04778131103515625, 0.04781523132324219, 0.04939164733886719, 0.04778745651245117, 0.04736684799194336, 0.04717660903930664, 0.04723545455932617, 0.04730223846435547, 0.04720742416381836, 0.04719206237792969, 0.04725113677978516, 0.04730707168579101, 0.04731084823608398, 0.047222782135009765, 0.04721603012084961, 0.04727264022827148, 0.04721859359741211, 0.04725468826293945, 0.04733990478515625, 0.047430110931396485, 0.04744192123413086, 
0.04769331359863281, 0.04764313507080078, 0.0476135025024414, 0.047760894775390625, 0.04749817657470703, 0.0474600944519043, 0.04744182586669922, 0.04733996963500976, 0.047346656799316406, 0.047426174163818356, 0.04752742385864258, 0.047489856719970705, 0.04741939163208008, 0.04749311828613281, 0.04749926376342774, 0.0474458236694336, 0.04743596649169922, 0.0474582405090332, 0.04738044738769531, 0.04741334533691406, 0.04760723114013672, 0.04772220611572266, 0.047747230529785155, 0.04770681762695313, 0.04761740875244141, 0.047538047790527345, 0.04741401672363281, 0.04739276885986328, 0.0473642578125, 0.047374176025390624, 0.0474152946472168, 0.04741939163208008, 0.047382240295410154, 0.04746211242675781, 0.04740310287475586, 0.047456798553466795, 0.047454113006591796, 0.04744809722900391, 0.04747398376464844, 0.04752249526977539, 0.04752572631835938, 0.04775337600708008, 0.04774195098876953, 0.04771670532226562, 0.049351360321044924, 0.04777584075927734, 0.04735622406005859, 0.0471971206665039, 0.047207008361816405, 0.04724150466918945, 0.04719353485107422, 0.0472295036315918, 0.047163326263427736, 0.047175167083740234, 0.04719619369506836, 0.04715385437011719, 0.047172576904296874, 0.04718403244018555, 0.04716348648071289, 0.04721430587768555, 0.04729539108276367, 0.04731401443481445, 0.04737731170654297, 0.04750688171386719, 0.047661632537841794, 0.04766259384155273, 0.04751001739501953, 0.04746031951904297, 0.04736412811279297, 0.04732096099853516, 0.0472597770690918, 0.04728998565673828, 0.04728976058959961, 0.047311840057373045, 0.04730886459350586, 0.047343807220458986, 0.0473675537109375, 0.04736431884765625, 0.04733139038085937, 0.04740496063232422, 0.04740633773803711, 0.04733638381958008, 0.04734479904174805, 0.04758000183105469, 0.04774092864990234, 0.0476952018737793, 0.047653472900390625, 0.04763040161132812, 0.04759142303466797, 0.04765695953369141, 0.04756480026245117, 0.04756684875488281, 0.04764057540893555, 0.04748313522338867, 0.04740582275390625, 0.04744499206542969, 0.047572864532470706, 0.047575424194335934, 0.04745366287231445, 0.04746396636962891, 0.04746895980834961, 0.04749331283569336, 0.04756880187988281, 0.04752336120605469, 0.04765670394897461, 0.04773775863647461, 0.047678657531738285, 0.049426433563232425, 0.04777369689941406, 0.04732928085327148, 0.047289665222167966, 0.04718457412719727, 0.04719411087036133, 0.04714086532592773, 0.04722185516357422, 0.04720937728881836, 0.04713676834106445, 0.04722915267944336, 0.04722665786743164, 0.04718380737304687, 0.04727199935913086, 0.047203456878662106, 0.04725849533081055, 0.047288318634033204, 0.047271873474121096, 0.04731676864624024, 0.04752412796020508, 0.04757708740234375, 0.04761539077758789, 0.047523902893066405, 0.047458847045898436, 0.047388671875, 0.04736000061035156, 0.047265792846679686, 0.047265792846679686, 0.047288318634033204, 0.04740079879760742, 0.0473540153503418, 0.04731628799438477, 0.047298465728759766, 0.04730550384521484, 0.047355903625488284, 0.047355903625488284, 0.04733932876586914, 0.047360191345214846, 0.04743999862670899, 0.04747865676879883, 0.04760185623168945, 0.04757897567749023, 0.04756467056274414, 0.04754431915283203, 0.04752105712890625, 0.04757177734375, 0.04748287963867188, 0.04743932723999023, 0.04741308975219727, 0.04740371322631836, 0.047384449005126957, 0.047392833709716795, 0.047450080871582034, 0.047619487762451174, 0.047448768615722656, 0.047562496185302734, 0.047572959899902345, 0.04765315246582031, 0.04773654556274414, 0.04770624160766602, 0.04771619033813477, 
0.04775968170166016, 0.047768768310546876, 0.04935196685791016, 0.04778672027587891, 0.047314945220947265, 0.04724256134033203, 0.04726240158081055, 0.0472270393371582, 0.047267585754394534, 0.047257694244384765, 0.04719820785522461, 0.04719206237792969, 0.04718153762817383, 0.04720025634765625, 0.04719030380249024, 0.04721881484985352, 0.047236129760742186, 0.04722927856445312, 0.047311359405517575, 0.04729849624633789, 0.04730284881591797, 0.04753190231323242, 0.0477591667175293, 0.04764486312866211, 0.04760099029541016, 0.04754499053955078, 0.047423233032226564, 0.04739846420288086, 0.047293121337890626, 0.047290367126464845, 0.04739686584472656, 0.04751564788818359, 0.04740496063232422, 0.04740310287475586, 0.047366207122802734, 0.047432865142822266, 0.0474119987487793, 0.04758428955078125, 0.047510494232177736, 0.04750147247314453, 0.04761993789672852, 0.04757881546020508, 0.047618305206298825, 0.04770169448852539, 0.04770159912109375, 0.0476927375793457, 0.04759024047851562, 0.047657985687255856, 0.04753142547607422, 0.04764937591552734, 0.04758323287963867, 0.04752195358276367, 0.047666046142578126, 0.04749587249755859, 0.04741059112548828, 0.04746035385131836, 0.047467391967773435, 0.04744192123413086, 0.047480831146240236, 0.04749059295654297, 0.04771478271484375, 0.04771772766113281, 0.04770684814453125, 0.047681472778320313, 0.04765695953369141]",tokens/s,21.067903029224635,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 106.12 MiB is free. Process 169788 has 14.63 GiB memory in use. Of the allocated memory 14.52 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 272, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 232.12 MiB is free. Process 84440 has 14.51 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 536.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 140.12 MiB is free. Process 72412 has 14.60 GiB memory in use. Of the allocated memory 14.49 GiB is allocated by PyTorch, and 1.57 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 150.12 MiB is free. Process 151026 has 14.59 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.43 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model 
= self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 1248, in __init__ self.transformer = FalconModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in __init__ self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in <listcomp> self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 722, in __init__ self.mlp = FalconMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 697, in __init__ self.dense_h_to_4h = FalconLinear(hidden_size, config.ffn_hidden_size, bias=config.bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 3.29 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.05 GiB is free. Process 203358 has 13.69 GiB memory in use. Of the allocated memory 13.57 GiB is allocated by PyTorch, and 1.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 512, in __init__ self.mlp = MistralMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 152, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 170.12 MiB is free. Process 177265 has 14.57 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 757, in __init__ self.block_sparse_moe = MixtralSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in __init__ self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 672, in __init__ self.w3 = nn.Linear(self.hidden_dim, self.ffn_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 179726 has 14.74 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 9.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1262, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 802, in __init__ self.mlp = Qwen2MoeSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 740, in __init__ [Qwen2MoeMLP(config, 
intermediate_size=config.moe_intermediate_size) for _ in range(self.num_experts)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 740, in [Qwen2MoeMLP(config, intermediate_size=config.moe_intermediate_size) for _ in range(self.num_experts)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 349, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 12.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 81424 has 14.73 GiB memory in use. Of the allocated memory 14.62 GiB is allocated by PyTorch, and 948.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.878144,6174.998528,0.0,5779.750912,5773.960192,s,1,7.529166015625,7.529166015625,0.0,7.529166015625,7.529166015625,7.529166015625,7.529166015625,[7.529166015625],,kWh,9.614256604165423e-06,1.0533241153822993e-06,3.4061138360005905e-06,1.4073694555548313e-05,,MB,1105.555456,6491.66848,0.0,6085.935104,6038.345728,s,10,2.1597591094970703,0.21597591094970703,0.0036829909604721837,0.21662503814697265,0.21944928131103517,0.21986086349487305,0.22019012924194337,"[0.2079114227294922, 0.22027244567871093, 0.21362310791015626, 0.21935781860351564, 0.21618031311035157, 0.21913043212890626, 0.21372621154785157, 0.21706976318359375, 0.2131793975830078, 0.21930819702148438]",tokens/s,1185.3173757864743,kWh,6.226532404787322e-06,6.86674918086594e-07,4.1377515371491e-06,1.1050958860023016e-05,tokens/kWh,23165410.643784337,MB,1110.44608,6512.64,0.0,6106.906624,6086.544896,s,10,16.33959387207031,1.633959387207031,0.0051381663713000875,1.6338790893554687,1.6407128662109374,1.640982470703125,1.641198154296875,"[1.63083251953125, 1.6313094482421875, 1.62900927734375, 1.6296138916015626, 1.636846923828125, 1.6254072265625, 1.63644873046875, 1.6412520751953126, 1.6406529541015624, 
1.6382208251953125]",tokens/s,38.556649873463215,kWh,4.7937475727713365e-05,5.287167085089536e-06,3.1846027249850915e-05,8.50706700626538e-05,tokens/kWh,740560.7591147579,,s,630,16.336016490936288,0.025930184906248065,0.0004121603329365757,0.02585750389099121,0.02618778533935547,0.026375424003601076,0.02839885004043579,"[0.028554208755493166, 0.02700819206237793, 0.02650809669494629, 0.025849376678466797, 0.026167327880859376, 0.02565996742248535, 0.025659263610839842, 0.02553856086730957, 0.02552182388305664, 0.025471328735351562, 0.02551807975769043, 0.02555084800720215, 0.025553983688354494, 0.025490367889404297, 0.025875808715820313, 0.025686559677124025, 0.025657472610473634, 0.02591744041442871, 0.025706144332885743, 0.02560047912597656, 0.025612159729003905, 0.025671680450439452, 0.025697887420654295, 0.025631135940551757, 0.02568191909790039, 0.025675519943237305, 0.025996992111206055, 0.025712608337402344, 0.025807455062866212, 0.025778175354003906, 0.02591139221191406, 0.02567977523803711, 0.025651199340820312, 0.025674816131591796, 0.025709503173828124, 0.025656671524047853, 0.025999839782714845, 0.026155231475830078, 0.026337087631225584, 0.02622480010986328, 0.026170560836791992, 0.02613667106628418, 0.025944543838500977, 0.025921152114868163, 0.025809535980224608, 0.025774080276489256, 0.0258121280670166, 0.02577008056640625, 0.026023712158203125, 0.025981632232666016, 0.025785856246948242, 0.025925600051879882, 0.025856447219848634, 0.025973119735717774, 0.02585740852355957, 0.02588035202026367, 0.025958560943603517, 0.025852319717407226, 0.02582044792175293, 0.025735488891601564, 0.025868288040161135, 0.02577631950378418, 0.026087392807006837, 0.028281280517578125, 0.02699679946899414, 0.026230783462524415, 0.025883647918701173, 0.026042848587036132, 0.025707040786743164, 0.025827007293701174, 0.025631040573120118, 0.025598016738891602, 0.025558464050292967, 0.025530879974365234, 0.025577472686767577, 0.025629888534545897, 0.025606048583984374, 0.025574304580688476, 0.025511072158813475, 0.02555788803100586, 0.02591542434692383, 0.02561836814880371, 0.025786304473876955, 0.025772096633911133, 0.02561193656921387, 0.025598304748535156, 0.02568560028076172, 0.025692575454711913, 0.025655616760253908, 0.025673023223876955, 0.025727359771728516, 0.02566774368286133, 0.02599510383605957, 0.025839616775512695, 0.02592086410522461, 0.025905759811401367, 0.025720767974853516, 0.025736640930175782, 0.025845855712890626, 0.026042816162109374, 0.026171552658081056, 0.026214527130126952, 0.026075008392333985, 0.0261529598236084, 0.026364927291870118, 0.026214527130126952, 0.02597318458557129, 0.025978912353515626, 0.025825696945190428, 0.025835647583007812, 0.025876352310180664, 0.025810943603515626, 0.025823232650756835, 0.025884096145629882, 0.025770559310913085, 0.02588387107849121, 0.026081151962280273, 0.025921472549438475, 0.025901952743530274, 0.0259400634765625, 0.02574336051940918, 0.025851743698120117, 0.02586025619506836, 0.025772031784057618, 0.025788415908813478, 0.02607251167297363, 0.02840150451660156, 0.026957984924316405, 0.0263372802734375, 0.0258306884765625, 0.025880319595336914, 0.026266143798828124, 0.02564358329772949, 0.025802112579345702, 0.025628480911254883, 0.025569280624389647, 0.02564908790588379, 0.02578505516052246, 0.02563484764099121, 0.025533567428588866, 0.025705343246459962, 0.025609695434570312, 0.025878591537475584, 0.025913408279418945, 0.025803167343139647, 0.0257860164642334, 0.02572118377685547, 0.025740991592407225, 0.025637184143066406, 
0.025599264144897462, 0.02575200080871582, 0.025769535064697265, 0.025713184356689452, 0.025652671813964845, 0.0259520320892334, 0.02580886459350586, 0.02580534362792969, 0.02564143943786621, 0.025701759338378906, 0.025709184646606445, 0.025677824020385744, 0.02571392059326172, 0.025899776458740233, 0.025980031967163086, 0.026053279876708985, 0.026046688079833985, 0.026025215148925782, 0.025963520050048827, 0.02591414451599121, 0.025856992721557618, 0.025817087173461914, 0.025803808212280274, 0.025800735473632812, 0.025758047103881836, 0.025769887924194337, 0.025745664596557617, 0.025753664016723632, 0.025756160736083986, 0.025872447967529296, 0.025802560806274414, 0.02573107147216797, 0.0257126407623291, 0.025830751419067384, 0.025850175857543945, 0.025792863845825194, 0.02572287940979004, 0.02572697639465332, 0.025778175354003906, 0.025771039962768555, 0.028329984664916992, 0.026820608139038086, 0.026093568801879883, 0.0257574405670166, 0.025628992080688476, 0.025628511428833007, 0.025511455535888673, 0.0254715518951416, 0.0255633602142334, 0.025519039154052733, 0.025509792327880858, 0.02546988868713379, 0.0255644474029541, 0.0256212158203125, 0.025630720138549806, 0.025587263107299803, 0.025622047424316407, 0.025605056762695314, 0.02558153533935547, 0.025534080505371093, 0.025586048126220704, 0.025558464050292967, 0.025516607284545897, 0.025593856811523437, 0.025677824020385744, 0.025632383346557618, 0.025646848678588866, 0.02560233688354492, 0.025637216567993164, 0.025594112396240234, 0.025655040740966795, 0.025659040451049806, 0.025633119583129884, 0.025612384796142577, 0.025595808029174806, 0.025696287155151366, 0.025866207122802735, 0.025892288208007812, 0.026063104629516602, 0.02611801528930664, 0.025981184005737304, 0.026070880889892577, 0.02657417678833008, 0.026955968856811525, 0.025932607650756837, 0.0259421443939209, 0.026011520385742188, 0.025829376220703124, 0.0261724796295166, 0.025999807357788087, 0.025917919158935546, 0.02618704032897949, 0.026061727523803712, 0.0261363525390625, 0.02583763122558594, 0.026006975173950196, 0.026034751892089845, 0.02612633514404297, 0.025894912719726562, 0.02585759925842285, 0.025887071609497072, 0.025996448516845704, 0.0259399356842041, 0.02850204849243164, 0.027066335678100586, 0.026425344467163086, 0.025944063186645508, 0.025855167388916016, 0.02579756736755371, 0.0259050235748291, 0.0258950080871582, 0.025751455307006836, 0.025608192443847655, 0.025875999450683595, 0.025665407180786134, 0.025721439361572264, 0.025896383285522462, 0.02572496032714844, 0.025782976150512695, 0.025859935760498047, 0.026048511505126954, 0.02581711959838867, 0.02576585578918457, 0.025765663146972657, 0.025781856536865235, 0.02578019142150879, 0.025774751663208008, 0.025761119842529295, 0.025901695251464844, 0.025933088302612303, 0.025743104934692382, 0.02583843231201172, 0.025792608261108397, 0.025810272216796874, 0.025844480514526365, 0.02579452705383301, 0.02575971221923828, 0.02599020767211914, 0.025943008422851563, 0.02605241584777832, 0.02610736083984375, 0.026150880813598634, 0.026276063919067384, 0.026128927230834962, 0.026125600814819336, 0.026063392639160157, 0.025907392501831054, 0.02632089614868164, 0.02609916877746582, 0.02595484733581543, 0.025851743698120117, 0.026177183151245117, 0.025895423889160156, 0.02585379219055176, 0.025914880752563478, 0.025897632598876952, 0.026113567352294923, 0.026114751815795898, 0.02588035202026367, 0.025965984344482423, 0.025907487869262696, 0.025905471801757812, 0.026064895629882814, 0.025882623672485353, 
0.02586934471130371, 0.02584227180480957, 0.028392351150512696, 0.02694313621520996, 0.026177984237670898, 0.025800575256347658, 0.025653472900390627, 0.025547840118408202, 0.02547804832458496, 0.02550092887878418, 0.02551456069946289, 0.025540607452392578, 0.02547711944580078, 0.02566713523864746, 0.025676223754882814, 0.02555904006958008, 0.025543872833251952, 0.025508192062377928, 0.02557084846496582, 0.025559999465942382, 0.025638912200927736, 0.02568191909790039, 0.025673728942871094, 0.025636863708496094, 0.025579008102416992, 0.02556889533996582, 0.025627519607543944, 0.025656959533691407, 0.02564499282836914, 0.025665983200073243, 0.025601408004760743, 0.02561724853515625, 0.025614112854003907, 0.025638912200927736, 0.025686016082763673, 0.025665536880493164, 0.02570240020751953, 0.02574950408935547, 0.02592767906188965, 0.02609561538696289, 0.026201631546020506, 0.026130655288696288, 0.026071199417114256, 0.02599888038635254, 0.02594054412841797, 0.02585795211791992, 0.025870431900024415, 0.02573846435546875, 0.025715328216552733, 0.025793792724609375, 0.025772287368774415, 0.02576358413696289, 0.02576278305053711, 0.02586310386657715, 0.025833759307861328, 0.025803487777709962, 0.02579020881652832, 0.0258272647857666, 0.025801183700561524, 0.025748735427856446, 0.025941631317138673, 0.02579555130004883, 0.025767936706542968, 0.02570569610595703, 0.025832223892211913, 0.028497695922851562, 0.0270296630859375, 0.026333248138427735, 0.025767936706542968, 0.025632896423339845, 0.025534175872802736, 0.025747615814208983, 0.02611609649658203, 0.025558303833007813, 0.025531103134155273, 0.025566783905029297, 0.025541343688964845, 0.025577184677124023, 0.02558998489379883, 0.025657312393188476, 0.025593215942382813, 0.025573471069335937, 0.025653600692749023, 0.02572697639465332, 0.025681535720825197, 0.025627103805541993, 0.025661312103271484, 0.02572496032714844, 0.025613632202148438, 0.025596704483032227, 0.025683456420898438, 0.026134944915771483, 0.025990560531616212, 0.02590985679626465, 0.02599504089355469, 0.026085535049438478, 0.02596665573120117, 0.02602947235107422, 0.025977439880371093, 0.025800703048706054, 0.02611404800415039, 0.026351680755615236, 0.026268768310546874, 0.026346303939819335, 0.02637775993347168, 0.026368192672729492, 0.026153280258178712, 0.026177536010742186, 0.026241024017333983, 0.026055744171142578, 0.026186687469482422, 0.026197887420654296, 0.025946239471435546, 0.025994815826416016, 0.025949888229370117, 0.025839935302734374, 0.025913791656494142, 0.02595020866394043, 0.02611177635192871, 0.026031999588012694, 0.025866592407226562, 0.026093568801879883, 0.025956352233886718, 0.026082752227783203, 0.025913568496704103, 0.025943904876708983, 0.02597337532043457, 0.025966688156127928, 0.02856547164916992, 0.027140127182006837, 0.026556127548217772, 0.02602217674255371, 0.02590732765197754, 0.025788480758666993, 0.025646751403808593, 0.025686176300048828, 0.025915456771850587, 0.025747264862060547, 0.025820352554321288, 0.025879648208618163, 0.025964384078979493, 0.025719871520996095, 0.025795391082763672, 0.025703840255737305, 0.02575174331665039, 0.02566352081298828, 0.025834016799926758, 0.025976160049438476, 0.02579315185546875, 0.025982271194458006, 0.02581724739074707, 0.025866783142089844, 0.025955711364746094, 0.02593791961669922, 0.02600204849243164, 0.025987071990966795, 0.025809024810791014, 0.0260053768157959, 0.025792032241821288, 0.025943872451782226, 0.025879199981689454, 0.025845760345458983, 0.02602556800842285, 0.0261976318359375, 
0.026444608688354493, 0.02642521667480469, 0.02637628746032715, 0.02632908821105957, 0.026306144714355467, 0.02617305564880371, 0.02617545509338379, 0.02625619125366211, 0.02608252716064453, 0.026158143997192383, 0.026058240890502928, 0.02601558494567871, 0.02581747245788574, 0.02611155128479004, 0.026071487426757814, 0.025965856552124023, 0.025998048782348633, 0.026004608154296876, 0.026076032638549806, 0.025914783477783202, 0.025903615951538086, 0.02615920066833496, 0.026163040161132814, 0.025933631896972655, 0.02593564796447754, 0.02605708885192871, 0.02601900863647461, 0.028486623764038085, 0.027301759719848634, 0.02667747116088867, 0.026177440643310547, 0.025956352233886718, 0.02584547233581543, 0.025848352432250976, 0.02577791976928711, 0.025794559478759766, 0.025776128768920898, 0.025659391403198242, 0.02570444869995117, 0.02582032012939453, 0.025903968811035155, 0.025986976623535156, 0.025792608261108397, 0.025741024017333983, 0.026015775680541992, 0.025765695571899415, 0.026016191482543947, 0.025963680267333984, 0.02584441566467285, 0.025974943161010743, 0.025956031799316406, 0.02572447967529297, 0.025847936630249025, 0.025872127532958984, 0.025827999114990233, 0.025835744857788084, 0.025923072814941408, 0.026091775894165038, 0.026136831283569337, 0.025914623260498048, 0.02598784065246582, 0.025886720657348632, 0.025878528594970703, 0.026306560516357422, 0.026330463409423827, 0.026077856063842775, 0.026118303298950197, 0.026105312347412108, 0.02629465675354004, 0.025995264053344725, 0.02608332824707031, 0.026060991287231446, 0.026003263473510743, 0.025853952407836913, 0.025916704177856444, 0.025940223693847655, 0.02584009552001953, 0.026173152923583985, 0.02605036735534668, 0.026021408081054687, 0.025934783935546876, 0.02595840072631836, 0.02618742370605469, 0.025977344512939454, 0.026177215576171874, 0.025952415466308595, 0.025972736358642577, 0.02648828887939453, 0.025903648376464843, 0.025839616775512695, 0.028528608322143555, 0.027115264892578126, 0.026390783309936522, 0.026191871643066408, 0.025841663360595703, 0.02581817626953125, 0.02572319984436035, 0.025821535110473633, 0.02594793510437012, 0.025824928283691408, 0.025737119674682618, 0.025678783416748046, 0.02570444869995117, 0.025841663360595703, 0.02568806457519531, 0.025815040588378906, 0.025708608627319336, 0.025767871856689453, 0.025783519744873046, 0.02588912010192871, 0.025936128616333008, 0.025722528457641603, 0.025920032501220703, 0.025869983673095703, 0.025952512741088868, 0.025785791397094728, 0.0257542724609375, 0.025703584671020508, 0.025892831802368163, 0.02575971221923828, 0.025790687561035155, 0.026021951675415038, 0.02575833511352539, 0.02591974449157715, 0.02577961540222168, 0.02623910331726074, 0.026374368667602538, 0.026347232818603517, 0.02615247917175293, 0.02613324737548828, 0.026042367935180662, 0.026042367935180662, 0.026071008682250978, 0.02627743911743164, 0.02599977684020996, 0.026169408798217775, 0.0261910400390625, 0.02604047966003418, 0.025903615951538086, 0.02603843116760254, 0.02609766387939453, 0.025853952407836913, 0.02588057518005371, 0.025878528594970703, 0.025912832260131836, 0.025872896194458008, 0.02585420799255371, 0.02593356704711914, 0.026064895629882814, 0.025911296844482422, 0.02593187141418457, 0.026150175094604492, 0.026067583084106446]",tokens/s,38.56509329245249,, 
float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.08352,6174.998528,0.0,5779.750912,5773.960192,s,1,7.7395419921875,7.7395419921875,0.0,7.7395419921875,7.7395419921875,7.7395419921875,7.7395419921875,[7.7395419921875],,kWh,9.994903391674369e-06,1.0953041804092718e-06,3.4397249740053537e-06,1.4529932546088994e-05,,MB,1095.151616,6491.66848,0.0,6085.935104,6038.345728,s,10,2.1448580932617185,0.2144858093261719,0.0026452678682763126,0.21521057891845702,0.21694376525878906,0.21700848846435547,0.2170602670288086,"[0.2076636505126953, 0.21692938232421874, 0.21582981872558593, 0.2149775390625, 0.21544361877441406, 0.21384474182128907, 0.2147804412841797, 0.21227523803710938, 0.21707321166992188, 0.21604045104980468]",tokens/s,1193.5521552882635,kWh,6.232499810460658e-06,6.873281304016718e-07,4.159891034765951e-06,1.107971897562828e-05,tokens/kWh,23105279.16485205,MB,1099.776,6512.64,0.0,6106.906624,6086.544896,s,10,16.34459362792969,1.6344593627929687,0.005604136420545625,1.6363401489257812,1.6394989379882812,1.6408669006347656,1.641961270751953,"[1.6247216796875, 1.636993408203125, 1.6390897216796876, 1.6391949462890625, 1.6359698486328125, 1.634955322265625, 1.627091064453125, 1.62763232421875, 1.63671044921875, 1.64223486328125]",tokens/s,38.54485552479287,kWh,4.784965462412298e-05,5.277586740818806e-06,3.1805276626234714e-05,8.493251799117652e-05,tokens/kWh,741765.3625498888,,s,630,16.341073549270607,0.02593821198296925,0.0004003684958193192,0.02587648010253906,0.02615531234741211,0.02629103021621704,0.028321831398010255,"[0.028504127502441405, 0.026898399353027343, 0.026089408874511718, 0.02574118423461914, 0.025610015869140624, 0.02552422332763672, 0.025497119903564455, 0.02549238395690918, 0.025530527114868164, 0.02555001640319824, 0.02552511978149414, 0.025526016235351563, 0.025544704437255858, 0.02552217674255371, 0.02555084800720215, 0.025491455078125, 0.025591455459594726, 0.025532768249511718, 0.025536512374877928, 0.025540607452392578, 0.02569148826599121, 0.025616992950439454, 0.02561840057373047, 0.025754751205444334, 0.0256212158203125, 0.025647424697875978, 0.025589696884155272, 0.025595327377319337, 0.025629247665405273, 0.0256856632232666, 0.025683935165405274, 0.025722848892211915, 0.025692575454711913, 0.025634815216064453, 0.025727136611938477, 0.02568284797668457, 0.025804864883422853, 0.025985919952392578, 0.026102975845336916, 0.02615113639831543, 0.026013376235961914, 0.026021024703979493, 0.025937536239624023, 0.02591961669921875, 0.025876352310180664, 0.02587392044067383, 0.025856767654418945, 0.02578761672973633, 0.025791135787963868, 0.02573311996459961, 0.025728607177734376, 0.025786783218383787, 0.025790464401245116, 0.025784320831298828, 0.025776128768920898, 0.025827392578125, 0.025849023818969728, 0.025817951202392577, 0.025812671661376952, 0.025790847778320313, 0.025710432052612305, 0.025745407104492187, 0.025769279479980468, 
0.028258304595947265, 0.026853376388549805, 0.026139808654785157, 0.025758560180664063, 0.02566124725341797, 0.025710527420043944, 0.025781856536865235, 0.025791135787963868, 0.02577987289428711, 0.025882976531982422, 0.025825279235839844, 0.02570444869995117, 0.02587238311767578, 0.025820608139038085, 0.025772127151489257, 0.025956832885742187, 0.02571628761291504, 0.0257043514251709, 0.025907264709472657, 0.025809375762939454, 0.0258306884765625, 0.025844448089599608, 0.02579462432861328, 0.025888032913208008, 0.025910112380981447, 0.02596147155761719, 0.02587116813659668, 0.025858047485351563, 0.025937824249267577, 0.025958719253540038, 0.025874208450317383, 0.02575564765930176, 0.025968639373779297, 0.02588073539733887, 0.025882463455200195, 0.025862144470214843, 0.02609891128540039, 0.026186527252197264, 0.026232831954956053, 0.026042272567749023, 0.02619196891784668, 0.025995647430419922, 0.02600297546386719, 0.025996992111206055, 0.02592767906188965, 0.025993824005126953, 0.02593168067932129, 0.026070943832397463, 0.02613212776184082, 0.025948511123657227, 0.026040319442749024, 0.02595430374145508, 0.02598422431945801, 0.026015680313110353, 0.026076095581054688, 0.025995168685913086, 0.026041343688964845, 0.026024959564208985, 0.02612838363647461, 0.02586537551879883, 0.02601046371459961, 0.026011648178100585, 0.025948160171508788, 0.028299264907836914, 0.027129056930541993, 0.026391040802001952, 0.026112287521362305, 0.02572457695007324, 0.025762144088745116, 0.025656320571899413, 0.025699392318725586, 0.025771520614624024, 0.025801151275634766, 0.025634815216064453, 0.025831424713134765, 0.025807008743286133, 0.02568191909790039, 0.02580463981628418, 0.02586400032043457, 0.025733312606811522, 0.025832639694213868, 0.02566022491455078, 0.02572287940979004, 0.02587238311767578, 0.025968608856201173, 0.02602191925048828, 0.025835519790649415, 0.025784320831298828, 0.025738815307617187, 0.025850303649902345, 0.026075263977050782, 0.02616511917114258, 0.025911296844482422, 0.025979936599731444, 0.025959455490112304, 0.025847679138183595, 0.025878591537475584, 0.026015743255615235, 0.026089471817016603, 0.026183135986328127, 0.026545888900756837, 0.02611078453063965, 0.02634880065917969, 0.026229503631591797, 0.026226688385009765, 0.02604252815246582, 0.02597052764892578, 0.0260316162109375, 0.026122560501098634, 0.025904895782470704, 0.02593631935119629, 0.026060991287231446, 0.026068864822387697, 0.025902496337890626, 0.02609724807739258, 0.026115007400512695, 0.02601945686340332, 0.02591097640991211, 0.025891519546508788, 0.02591744041442871, 0.025957759857177735, 0.026071680068969726, 0.026137599945068358, 0.025967872619628907, 0.025921279907226563, 0.026077280044555663, 0.02840553665161133, 0.027213951110839844, 0.02630441665649414, 0.026099903106689453, 0.02587353515625, 0.02575961685180664, 0.0259102725982666, 0.025827520370483397, 0.025784128189086913, 0.02561561584472656, 0.025879295349121093, 0.025785472869873045, 0.02571558380126953, 0.02593388748168945, 0.025875680923461913, 0.025754335403442383, 0.025845760345458983, 0.025822751998901366, 0.02577574348449707, 0.025911584854125976, 0.02576646423339844, 0.02595756721496582, 0.025946975708007813, 0.02580886459350586, 0.026021600723266602, 0.025749439239501952, 0.025960800170898437, 0.02575334358215332, 0.025772287368774415, 0.025951616287231444, 0.025858688354492187, 0.025964544296264647, 0.025861568450927734, 0.02592972755432129, 0.025958335876464844, 0.025971328735351563, 0.026123743057250976, 0.026065439224243165, 
0.026187776565551758, 0.02625846481323242, 0.026104799270629885, 0.026171072006225586, 0.02627382469177246, 0.026079519271850586, 0.026087104797363283, 0.026245664596557618, 0.026100576400756834, 0.026006080627441405, 0.025921920776367187, 0.026044639587402343, 0.02590492820739746, 0.026089471817016603, 0.025990848541259767, 0.02596281623840332, 0.026066144943237304, 0.025803552627563477, 0.025836736679077148, 0.02607823944091797, 0.025966367721557616, 0.02588572883605957, 0.026189823150634766, 0.025929952621459963, 0.02612505531311035, 0.0286167049407959, 0.027272960662841798, 0.026245376586914063, 0.02609561538696289, 0.025882623672485353, 0.02592064094543457, 0.025670175552368165, 0.025641311645507814, 0.025651391983032228, 0.025636512756347655, 0.025614688873291016, 0.025890623092651367, 0.025790111541748047, 0.025695615768432618, 0.025656095504760744, 0.025692352294921873, 0.02572902488708496, 0.02593507194519043, 0.026022111892700196, 0.025846336364746095, 0.025710048675537108, 0.025600223541259765, 0.025647424697875978, 0.02583296012878418, 0.025762304306030274, 0.02592767906188965, 0.025724159240722657, 0.025682687759399414, 0.025806848526000976, 0.02580611228942871, 0.02602057647705078, 0.025757696151733397, 0.025997312545776367, 0.02575564765930176, 0.025806848526000976, 0.025958208084106444, 0.026136831283569337, 0.026128320693969725, 0.02615500831604004, 0.02612428855895996, 0.02615910339355469, 0.026038528442382813, 0.02606787109375, 0.026158048629760743, 0.02597260856628418, 0.02589695930480957, 0.025932992935180664, 0.02603091239929199, 0.026149120330810547, 0.02593142318725586, 0.02588035202026367, 0.025922143936157226, 0.02586595153808594, 0.02587388801574707, 0.025873023986816405, 0.02586614418029785, 0.026094911575317382, 0.02614143943786621, 0.026158912658691406, 0.02587660789489746, 0.025918527603149413, 0.025977792739868163, 0.0259051513671875, 0.028317695617675782, 0.02716806411743164, 0.02651215934753418, 0.026007455825805666, 0.025974239349365234, 0.02576643180847168, 0.02564240074157715, 0.0257030086517334, 0.0256777286529541, 0.025659488677978515, 0.0257475528717041, 0.02586614418029785, 0.0256777286529541, 0.025908319473266602, 0.025847999572753907, 0.025786176681518554, 0.025965503692626953, 0.025910655975341798, 0.025733823776245116, 0.025941951751708985, 0.025769567489624022, 0.02566806411743164, 0.025636863708496094, 0.025860095977783205, 0.026001407623291017, 0.02590105628967285, 0.026011743545532227, 0.02604003143310547, 0.025884000778198243, 0.025789024353027344, 0.02601907157897949, 0.025882783889770507, 0.025807712554931642, 0.02579654312133789, 0.025901119232177736, 0.02584351921081543, 0.025964736938476562, 0.026060800552368164, 0.026232831954956053, 0.0260928955078125, 0.026402496337890626, 0.026239967346191405, 0.025970111846923827, 0.02584783935546875, 0.025794944763183593, 0.025792863845825194, 0.02580851173400879, 0.02585737609863281, 0.02587923240661621, 0.025862144470214843, 0.02590086364746094, 0.02597702407836914, 0.025931167602539062, 0.02600956726074219, 0.02582966423034668, 0.025795072555541993, 0.025796607971191408, 0.025855552673339843, 0.02577043151855469, 0.025816543579101563, 0.025844255447387696, 0.025849119186401367, 0.025848543167114258, 0.028672000885009766, 0.027183103561401366, 0.026251264572143555, 0.02586604881286621, 0.025708736419677733, 0.02574950408935547, 0.02556620788574219, 0.02550886344909668, 0.025683967590332032, 0.025622528076171876, 0.025734975814819337, 0.025660608291625978, 0.025647647857666017, 
0.025546592712402345, 0.02551849555969238, 0.02553059196472168, 0.025544704437255858, 0.025561088562011718, 0.025613920211791992, 0.025638431549072267, 0.025668479919433593, 0.025665536880493164, 0.025634111404418944, 0.025596128463745118, 0.025610719680786133, 0.025747583389282225, 0.02568383979797363, 0.025675775527954102, 0.02558118438720703, 0.02560576057434082, 0.025622720718383788, 0.025607744216918946, 0.025689088821411132, 0.025708511352539064, 0.025778207778930664, 0.02576383972167969, 0.026013696670532226, 0.02612633514404297, 0.02600137519836426, 0.025987104415893556, 0.026062847137451172, 0.02599068832397461, 0.025960735321044922, 0.025888160705566408, 0.02579862403869629, 0.025823328018188478, 0.025795263290405275, 0.025769983291625977, 0.025759487152099608, 0.025751583099365233, 0.02582966423034668, 0.025830911636352538, 0.026020191192626954, 0.02585523223876953, 0.025770143508911134, 0.025720863342285155, 0.02580768013000488, 0.025728864669799803, 0.02577952003479004, 0.025787103652954103, 0.02580672073364258, 0.02580454444885254, 0.025872768402099608, 0.028495744705200197, 0.027072288513183593, 0.026278240203857422, 0.02599443244934082, 0.025718687057495117, 0.025629600524902343, 0.025589792251586915, 0.025525535583496094, 0.025537216186523437, 0.02551535987854004, 0.02561724853515625, 0.0256135368347168, 0.02559619140625, 0.025547071456909178, 0.025600000381469725, 0.02564233589172363, 0.0256529598236084, 0.025649728775024413, 0.025663616180419922, 0.02569651222229004, 0.025657344818115234, 0.0256646728515625, 0.025614368438720704, 0.025671552658081055, 0.02563987159729004, 0.025645055770874024, 0.025638912200927736, 0.025716703414916994, 0.02572496032714844, 0.025741119384765625, 0.025744640350341796, 0.025713600158691407, 0.025774080276489256, 0.025694208145141603, 0.02570240020751953, 0.025734495162963868, 0.02592425537109375, 0.02604035186767578, 0.026072256088256834, 0.026082080841064455, 0.025980768203735353, 0.025905311584472655, 0.025993215560913087, 0.02590105628967285, 0.025832511901855468, 0.02584419250488281, 0.025744863510131836, 0.025789440155029295, 0.025778175354003906, 0.025826719284057616, 0.025789024353027344, 0.025852928161621092, 0.02584582328796387, 0.02581190490722656, 0.025829376220703124, 0.025792512893676758, 0.025781696319580077, 0.025999807357788087, 0.025897087097167967, 0.025841312408447267, 0.025860448837280274, 0.025802751541137696, 0.025827327728271485, 0.028323520660400392, 0.027041791915893554, 0.026205184936523438, 0.025771936416625976, 0.025613983154296874, 0.025598207473754884, 0.025546335220336915, 0.025534975051879884, 0.025536672592163086, 0.025793567657470703, 0.025671968460083006, 0.02557792091369629, 0.025574880599975584, 0.0256231689453125, 0.02572902488708496, 0.025783903121948244, 0.025860511779785156, 0.025847583770751952, 0.02596441650390625, 0.025874784469604492, 0.025960447311401368, 0.025960447311401368, 0.025784320831298828, 0.025927072525024415, 0.025969247817993164, 0.02581667137145996, 0.025909664154052735, 0.0260032958984375, 0.026056863784790038, 0.025993215560913087, 0.025960447311401368, 0.025917600631713868, 0.02595737648010254, 0.025731231689453123, 0.025838272094726562, 0.025939968109130858, 0.026060895919799806, 0.026015647888183592, 0.026161151885986327, 0.026101024627685546, 0.026034271240234375, 0.026212991714477538, 0.026013696670532226, 0.026281984329223632, 0.025974079132080077, 0.02599740791320801, 0.02590166473388672, 0.02613603210449219, 0.026298431396484374, 0.026027904510498048, 0.02613279914855957, 
0.025962783813476564, 0.02590105628967285, 0.02592576026916504, 0.026054399490356445, 0.025946239471435546, 0.026108928680419922, 0.026035200119018553, 0.02595756721496582, 0.026013792037963866, 0.025859807968139647, 0.0261146240234375, 0.025868255615234374, 0.028612672805786134, 0.027265024185180665, 0.02637151908874512, 0.026057279586791993, 0.025996959686279297, 0.025694784164428712, 0.02592131233215332, 0.025842784881591797, 0.02577440071105957, 0.025743967056274415, 0.025887807846069335, 0.02587676811218262, 0.0259072322845459, 0.025735807418823243, 0.02571820831298828, 0.025987232208251953, 0.025881216049194335, 0.0259434871673584, 0.025950559616088865, 0.025857152938842772, 0.026002304077148437, 0.026009599685668947, 0.02599920082092285, 0.02601308822631836, 0.025946495056152343, 0.026027616500854493, 0.0260053768157959, 0.02588083267211914, 0.02580956840515137, 0.0258602237701416, 0.025824256896972656, 0.025835712432861327, 0.026052480697631837, 0.02590755271911621, 0.025847871780395507, 0.0259465274810791, 0.02638047981262207, 0.026145792007446288, 0.02649078369140625, 0.026422176361083984, 0.026275840759277344, 0.026229856491088867, 0.026230752944946287, 0.02623583984375, 0.025980287551879883, 0.02598361587524414, 0.026034175872802736, 0.02609971237182617, 0.025837568283081053, 0.026021888732910156, 0.026193920135498046, 0.02589004707336426, 0.02596735954284668, 0.025970239639282227, 0.02589753532409668, 0.026238847732543945, 0.02634774398803711, 0.026011423110961916, 0.02611814308166504, 0.026003456115722655, 0.025941984176635742, 0.025982208251953125, 0.02591209602355957]",tokens/s,38.5531585853562,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() 
File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 560.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 21817 has 14.73 GiB memory in use. Of the allocated memory 14.62 GiB is allocated by PyTorch, and 1.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 717, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, 
config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 322.12 MiB is free. Process 46180 has 14.42 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 13.04 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( 
File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through 
torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 272, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 232.12 MiB is free. Process 69406 has 14.51 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 896.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 342.12 MiB is free. Process 166821 has 14.40 GiB memory in use. Of the allocated memory 14.29 GiB is allocated by PyTorch, and 1.75 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = 
DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in
range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 896.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 598.12 MiB is free. Process 172943 has 14.15 GiB memory in use. Of the allocated memory 14.04 GiB is allocated by PyTorch, and 1.81 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 717, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 206549 has 14.71 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 4.20 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 657, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 324, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * 
self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 238.12 MiB is free. Process 75555 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you 
passed . Fix one of those so they match! ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 272, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight 
= Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.50 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.36 GiB is free. Process 78515 has 13.38 GiB memory in use. Of the allocated memory 13.27 GiB is allocated by PyTorch, and 1.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise 
ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,789.64736,3950.903296,0.0,3548.381184,3467.969536,s,1,7.861302734375,7.861302734375,0.0,7.861302734375,7.861302734375,7.861302734375,7.861302734375,[7.861302734375],,kWh,4.79447890834308e-06,5.212363433231576e-07,2.541113144002849e-06,7.856828395669087e-06,,MB,1111.277568,3961.389056,0.0,3548.381184,3412.88448,s,10,1.0437553176879883,0.10437553176879884,0.005275383765913307,0.10670673751831054,0.10814457244873046,0.10837711868286132,0.10856315567016601,"[0.09658147430419922, 0.10443389129638672, 0.10860966491699219, 0.10804752349853515, 0.1080928955078125, 0.10799334716796875, 0.10623760223388672, 0.10436966705322266, 0.09221337890625, 0.10717587280273437]",tokens/s,2452.6821148759554,kWh,2.9964084937497703e-06,3.30442607779244e-07,1.9946784664898245e-06,5.321529568018839e-06,tokens/kWh,48106469.52682566,MB,1123.172352,3963.486208,0.0,3550.478336,3457.675776,s,10,11.794827514648436,1.1794827514648438,0.007366542988828026,1.1796400146484376,1.185407421875,1.186016943359375,1.186504560546875,"[1.159572509765625, 1.178227783203125, 1.1781524658203124, 1.1804578857421875, 1.178221435546875, 1.1788221435546875, 1.18527197265625, 1.18446533203125, 1.185009521484375, 1.18662646484375]",tokens/s,53.41324400187958,kWh,3.444125385166629e-05,3.796467528867502e-06,2.262311900551011e-05,6.086084038604388e-05,tokens/kWh,1035148.3745605106,,s,630,11.790798873901371,0.018715553768097408,0.0005078932461637354,0.018584112167358398,0.019123305892944337,0.01953751878738403,0.02125250797271729,"[0.020174560546875, 0.019192096710205078, 0.018610176086425782, 0.01823539161682129, 0.018151424407958985, 0.018085887908935547, 0.01816985511779785, 0.01935113525390625, 0.01812521553039551, 0.018080896377563476, 0.018105215072631835, 0.018227039337158205, 0.018043039321899414, 0.01826201629638672, 0.0180633602142334, 0.018263647079467774, 0.018231712341308593, 0.01812825584411621, 0.01817254447937012, 0.018159616470336915, 0.01812838363647461, 0.018211328506469726, 0.018159616470336915, 0.01847065544128418, 0.018456640243530272, 0.01816595268249512, 0.018782207489013672, 0.01830297660827637, 0.01810188865661621, 0.018313600540161134, 0.018524160385131837, 0.018708480834960937, 0.01820457649230957, 0.018360416412353517, 
0.01822719955444336, 0.018465951919555666, 0.018207584381103516, 0.018195968627929687, 0.018333919525146486, 0.018583520889282227, 0.01828041648864746, 0.01840777587890625, 0.01824563217163086, 0.018284320831298828, 0.01822934341430664, 0.01833763122558594, 0.018569087982177733, 0.018260160446166993, 0.01836169624328613, 0.018424703598022462, 0.01841971206665039, 0.01854195213317871, 0.01850227165222168, 0.01879449653625488, 0.018589792251586915, 0.018481056213378907, 0.018783552169799805, 0.01850422477722168, 0.018410783767700195, 0.018432479858398437, 0.018418079376220704, 0.018708255767822264, 0.018431295394897462, 0.021043167114257813, 0.019748287200927736, 0.019474431991577147, 0.018503263473510743, 0.01891427230834961, 0.020445184707641603, 0.018909183502197266, 0.01852732849121094, 0.018430879592895508, 0.0184399356842041, 0.018445728302001953, 0.01868067169189453, 0.018468191146850586, 0.018713247299194335, 0.018396320343017577, 0.018486112594604493, 0.01855891227722168, 0.018430015563964845, 0.018585599899291993, 0.018395135879516602, 0.019104896545410158, 0.01853433609008789, 0.018436767578125, 0.018353952407836913, 0.018418176651000977, 0.01863065528869629, 0.01842918395996094, 0.018893056869506836, 0.01834649658203125, 0.01844428825378418, 0.018647039413452148, 0.01895327949523926, 0.018545600891113283, 0.0188538875579834, 0.018445728302001953, 0.01833763122558594, 0.01867350387573242, 0.01840220832824707, 0.01864089584350586, 0.01859404754638672, 0.018906879425048827, 0.018417503356933592, 0.018438304901123047, 0.018252864837646484, 0.01833465576171875, 0.018339456558227538, 0.01912665557861328, 0.01829635238647461, 0.018460479736328125, 0.018600448608398438, 0.018626720428466796, 0.01857535934448242, 0.018732959747314454, 0.018819168090820314, 0.019009536743164062, 0.018548736572265623, 0.019333120346069335, 0.018491392135620118, 0.019185663223266602, 0.018433439254760743, 0.01866803169250488, 0.01845257568359375, 0.018497119903564452, 0.021172000885009767, 0.019730976104736328, 0.01894767951965332, 0.018505632400512697, 0.018601951599121095, 0.01923347282409668, 0.0186976318359375, 0.018823711395263672, 0.018560352325439452, 0.01848908805847168, 0.018410400390625, 0.01902592086791992, 0.018534400939941405, 0.018526079177856446, 0.01844646453857422, 0.018749439239501953, 0.018660640716552733, 0.018885343551635743, 0.018374656677246092, 0.018474079132080077, 0.018487743377685547, 0.018342367172241213, 0.01898486328125, 0.018861440658569335, 0.018377439498901367, 0.01826131248474121, 0.01841548728942871, 0.018298912048339843, 0.018680160522460937, 0.018518463134765625, 0.018573247909545898, 0.01888057518005371, 0.018364416122436524, 0.01861427116394043, 0.01841971206665039, 0.018472320556640626, 0.018421760559082033, 0.018313535690307615, 0.01867359924316406, 0.018968544006347655, 0.018608383178710938, 0.01910099220275879, 0.01878214454650879, 0.018635679244995117, 0.019843008041381834, 0.01951263999938965, 0.018498144149780273, 0.018399391174316406, 0.018827199935913086, 0.018584672927856444, 0.018471904754638672, 0.018418943405151367, 0.0184036808013916, 0.01877996826171875, 0.018659135818481446, 0.018434335708618164, 0.018416128158569335, 0.018542591094970702, 0.018808832168579103, 0.018522111892700196, 0.018632768630981445, 0.018600927352905274, 0.018485855102539063, 0.021551008224487304, 0.019750816345214844, 0.018963232040405273, 0.018509824752807616, 0.018722944259643555, 0.018558496475219725, 0.018468511581420897, 0.018557119369506835, 0.018409215927124023, 
0.01882956886291504, 0.018395647048950196, 0.018610176086425782, 0.018535615921020508, 0.018649248123168944, 0.01845315170288086, 0.01848428726196289, 0.01856732749938965, 0.018541536331176757, 0.01888627243041992, 0.01894828796386719, 0.018518016815185546, 0.018435232162475584, 0.018735200881958007, 0.01869900894165039, 0.019120128631591796, 0.01844220733642578, 0.019302112579345703, 0.01910406494140625, 0.018671615600585938, 0.0183635196685791, 0.018766719818115233, 0.018554176330566406, 0.01828236770629883, 0.01854751968383789, 0.018701663970947267, 0.018451040267944335, 0.01851807975769043, 0.018733055114746093, 0.018765823364257812, 0.018655231475830078, 0.01849081611633301, 0.018405471801757813, 0.018645471572875976, 0.01844793510437012, 0.01845907211303711, 0.018593791961669923, 0.018751487731933594, 0.018530303955078126, 0.01854867172241211, 0.01926355171203613, 0.018759008407592773, 0.01888857650756836, 0.01939740753173828, 0.019156991958618166, 0.019355039596557617, 0.01855299186706543, 0.018501344680786132, 0.018458431243896484, 0.018668447494506836, 0.018429920196533202, 0.018858015060424806, 0.01846067237854004, 0.018677759170532226, 0.021276895523071288, 0.019628192901611326, 0.018880512237548826, 0.019294208526611328, 0.019877887725830077, 0.018573055267333983, 0.019119712829589845, 0.01854047966003418, 0.01836038398742676, 0.019024032592773438, 0.01931520080566406, 0.019152799606323243, 0.018517663955688476, 0.0185850887298584, 0.018416704177856444, 0.01842367935180664, 0.018468576431274412, 0.01849577522277832, 0.018347007751464844, 0.01840025520324707, 0.018761728286743166, 0.018479103088378905, 0.01859702491760254, 0.018592607498168944, 0.01844223976135254, 0.018418943405151367, 0.01847987174987793, 0.018972671508789063, 0.018761503219604493, 0.01866160011291504, 0.018767135620117188, 0.018554880142211915, 0.01857814407348633, 0.018357568740844727, 0.01879520034790039, 0.018757631301879883, 0.018540544509887694, 0.018386943817138672, 0.019126272201538085, 0.018650815963745116, 0.018551103591918944, 0.018907136917114258, 0.018524160385131837, 0.01856716728210449, 0.018577407836914063, 0.018356224060058594, 0.01841267204284668, 0.018365312576293945, 0.018752607345581054, 0.0183591365814209, 0.018439968109130858, 0.01844428825378418, 0.018364032745361327, 0.01833750343322754, 0.01898713684082031, 0.01845737648010254, 0.018479135513305663, 0.018919424057006837, 0.018485088348388672, 0.01853660774230957, 0.01846790313720703, 0.018614784240722656, 0.01853718376159668, 0.021865888595581053, 0.019824703216552733, 0.018930208206176757, 0.018479103088378905, 0.01840947151184082, 0.01847500801086426, 0.018577407836914063, 0.018497535705566406, 0.018521343231201172, 0.01844095993041992, 0.018392416000366212, 0.018657312393188477, 0.018523935317993165, 0.018363136291503906, 0.01844233512878418, 0.01847091293334961, 0.018356224060058594, 0.018728960037231446, 0.018726911544799805, 0.018372608184814454, 0.01850307273864746, 0.01834035110473633, 0.018376800537109376, 0.018716672897338867, 0.01853824043273926, 0.018469120025634767, 0.01845248031616211, 0.018532352447509767, 0.018669567108154296, 0.01875289535522461, 0.019101503372192383, 0.018522048950195314, 0.018529151916503905, 0.018597824096679687, 0.018712640762329102, 0.01884979248046875, 0.018679807662963867, 0.018659328460693358, 0.018675712585449217, 0.019056640625, 0.018448383331298827, 0.019200000762939453, 0.01858252716064453, 0.018504703521728515, 0.01847452735900879, 0.018554655075073243, 0.01883145523071289, 
0.018618303298950194, 0.018539167404174803, 0.01987993621826172, 0.01897020721435547, 0.018635168075561523, 0.01923891258239746, 0.018520063400268554, 0.01847200012207031, 0.01853865623474121, 0.01857187271118164, 0.019009599685668944, 0.018544639587402344, 0.01845030403137207, 0.018714879989624022, 0.0185784969329834, 0.018762687683105468, 0.021735424041748046, 0.01995747184753418, 0.019020063400268555, 0.018630847930908204, 0.018556928634643553, 0.018722623825073243, 0.0185262393951416, 0.020059680938720702, 0.018634208679199217, 0.01883225631713867, 0.018562816619873048, 0.018594144821166992, 0.01848320007324219, 0.018748992919921874, 0.018832992553710938, 0.018766464233398436, 0.01843836784362793, 0.01859516716003418, 0.018772640228271485, 0.01883955192565918, 0.018704383850097657, 0.01869004821777344, 0.018667520523071288, 0.018759679794311524, 0.018923744201660157, 0.01847478485107422, 0.018692096710205077, 0.0186060791015625, 0.018481151580810547, 0.01883955192565918, 0.018790399551391602, 0.018538496017456055, 0.018521247863769533, 0.018851999282836915, 0.01947104072570801, 0.018759008407592773, 0.01856988716125488, 0.018513023376464845, 0.018720767974853517, 0.01848819160461426, 0.01845248031616211, 0.018395040512084963, 0.018538591384887695, 0.01839308738708496, 0.018520063400268554, 0.01869753646850586, 0.018252256393432618, 0.01861964797973633, 0.01849648094177246, 0.018320928573608397, 0.018595872879028322, 0.01866166305541992, 0.01842140769958496, 0.01850422477722168, 0.018649055480957032, 0.018503679275512695, 0.018448383331298827, 0.018455648422241212, 0.018516895294189453, 0.02152038383483887, 0.021403648376464843, 0.01845248031616211, 0.01865715217590332, 0.02187913513183594, 0.020086368560791015, 0.019096096038818358, 0.019003776550292967, 0.018544639587402344, 0.01857535934448242, 0.01845043182373047, 0.018652639389038085, 0.01871107292175293, 0.01850569534301758, 0.01841494369506836, 0.018781984329223633, 0.018437023162841796, 0.018354175567626953, 0.018394304275512696, 0.01856752014160156, 0.018289024353027344, 0.018515743255615235, 0.019287679672241213, 0.01944976043701172, 0.01867241668701172, 0.01858355140686035, 0.018505727767944336, 0.01862860870361328, 0.018453567504882813, 0.01875654411315918, 0.018493440628051756, 0.01889023971557617, 0.01847756767272949, 0.018391040802001952, 0.018542591094970702, 0.018743295669555664, 0.018476640701293946, 0.018636608123779298, 0.01876211166381836, 0.0187619514465332, 0.01854982376098633, 0.018648000717163087, 0.018694143295288086, 0.01876505661010742, 0.018714879989624022, 0.01880659294128418, 0.018942527770996094, 0.018520191192626954, 0.018663423538208008, 0.018739200592041014, 0.019029216766357424, 0.018682655334472657, 0.01863007926940918, 0.021192800521850585, 0.01867532730102539, 0.01872368049621582, 0.01939561653137207, 0.018654176712036133, 0.018499168395996093, 0.01875119972229004, 0.019083967208862306, 0.01865318489074707, 0.018548736572265623, 0.018792448043823243, 0.018749439239501953, 0.01858687973022461, 0.018623231887817383, 0.02020368003845215, 0.019732864379882812, 0.01939289665222168, 0.01867366409301758, 0.019555648803710936, 0.018936511993408203, 0.01895759963989258, 0.018634496688842775, 0.018562015533447267, 0.018980224609375, 0.020623968124389647, 0.018581504821777343, 0.018704063415527345, 0.01843849563598633, 0.01838719940185547, 0.019175167083740233, 0.018397184371948243, 0.01867558479309082, 0.01842188835144043, 0.018922719955444336, 0.018508575439453126, 0.0184586238861084, 0.018664575576782225, 
0.018526784896850584, 0.018555200576782227, 0.018741247177124023, 0.01854582405090332, 0.01838371276855469, 0.018548736572265623, 0.018398752212524416, 0.018495967864990234, 0.01880473518371582, 0.018540544509887694, 0.01846428871154785, 0.01854867172241211, 0.01846531105041504, 0.018479103088378905, 0.01906483268737793, 0.018892768859863282, 0.01935772705078125, 0.018485248565673826, 0.018449567794799806, 0.018776927947998047, 0.018817024230957033, 0.018481151580810547, 0.01861961555480957, 0.018506528854370118, 0.01883135986328125, 0.01845043182373047, 0.01880473518371582, 0.018775136947631835, 0.018494367599487305, 0.01841766357421875, 0.018507776260375978, 0.01875980758666992, 0.018829183578491213, 0.01859584045410156, 0.01940275192260742, 0.018610176086425782, 0.021063615798950195, 0.018871904373168946, 0.018944480895996093, 0.018685951232910156, 0.02118057632446289, 0.019745088577270507, 0.018997247695922852, 0.018561023712158203, 0.018478559494018554, 0.01890073585510254, 0.01851475143432617, 0.0186177921295166, 0.018697824478149414, 0.018669952392578126, 0.019065408706665038, 0.01881292724609375, 0.01861222457885742, 0.01859584045410156, 0.018712575912475587, 0.019564544677734375, 0.018714624404907225, 0.018571264266967775, 0.018675712585449217, 0.018491327285766603, 0.018896320343017577, 0.018700159072875977, 0.018615039825439453, 0.018536447525024414, 0.018584800720214845, 0.018549535751342775, 0.018679807662963867, 0.018784255981445314, 0.01863680076599121, 0.01900092887878418, 0.018782623291015627, 0.01869004821777344, 0.019224512100219728, 0.01861347198486328, 0.01887727928161621, 0.019054592132568358, 0.018708192825317382, 0.01855516815185547, 0.018747392654418944, 0.018655231475830078, 0.019179519653320314, 0.018550783157348632, 0.019310495376586915, 0.018494848251342774, 0.01951535987854004, 0.019122976303100586, 0.01863055992126465, 0.018985023498535158, 0.018644351959228516, 0.01849203109741211, 0.019873184204101564, 0.018798751831054686, 0.0188787841796875, 0.018564416885375978, 0.018615104675292968, 0.018647039413452148, 0.018566719055175783, 0.018670015335083008, 0.018671615600585938, 0.01886617660522461, 0.018767200469970702, 0.018674335479736327, 0.01863065528869629]",tokens/s,53.43149406054996,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = 
worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 688.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 598.12 MiB is free. Process 159182 has 14.15 GiB memory in use. Of the allocated memory 14.04 GiB is allocated by PyTorch, and 1.75 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 456.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 280.12 MiB is free. Process 156312 has 14.46 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 3.19 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 270.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 98.12 MiB is free. Process 163908 has 14.64 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 1.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 657, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 327, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 56.12 MiB is free. Process 66871 has 14.68 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 711.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,888.754176,15254.552576,0.0,14852.030464,14834.516992,s,1,7.69632666015625,7.69632666015625,0.0,7.69632666015625,7.69632666015625,7.69632666015625,7.69632666015625,[7.69632666015625],,kWh,7.833851812536826e-06,8.566556692894609e-07,2.6219465420196375e-06,1.1312454023845925e-05,,MB,1206.19008,15657.20576,0.0,15244.197888,15168.108544,s,10,4.779093872070312,0.47790938720703124,0.004329729175999447,0.4780590515136719,0.48226462097167966,0.48285102996826174,0.48332015716552734,"[0.466609375, 0.4821343078613281, 0.48343743896484376, 0.4772923278808594, 0.47837234497070313, 0.4788439636230469, 0.48074691772460937, 0.4777457580566406, 0.47696331787109375, 0.4769481201171875]",tokens/s,535.6663979674045,kWh,1.3635070865338795e-05,1.5036934695144374e-06,9.019931458363985e-06,2.4158695793217216e-05,tokens/kWh,10596598.516376635,MB,1211.592704,15720.12032,0.0,15307.112448,15247.908352,s,10,34.79727783203125,3.479727783203125,0.001044034012397528,3.4797027587890623,3.48116025390625,3.48116728515625,3.48117291015625,"[3.477724853515625, 3.4791416015625, 3.480072509765625, 3.479329833984375, 3.478609130859375, 3.479661865234375, 3.47974365234375, 3.48117431640625, 3.48115869140625, 3.480661376953125]",tokens/s,18.104864496615267,kWh,0.0001019366254329924,1.1243992027201793e-05,6.777096330763777e-05,0.000180951580767832,tokens/kWh,348159.43432310486,,s,630,34.79360745239261,0.05522794833713108,0.00020057611877807047,0.055226367950439455,0.05539961013793945,0.05543918476104736,0.056276338386535646,"[0.05603561782836914, 0.05517062377929687, 0.05499523162841797, 0.05493257522583008, 0.055032737731933595, 0.05490483093261719, 0.05496588897705078, 0.054968704223632814, 0.0549983024597168, 0.054937824249267575, 0.05498521423339844, 0.05495356750488281, 0.05507932662963867, 0.055019168853759765, 0.05509081649780274, 0.054991615295410155, 0.05519152069091797, 0.05528780746459961, 0.055226207733154294, 0.05517532730102539, 0.05514342498779297, 0.05511043167114258, 0.05510166549682617, 0.05499289703369141, 0.05508918380737305, 0.05498160171508789, 0.05534003067016602, 0.05524448013305664, 0.05510947036743164, 0.055222335815429686, 0.055097759246826174, 0.055035774230957034, 0.0552039680480957, 0.055455390930175784, 0.05520828628540039, 0.05528566360473633, 0.05529616165161133, 0.05529993438720703, 0.05520361709594727, 0.05520339202880859, 0.05511663818359375, 0.055170433044433594, 0.055312576293945315, 0.055304000854492184, 0.055245342254638674, 0.05517311859130859, 0.0551280632019043, 0.05512531280517578, 0.055292385101318356, 0.05528720092773438, 0.055362174987792966, 0.05530438232421875, 0.055320606231689454, 
0.0552729606628418, 0.05541862487792969, 0.05546057510375976, 0.055363582611083983, 0.055258113861083986, 0.05540966415405273, 0.05529600143432617, 0.055356735229492186, 0.05550105667114258, 0.055277408599853514, 0.05642105484008789, 0.05534729766845703, 0.05505628967285156, 0.05497977447509766, 0.05492819213867187, 0.05493350219726562, 0.05496012878417969, 0.054986751556396485, 0.05500723266601563, 0.05496134567260742, 0.054935680389404294, 0.054987457275390624, 0.05499289703369141, 0.0549947509765625, 0.055025344848632814, 0.05505279922485352, 0.0551383056640625, 0.055234081268310545, 0.055312000274658206, 0.055255809783935544, 0.055201889038085934, 0.05512742233276367, 0.05513689422607422, 0.05511164855957031, 0.055087104797363284, 0.05505222320556641, 0.05516483306884765, 0.05512819290161133, 0.05513030242919922, 0.05517299270629883, 0.0550830078125, 0.05508639907836914, 0.05515539169311524, 0.05524636840820313, 0.055257568359375, 0.05573017501831055, 0.0553590087890625, 0.05527785491943359, 0.0552470703125, 0.05522633743286133, 0.055267105102539064, 0.05529417419433594, 0.05519152069091797, 0.05522639846801758, 0.055180801391601565, 0.055290367126464846, 0.055215648651123043, 0.05522204971313477, 0.05525369644165039, 0.05528720092773438, 0.05526179122924805, 0.05538399887084961, 0.055342273712158205, 0.05553446578979492, 0.05543670272827148, 0.0553702392578125, 0.055375968933105466, 0.055395904541015624, 0.055376319885253905, 0.0553963508605957, 0.055349246978759765, 0.05532198333740234, 0.05530073547363281, 0.05637580871582031, 0.05532009506225586, 0.05504064178466797, 0.054984737396240234, 0.054969406127929686, 0.05502659225463867, 0.05493145751953125, 0.05509529495239258, 0.054975711822509765, 0.054973217010498045, 0.05509734344482422, 0.05502975845336914, 0.055005184173583986, 0.054994945526123044, 0.055102527618408205, 0.05509625625610352, 0.05516502380371094, 0.05527510452270508, 0.05530796813964844, 0.05529868698120117, 0.05519564819335938, 0.055131454467773434, 0.05516342544555664, 0.05503567886352539, 0.05508339309692383, 0.05506611251831055, 0.05508147048950195, 0.055129119873046875, 0.05519827270507813, 0.05504848098754883, 0.05509952163696289, 0.05515254211425781, 0.055212127685546876, 0.055193599700927735, 0.05523849487304688, 0.055352928161621094, 0.05539078521728515, 0.05526937484741211, 0.05548147201538086, 0.055353759765625, 0.05527340698242188, 0.05523529434204102, 0.055279422760009765, 0.05523251342773437, 0.055242176055908206, 0.05531887817382813, 0.0553289909362793, 0.05528342437744141, 0.055209728240966795, 0.055359935760498045, 0.055363136291503905, 0.05532259368896485, 0.05541126251220703, 0.05539648056030273, 0.05548339080810547, 0.05536857604980469, 0.05551308822631836, 0.055382015228271485, 0.05551923370361328, 0.05539388656616211, 0.05529955291748047, 0.05532153701782227, 0.05525215911865235, 0.05637100982666016, 0.05525750350952149, 0.054980609893798826, 0.054975807189941404, 0.055027584075927734, 0.05503408050537109, 0.05494230270385742, 0.05500723266601563, 0.0549719352722168, 0.05506931304931641, 0.054981758117675784, 0.05508915328979492, 0.054995647430419924, 0.05503388977050781, 0.055011329650878904, 0.0551014404296875, 0.05528985595703125, 0.05530992126464844, 0.055322494506835934, 0.05535168075561524, 0.05520207977294922, 0.055113601684570315, 0.055069984436035155, 0.05508784103393555, 0.05507696151733398, 0.0550665283203125, 0.05509286499023437, 0.0550662727355957, 0.05529414367675781, 0.055099937438964845, 0.055103488922119144, 
0.055256160736083984, 0.055163806915283206, 0.055244800567626956, 0.05533673477172851, 0.05537750244140625, 0.05529254531860352, 0.05537177658081055, 0.05530624008178711, 0.055314430236816405, 0.05522227096557617, 0.055311649322509766, 0.05520028686523438, 0.05528185653686524, 0.055190879821777346, 0.05521065521240234, 0.05520793533325195, 0.05526732635498047, 0.055218177795410155, 0.05526313781738281, 0.05523043060302734, 0.05528998565673828, 0.05536767959594727, 0.05541190338134765, 0.05539078521728515, 0.05540480041503906, 0.05539603042602539, 0.05540454483032226, 0.05535366439819336, 0.05531852722167969, 0.05531011199951172, 0.05534537506103516, 0.05530537414550781, 0.05631769561767578, 0.05526396942138672, 0.0550230712890625, 0.05491279983520508, 0.054905696868896486, 0.05490464019775391, 0.054970558166503904, 0.054940929412841795, 0.054989086151123044, 0.05496780776977539, 0.054967262268066405, 0.05499401473999024, 0.055016353607177736, 0.055011329650878904, 0.055119873046875, 0.05497446441650391, 0.05523865509033203, 0.055247135162353515, 0.05535094451904297, 0.055215808868408205, 0.05520537567138672, 0.055058815002441405, 0.055083518981933595, 0.05520172882080078, 0.055105598449707034, 0.05521974563598633, 0.055115360260009766, 0.0552416000366211, 0.0551280632019043, 0.055166976928710934, 0.05512192153930664, 0.05520387268066406, 0.05515465545654297, 0.05511788940429688, 0.055314559936523434, 0.05527123260498047, 0.05531203079223633, 0.05528406524658203, 0.05528678512573242, 0.055223297119140625, 0.05520588684082031, 0.05532662582397461, 0.05521324920654297, 0.05519996643066406, 0.05525164794921875, 0.05519769668579102, 0.05522556686401367, 0.05528361511230469, 0.05535417556762695, 0.05526124954223633, 0.05524035263061523, 0.055395904541015624, 0.05534595108032227, 0.055316478729248046, 0.05527094268798828, 0.055398880004882814, 0.055390209197998044, 0.05534310531616211, 0.0553919677734375, 0.05535724639892578, 0.05537980651855469, 0.05537036895751953, 0.055375873565673826, 0.056235488891601564, 0.05524879837036133, 0.05502016067504883, 0.05498374557495117, 0.05513516616821289, 0.05501542282104492, 0.05497241592407227, 0.05498028945922852, 0.055007553100585936, 0.054988800048828126, 0.05501852798461914, 0.055000030517578125, 0.055043167114257815, 0.05501414489746094, 0.055021728515625, 0.05501663970947265, 0.05519974517822265, 0.05532505416870117, 0.05531676864624024, 0.05526873779296875, 0.055139102935791016, 0.05507891082763672, 0.05504819107055664, 0.055087104797363284, 0.05506662368774414, 0.05510543823242187, 0.05514659118652344, 0.055093246459960936, 0.05519747161865234, 0.05512771224975586, 0.055251518249511716, 0.05509529495239258, 0.05520172882080078, 0.05537388610839844, 0.055246688842773437, 0.05536579132080078, 0.05535055923461914, 0.055429855346679685, 0.055289150238037106, 0.05528851318359375, 0.05523043060302734, 0.0551767692565918, 0.05519148635864258, 0.05521667098999023, 0.05532467269897461, 0.05523152160644531, 0.055215198516845705, 0.0553542709350586, 0.05538505554199219, 0.05524889755249023, 0.05530537414550781, 0.05536240005493164, 0.05537500762939453, 0.055479137420654294, 0.055379039764404295, 0.055376800537109375, 0.05541273498535156, 0.05535948944091797, 0.055349281311035156, 0.05534716796875, 0.055349246978759765, 0.05535129547119141, 0.05549683380126953, 0.056281089782714844, 0.05522022247314453, 0.055010494232177735, 0.054991649627685546, 0.05497375869750976, 0.054936286926269534, 0.05500646209716797, 0.054958847045898436, 0.05499084854125977, 
0.054951934814453124, 0.0549920654296875, 0.05497257614135742, 0.05505500793457031, 0.05511324691772461, 0.05508143997192383, 0.05504201507568359, 0.05519340896606445, 0.05533513641357422, 0.05531843185424805, 0.055244510650634765, 0.05516239929199219, 0.05520243072509766, 0.05509552001953125, 0.05504204940795898, 0.05526732635498047, 0.055105281829833985, 0.05512422561645508, 0.0550748176574707, 0.05511372756958008, 0.0551383056640625, 0.055169025421142576, 0.05510697555541992, 0.05519116973876953, 0.05524367904663086, 0.05531449508666992, 0.05532672119140625, 0.05537177658081055, 0.05532876968383789, 0.05526528167724609, 0.05532672119140625, 0.055295902252197264, 0.05519779205322266, 0.05527142333984375, 0.055185409545898435, 0.05527347183227539, 0.05521612930297851, 0.055228416442871096, 0.05521952056884766, 0.055300800323486325, 0.05526323318481445, 0.05534003067016602, 0.05538431930541992, 0.0554073600769043, 0.05539408111572266, 0.05548825454711914, 0.05546640014648437, 0.055427135467529295, 0.05546393585205078, 0.055416831970214846, 0.05537996673583984, 0.05535475158691406, 0.055345790863037106, 0.055420513153076174, 0.056295936584472656, 0.05530656051635742, 0.05498662567138672, 0.055142528533935545, 0.054980224609375, 0.055027103424072264, 0.0549851188659668, 0.05501593780517578, 0.054997055053710935, 0.05501542282104492, 0.05501116943359375, 0.0549664306640625, 0.055121150970458985, 0.05499776077270508, 0.055125217437744144, 0.05503680038452148, 0.05520377731323242, 0.05535289764404297, 0.05546441650390625, 0.05524883270263672, 0.05518745422363281, 0.05514854431152344, 0.055109630584716796, 0.05511167907714844, 0.05511782455444336, 0.055175167083740234, 0.055109630584716796, 0.05515059280395508, 0.055152641296386716, 0.055160831451416016, 0.055086719512939454, 0.05516022491455078, 0.055259616851806644, 0.05535289764404297, 0.05534310531616211, 0.055443870544433595, 0.05541302490234375, 0.055357696533203125, 0.05527961730957031, 0.05527283096313477, 0.05529868698120117, 0.05533491134643555, 0.055384063720703126, 0.05525708770751953, 0.05524684906005859, 0.0552973747253418, 0.055204288482666015, 0.055226207733154294, 0.055269569396972654, 0.05531872177124023, 0.055431167602539064, 0.055414783477783204, 0.05541081619262695, 0.055414207458496095, 0.05544121551513672, 0.0554247055053711, 0.055399360656738283, 0.05536972808837891, 0.05541888046264649, 0.05539807891845703, 0.05534342575073242, 0.05540809631347656, 0.05539894485473633, 0.056264705657958984, 0.05527667236328125, 0.05508595275878906, 0.05514406585693359, 0.054976097106933595, 0.055061279296875, 0.05495619201660156, 0.05500707244873047, 0.054962177276611325, 0.05496166229248047, 0.0550200309753418, 0.05504633712768554, 0.05505414581298828, 0.05514153671264648, 0.055165790557861326, 0.05504764938354492, 0.055271678924560544, 0.05535772705078125, 0.05536943817138672, 0.05534339141845703, 0.05524684906005859, 0.05517926406860352, 0.05516400146484375, 0.055132095336914065, 0.055083999633789064, 0.05511568069458008, 0.05519164657592773, 0.05510758590698242, 0.05522227096557617, 0.05508665466308594, 0.05514412689208984, 0.055147262573242185, 0.05521408081054688, 0.05532271957397461, 0.05532579040527344, 0.055415615081787106, 0.055352928161621094, 0.05532665634155273, 0.055300575256347656, 0.05538147354125977, 0.05531292724609375, 0.05521100616455078, 0.055256065368652345, 0.05519497680664062, 0.055226016998291015, 0.05523878479003906, 0.05538086318969727, 0.055258750915527344, 0.055247230529785155, 0.05531852722167969, 
0.05530003356933594, 0.05546553421020508, 0.05546595382690429, 0.05542556762695312, 0.05541436767578125, 0.055411102294921875, 0.05542911911010742, 0.05542911911010742, 0.05540185546875, 0.05536751937866211, 0.05538220977783203, 0.055312000274658206, 0.05535408020019531, 0.056377342224121094, 0.055259136199951174, 0.0551464958190918, 0.054968318939208984, 0.05502975845336914, 0.054951744079589845, 0.05497875213623047, 0.05497446441650391, 0.05501059341430664, 0.05499158477783203, 0.055067710876464844, 0.0550184326171875, 0.055054336547851565, 0.05502975845336914, 0.05504761505126953, 0.05515024185180664, 0.05519862365722656, 0.0553524169921875, 0.055339073181152346, 0.05524764633178711, 0.05517465591430664, 0.05512454223632812, 0.05520588684082031, 0.05512918472290039, 0.05511859130859375, 0.05508015823364258, 0.05510854339599609, 0.055093246459960936, 0.05514854431152344, 0.055242752075195314, 0.0551383056640625, 0.05514035034179687, 0.055228416442871096, 0.055341056823730465, 0.05532262420654297, 0.055314430236816405, 0.0554958381652832, 0.05539311981201172, 0.05536767959594727, 0.05525468826293945, 0.05520816040039062, 0.055259265899658204, 0.05525001525878906, 0.05520854568481445, 0.05522988891601562, 0.055221118927001954, 0.055298046112060545, 0.05536153411865234, 0.055416831970214846, 0.05523865509033203, 0.055371166229248044, 0.05536556625366211, 0.05545580673217773, 0.05541241455078125, 0.055398433685302735, 0.055423137664794925, 0.055532257080078126, 0.055368896484375, 0.05535417556762695, 0.05532262420654297, 0.05534310531616211, 0.05534515380859375, 0.05530624008178711]",tokens/s,18.10677437980574,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 718, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 502.12 MiB is free. Process 38158 has 14.25 GiB memory in use. Of the allocated memory 14.13 GiB is allocated by PyTorch, and 9.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 757, in __init__ self.block_sparse_moe = MixtralSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in __init__ self.experts = 
nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 671, in __init__ self.w2 = nn.Linear(self.ffn_dim, self.hidden_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 360.12 MiB is free. Process 182701 has 14.39 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 9.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 352, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 19247 has 14.71 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 85.33 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 512, in __init__ self.mlp = MistralMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 152, in __init__ self.gate_proj = nn.Linear(self.hidden_size, 
self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 170.12 MiB is free. Process 95304 has 14.57 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,811.966464,1129.250816,0.0,734.0032,709.336064,s,1,7.4012548828125,7.4012548828125,0.0,7.4012548828125,7.4012548828125,7.4012548828125,7.4012548828125,[7.4012548828125],,kWh,5.018227141677774e-06,5.464397925686503e-07,1.0041674700089764e-06,6.568834404255401e-06,,MB,1117.376512,1276.051456,0.0,870.31808,809.960448,s,19,0.2845489587783813,0.01497626098833586,0.000721354646664666,0.01482044792175293,0.014997305488586425,0.01537096910476684,0.017479082736968996,"[0.018006111145019533, 0.014694175720214843, 0.014658592224121093, 0.014844799995422363, 0.014743328094482422, 0.014737407684326171, 0.01477455997467041, 0.014862208366394044, 0.01482044792175293, 0.014907584190368653, 0.01497708797454834, 0.014729791641235352, 0.015078175544738769, 0.014839903831481933, 0.014872063636779785, 0.014734848022460938, 0.014711680412292481, 0.01483948802947998, 0.014716704368591309]",tokens/s,17093.719199964766,kWh,5.896171635690091e-07,6.502351689316556e-08,3.9073656399191194e-07,1.0453772444540865e-06,tokens/kWh,244887672.2332783,MB,1127.317504,1309.605888,0.0,905.969664,809.963008,s,19,9.883693817138672,0.5201944114283512,0.007087630848022105,0.52034814453125,0.5285178955078126,0.5290438171386719,0.5304746325683594,"[0.5003568115234375, 0.5235473022460938, 0.5206721801757812, 0.518958984375, 0.5252100219726562, 0.5308323364257812, 0.527358642578125, 0.5236458740234375, 0.5288450927734375, 0.5238328247070313, 0.5284360961914063, 0.5189959106445312, 0.5198388061523438, 0.5151084594726563, 0.52034814453125, 0.5106662902832031, 0.5172429809570313, 0.5129843139648438, 
0.516812744140625]",tokens/s,121.10856752000551,kWh,1.4459999236431325e-05,1.594679690931747e-06,7.439825256008322e-06,2.3494504183371386e-05,tokens/kWh,2681478.166480707,,s,1197,9.87563265323639,0.008250319676889214,0.0002649751535341334,0.008231072425842286,0.008551398658752441,0.008621772575378418,0.008864796257019043,"[0.007893343925476075, 0.007861472129821778, 0.007827648162841797, 0.0078115520477294925, 0.007755648136138916, 0.007863840103149415, 0.007809216022491455, 0.007858719825744629, 0.007843423843383789, 0.007976895809173584, 0.007849631786346435, 0.007805344104766846, 0.007824992179870606, 0.007926464080810547, 0.007839263916015625, 0.007803487777709961, 0.007813119888305664, 0.007804927825927735, 0.0079584641456604, 0.007798208236694336, 0.007791232109069824, 0.00779859209060669, 0.007766208171844482, 0.0077844481468200685, 0.00777785587310791, 0.007813600063323975, 0.007848991870880126, 0.007807936191558838, 0.007755360126495361, 0.00779699182510376, 0.007785920143127441, 0.0077749438285827635, 0.0077844481468200685, 0.007790048122406006, 0.0078032960891723634, 0.007833727836608887, 0.007806015968322754, 0.007857279777526855, 0.007810880184173584, 0.007821568012237549, 0.007825151920318603, 0.007868319988250732, 0.007917664051055907, 0.007870463848114014, 0.007855519771575928, 0.007848896026611329, 0.007898784160614014, 0.008034175872802734, 0.008136832237243652, 0.008252575874328613, 0.008133472442626953, 0.00814675235748291, 0.008493247985839844, 0.008154208183288575, 0.008208992004394532, 0.008403264045715331, 0.008435711860656739, 0.008345600128173827, 0.008331263542175293, 0.00829644775390625, 0.008318976402282715, 0.008258975982666016, 0.008069727897644043, 0.007967040061950684, 0.008228927612304688, 0.008381855964660645, 0.008514080047607422, 0.008398847579956055, 0.008409088134765624, 0.008484383583068848, 0.008565216064453126, 0.008537343978881836, 0.008313599586486817, 0.008328895568847656, 0.008457728385925293, 0.008162112236022949, 0.008193792343139649, 0.008510784149169922, 0.008345919609069824, 0.008247072219848633, 0.008231391906738281, 0.00831116771697998, 0.008439647674560546, 0.008395071983337402, 0.008331104278564453, 0.008305952072143555, 0.00849782371520996, 0.008475744247436523, 0.008368800163269043, 0.008397120475769043, 0.008384511947631837, 0.008136704444885253, 0.008265855789184571, 0.008190848350524903, 0.008176639556884765, 0.008426912307739258, 0.008436320304870605, 0.008615936279296875, 0.008404255867004395, 0.008436127662658692, 0.008482687950134278, 0.008280447959899902, 0.008165184020996094, 0.008310720443725586, 0.008370112419128418, 0.008553888320922852, 0.008442848205566407, 0.008437760353088379, 0.008281696319580078, 0.008396448135375977, 0.0084933443069458, 0.008259679794311523, 0.008149408340454101, 0.008048895835876466, 0.008050399780273438, 0.007923295974731445, 0.008352160453796387, 0.007975264072418213, 0.0080480318069458, 0.008184063911437988, 0.008363519668579102, 0.0083189115524292, 0.008245823860168457, 0.008046048164367676, 0.007946591854095459, 0.007879039764404298, 0.007845503807067871, 0.007882847785949706, 0.007893856048583985, 0.007968128204345703, 0.008161439895629882, 0.008217056274414063, 0.008203519821166991, 0.008290495872497559, 0.008127391815185547, 0.00802569580078125, 0.008048992156982421, 0.008435423851013184, 0.008085344314575195, 0.008283712387084962, 0.00825551986694336, 0.008249216079711914, 0.008295104026794434, 0.008364031791687012, 0.008305983543395996, 0.008198431968688965, 0.008036767959594727, 
0.007948287963867188, 0.008017056465148926, 0.007942048072814942, 0.007928768157958984, 0.00800767993927002, 0.00848796844482422, 0.008551360130310059, 0.00869379234313965, 0.008538111686706543, 0.008468416213989258, 0.0085665283203125, 0.008523872375488281, 0.008462559700012208, 0.008455360412597656, 0.008371007919311524, 0.008291680335998535, 0.0082008638381958, 0.008112159729003907, 0.008010880470275879, 0.008014687538146973, 0.008093695640563964, 0.008437760353088379, 0.008528127670288085, 0.008328960418701173, 0.008343551635742187, 0.008365504264831543, 0.008278592109680176, 0.008408512115478516, 0.008274496078491211, 0.008278016090393067, 0.008184831619262695, 0.008113375663757324, 0.008284031867980957, 0.008564640045166015, 0.008697855949401855, 0.00841318416595459, 0.008391776084899903, 0.0085097599029541, 0.008290528297424316, 0.00822105598449707, 0.008324831962585448, 0.008116064071655273, 0.00847436809539795, 0.008275967597961426, 0.00808291244506836, 0.008046751976013183, 0.008059264183044434, 0.008085503578186035, 0.007964672088623047, 0.007993343830108643, 0.008001055717468262, 0.008038368225097656, 0.008157695770263672, 0.008253439903259278, 0.008138751983642578, 0.008196096420288086, 0.008193087577819825, 0.00810694408416748, 0.008030207633972167, 0.007966720104217529, 0.00798467206954956, 0.007971360206604003, 0.008165311813354493, 0.008267200469970704, 0.008436287879943848, 0.008556223869323731, 0.008446271896362306, 0.00830835247039795, 0.008552831649780273, 0.008658304214477539, 0.00851417636871338, 0.008351743698120117, 0.008291647911071778, 0.008297183990478515, 0.00825712013244629, 0.008360320091247559, 0.008189824104309082, 0.008130496025085449, 0.008180224418640136, 0.008197823524475097, 0.008269824028015137, 0.008309920310974122, 0.008174528121948242, 0.008161375999450684, 0.008107487678527831, 0.008431743621826172, 0.008214752197265624, 0.00818825626373291, 0.00803395175933838, 0.00808140754699707, 0.008119808197021485, 0.008130559921264649, 0.00838912010192871, 0.008478719711303711, 0.008484864234924316, 0.008355839729309082, 0.008289695739746094, 0.008280672073364258, 0.008333312034606934, 0.008519680023193359, 0.00829212760925293, 0.00808777618408203, 0.00809779167175293, 0.008170783996582031, 0.008368864059448242, 0.00807372760772705, 0.008327168464660644, 0.008491007804870606, 0.008148991584777832, 0.008132287979125977, 0.008038080215454102, 0.00799014377593994, 0.008025856018066406, 0.008228863716125488, 0.008476256370544433, 0.00838419246673584, 0.008294976234436036, 0.008198304176330566, 0.008206175804138184, 0.00829974365234375, 0.008547264099121094, 0.008525504112243652, 0.008403264045715331, 0.008376607894897461, 0.008361408233642578, 0.008531455993652343, 0.008562975883483887, 0.00856713581085205, 0.00840886402130127, 0.008364671707153321, 0.008375200271606445, 0.008358495712280273, 0.008333567619323731, 0.008259455680847167, 0.008468544006347656, 0.008330656051635741, 0.008266400337219239, 0.008178943634033204, 0.00832579231262207, 0.008216671943664551, 0.00854412841796875, 0.008306112289428711, 0.008290719985961915, 0.008437952041625977, 0.00832316780090332, 0.008240480422973633, 0.00841590404510498, 0.008347935676574707, 0.008347040176391601, 0.00865328025817871, 0.008306528091430664, 0.008287232398986816, 0.008217599868774414, 0.008336735725402831, 0.008370528221130371, 0.008427712440490722, 0.008331392288208008, 0.008293888092041016, 0.00841983985900879, 0.008529919624328614, 0.008481056213378906, 0.008519200325012207, 0.008364224433898925, 
0.008295680046081543, 0.008223487854003906, 0.008154111862182617, 0.00812339210510254, 0.008159135818481445, 0.008236448287963867, 0.008600128173828126, 0.008493056297302246, 0.008465503692626953, 0.008273088455200195, 0.008203007698059083, 0.008550463676452636, 0.008588191986083984, 0.010002752304077149, 0.00867296028137207, 0.008240287780761718, 0.008157471656799316, 0.008440320014953612, 0.008505279541015626, 0.008552576065063476, 0.008591168403625489, 0.008485055923461915, 0.008316448211669922, 0.008450528144836426, 0.008347647666931152, 0.00828006362915039, 0.008299903869628906, 0.008428319931030274, 0.008572256088256836, 0.008470623970031739, 0.00844979190826416, 0.008491616249084472, 0.008536383628845215, 0.008495903968811036, 0.00866198444366455, 0.00853219223022461, 0.008431391716003418, 0.008480832099914551, 0.008421119689941406, 0.008550592422485351, 0.008783552169799804, 0.008365440368652343, 0.008335871696472168, 0.00822316837310791, 0.008155136108398438, 0.008136704444885253, 0.008103167533874512, 0.008094464302062988, 0.008187904357910156, 0.008322079658508301, 0.00836297607421875, 0.008487135887145995, 0.008410016059875488, 0.008301440238952637, 0.008353792190551757, 0.008359456062316894, 0.008275456428527832, 0.00817046356201172, 0.008167136192321777, 0.008337696075439454, 0.008477760314941405, 0.008567744255065918, 0.008835071563720704, 0.008524160385131836, 0.00831436824798584, 0.008167360305786133, 0.008154560089111328, 0.008239647865295411, 0.007981023788452148, 0.00805337619781494, 0.008075424194335937, 0.007984352111816407, 0.0080250244140625, 0.008019807815551758, 0.008181759834289551, 0.008272928237915039, 0.008317503929138183, 0.008543935775756836, 0.008490719795227052, 0.008501279830932617, 0.008325471878051757, 0.008342144012451171, 0.008417280197143554, 0.008378368377685547, 0.008416864395141601, 0.008936863899230957, 0.008348671913146973, 0.008507391929626466, 0.008493056297302246, 0.008456000328063965, 0.008521984100341797, 0.008660927772521972, 0.008773856163024903, 0.008524736404418946, 0.008498016357421876, 0.008807583808898926, 0.008489824295043946, 0.008476287841796874, 0.008496671676635743, 0.008440192222595216, 0.008616543769836426, 0.008331135749816894, 0.008194047927856446, 0.008173567771911621, 0.00827625560760498, 0.008230112075805663, 0.008634880065917968, 0.008355392456054687, 0.00826358413696289, 0.008108575820922852, 0.008154751777648926, 0.008067456245422364, 0.008034272193908691, 0.00803228759765625, 0.008056832313537597, 0.008468480110168456, 0.008479935646057129, 0.008289216041564941, 0.00835142421722412, 0.008232640266418458, 0.008229120254516602, 0.008286687850952148, 0.008476448059082032, 0.008456192016601562, 0.008443903923034669, 0.00850483226776123, 0.008499520301818848, 0.008613311767578125, 0.008526080131530762, 0.008360447883605958, 0.008357888221740722, 0.008232928276062012, 0.0083853759765625, 0.008403008460998534, 0.008356863975524903, 0.008250368118286134, 0.008292351722717285, 0.008486559867858887, 0.008591135978698731, 0.00847110366821289, 0.008386560440063476, 0.008230912208557128, 0.008144895553588867, 0.008285696029663087, 0.008130144119262696, 0.00812662410736084, 0.008098591804504394, 0.008072383880615234, 0.00818665599822998, 0.008347455978393554, 0.008509056091308594, 0.008597824096679687, 0.008417535781860351, 0.008417280197143554, 0.008645888328552245, 0.008354559898376464, 0.008396127700805663, 0.00839033603668213, 0.008549344062805176, 0.008593119621276856, 0.00849948787689209, 0.00844934368133545, 
0.008407744407653808, 0.008566783905029298, 0.008455231666564941, 0.008331775665283203, 0.008417599678039551, 0.008460384368896484, 0.008302623748779298, 0.008165375709533691, 0.008308159828186034, 0.008319135665893554, 0.008290975570678712, 0.008187647819519042, 0.008077312469482421, 0.008122400283813476, 0.00819315242767334, 0.00810649585723877, 0.008220352172851563, 0.00801587200164795, 0.008063167572021484, 0.0080164155960083, 0.008062911987304687, 0.007990816116333007, 0.007985631942749023, 0.008064000129699708, 0.00842240047454834, 0.008576319694519044, 0.00855519962310791, 0.008228992462158203, 0.008252511978149414, 0.008172287940979003, 0.008324735641479493, 0.008303071975708008, 0.008181599617004395, 0.008525376319885253, 0.008513728141784667, 0.0084235200881958, 0.008639840126037598, 0.008487903594970703, 0.008484864234924316, 0.008605695724487305, 0.008460288047790527, 0.008486847877502441, 0.008362048149108887, 0.008273920059204102, 0.00830463981628418, 0.008208383560180664, 0.008187456130981445, 0.008864128112792968, 0.00864998435974121, 0.00861676788330078, 0.009211999893188477, 0.008236960411071777, 0.008302592277526855, 0.008456192016601562, 0.008475872039794921, 0.00861673641204834, 0.00847606372833252, 0.008650367736816406, 0.008737983703613282, 0.008650015830993653, 0.008571264266967773, 0.008437888145446777, 0.008568256378173829, 0.008350111961364747, 0.008177824020385742, 0.008120223999023438, 0.00813417625427246, 0.008196576118469238, 0.008304991722106934, 0.008441216468811035, 0.008232895851135253, 0.00831942367553711, 0.00852121639251709, 0.008184384346008301, 0.008292287826538085, 0.008134655952453614, 0.008112128257751466, 0.008132448196411133, 0.008179295539855956, 0.008274496078491211, 0.008304320335388183, 0.008236800193786621, 0.008333632469177246, 0.008198399543762207, 0.008365823745727538, 0.00832921600341797, 0.008423616409301758, 0.008585056304931641, 0.008304032325744629, 0.008250176429748535, 0.008218175888061523, 0.008159680366516113, 0.008155136108398438, 0.008089599609375, 0.008546303749084473, 0.008078528404235839, 0.008096927642822266, 0.008194496154785156, 0.007995007991790771, 0.008026495933532714, 0.008001440048217774, 0.008232864379882812, 0.00834598445892334, 0.008450048446655273, 0.008591391563415528, 0.008654815673828125, 0.008589311599731446, 0.008564736366271973, 0.008566271781921387, 0.008468159675598145, 0.008812383651733398, 0.008422368049621582, 0.008356127738952637, 0.008275679588317871, 0.008263360023498536, 0.008216896057128907, 0.00828758430480957, 0.00841983985900879, 0.008331423759460449, 0.00828611183166504, 0.008232416152954102, 0.008276063919067383, 0.00833795166015625, 0.008241151809692383, 0.008034303665161132, 0.007924799919128417, 0.007915552139282227, 0.008047136306762695, 0.008191519737243653, 0.008090047836303712, 0.008049056053161622, 0.008033760070800781, 0.008239359855651855, 0.008652928352355958, 0.008412991523742676, 0.008470175743103027, 0.008372063636779786, 0.008317184448242188, 0.00832380771636963, 0.008177536010742188, 0.008122367858886719, 0.008002943992614746, 0.007998079776763915, 0.007978367805480956, 0.008300224304199218, 0.008360256195068359, 0.0081844482421875, 0.00820019245147705, 0.009277440071105958, 0.008279871940612794, 0.00808569622039795, 0.008163328170776368, 0.008333312034606934, 0.008546431541442872, 0.008609663963317872, 0.008595680236816406, 0.008615488052368165, 0.008722559928894043, 0.008186847686767577, 0.008368127822875977, 0.00819200038909912, 0.008048192024230957, 0.008068896293640137, 
0.008684255599975587, 0.008110015869140626, 0.008671232223510742, 0.008306112289428711, 0.009097151756286621, 0.008798848152160645, 0.008244288444519044, 0.00821446418762207, 0.008372575759887695, 0.008743583679199219, 0.008548352241516113, 0.008304032325744629, 0.008849856376647949, 0.008466591835021972, 0.008455391883850097, 0.008193920135498047, 0.008168352127075196, 0.008011743545532227, 0.008216608047485351, 0.00807913589477539, 0.008062687873840332, 0.008203071594238281, 0.008142144203186034, 0.008132991790771484, 0.008484864234924316, 0.00833743953704834, 0.008362175941467286, 0.008283967971801759, 0.008486880302429198, 0.008244447708129883, 0.008339327812194824, 0.008360447883605958, 0.008481184005737304, 0.008665087699890137, 0.008570879936218261, 0.00840294361114502, 0.008417280197143554, 0.008826848030090331, 0.0086212158203125, 0.008470720291137695, 0.008880831718444825, 0.011236672401428222, 0.00931059169769287, 0.008454463958740234, 0.008196096420288086, 0.00810153579711914, 0.008171456336975098, 0.008034111976623536, 0.007997727870941163, 0.008010047912597656, 0.00807919979095459, 0.00840719985961914, 0.008341504096984862, 0.008046208381652832, 0.007915616035461426, 0.007931968212127686, 0.00785814380645752, 0.00784819221496582, 0.00840499210357666, 0.008392543792724609, 0.008231072425842286, 0.008404352188110352, 0.00820844841003418, 0.008217151641845704, 0.008535264015197754, 0.008381216049194336, 0.008258624076843262, 0.008239680290222168, 0.008194432258605958, 0.008219903945922851, 0.008194815635681153, 0.008148991584777832, 0.007991551876068115, 0.007947264194488525, 0.007922080039978028, 0.007887167930603027, 0.007928224086761474, 0.00800921630859375, 0.008417407989501953, 0.008666624069213867, 0.008603424072265624, 0.008616095542907714, 0.00857363224029541, 0.008460160255432128, 0.008380415916442872, 0.00828166389465332, 0.008448543548583985, 0.008463616371154786, 0.008170080184936524, 0.008065088272094727, 0.007980576038360596, 0.007960608005523681, 0.007971263885498047, 0.008167424201965333, 0.00831488037109375, 0.008226816177368163, 0.008178688049316407, 0.008449024200439453, 0.008407039642333984, 0.008070624351501465, 0.008073760032653808, 0.008359935760498047, 0.008291711807250976, 0.00817625617980957, 0.008065376281738281, 0.008107680320739746, 0.00825260829925537, 0.008116479873657226, 0.008038975715637208, 0.008048992156982421, 0.007956128120422363, 0.008148063659667968, 0.008534879684448242, 0.008304703712463378, 0.008199551582336425, 0.008147295951843261, 0.008075263977050781, 0.008067487716674804, 0.008081119537353515, 0.008183296203613282, 0.00852560043334961, 0.008423359870910644, 0.008521920204162597, 0.008351552009582519, 0.008386655807495117, 0.008320704460144043, 0.008333632469177246, 0.008140800476074218, 0.008011775970458984, 0.008249343872070313, 0.007929855823516846, 0.007964672088623047, 0.007979008197784423, 0.00820633602142334, 0.008935423851013183, 0.00862003231048584, 0.008289823532104492, 0.008305215835571289, 0.0084683837890625, 0.00825654411315918, 0.008097920417785644, 0.008061216354370118, 0.008040384292602539, 0.008004511833190918, 0.008230624198913574, 0.008390175819396972, 0.008348128318786622, 0.008115424156188964, 0.008161312103271484, 0.008471296310424805, 0.008167327880859375, 0.008062080383300781, 0.008191136360168457, 0.008146719932556152, 0.008073247909545899, 0.00833676815032959, 0.008116352081298827, 0.007987711906433105, 0.007921664237976075, 0.007921664237976075, 0.008173824310302735, 0.008551456451416016, 
0.008560832023620606, 0.008646719932556153, 0.00862399959564209, 0.008491616249084472, 0.00848908805847168, 0.008255359649658203, 0.00830787181854248, 0.008190784454345703, 0.008233023643493651, 0.008042624473571777, 0.00812399959564209, 0.008027711868286132, 0.007998144149780274, 0.007976960182189942, 0.008002911567687988, 0.00812713623046875, 0.008550016403198243, 0.00856112003326416, 0.00820576000213623, 0.008137184143066406, 0.008199423789978027, 0.008426112174987793, 0.00812880039215088, 0.008356063842773438, 0.008142848014831543, 0.008099136352539062, 0.00818239974975586, 0.00812992000579834, 0.008083744049072266, 0.008203840255737305, 0.008388928413391113, 0.008256031990051269, 0.008214112281799316, 0.008565152168273926, 0.008335200309753418, 0.008150527954101563, 0.008063936233520507, 0.008044256210327148, 0.007970464229583741, 0.007915872097015381, 0.00790502405166626, 0.008302783966064453, 0.008859231948852539, 0.008552927970886231, 0.008525216102600097, 0.008458847999572755, 0.008341407775878907, 0.008337504386901855, 0.00820019245147705, 0.008064800262451172, 0.00802019214630127, 0.008025728225708007, 0.008016256332397461, 0.007983104228973388, 0.007948287963867188, 0.007999743938446045, 0.007988096237182617, 0.008270272254943848, 0.008196543693542481, 0.007996992111206054, 0.00811411190032959, 0.008144895553588867, 0.00809177589416504, 0.008313216209411622, 0.008396544456481934, 0.008177472114562987, 0.00800812816619873, 0.007985151767730713, 0.007974080085754395, 0.008003487586975097, 0.007978208065032959, 0.007951648235321044, 0.008077247619628906, 0.00804911994934082, 0.00791756820678711, 0.007942143917083741, 0.008171520233154296, 0.008089632034301757, 0.007984384059906006, 0.008079551696777343, 0.008093695640563964, 0.008130208015441894, 0.008411328315734863, 0.008708800315856934, 0.008506367683410645, 0.008454143524169922, 0.00865884780883789, 0.008448127746582032, 0.00828822422027588, 0.00819200038909912, 0.008239104270935058, 0.008218624114990235, 0.008334495544433594, 0.008170175552368163, 0.008070816040039062, 0.008052255630493164, 0.008004608154296875, 0.007892960071563721, 0.00820633602142334, 0.008373503684997558, 0.008169568061828614, 0.008089599609375, 0.008493696212768555, 0.00831491184234619, 0.008093695640563964, 0.008099840164184571, 0.008072511672973633, 0.008121024131774903, 0.008099519729614257, 0.007995359897613526, 0.007963263988494873, 0.008063839912414552, 0.008090208053588867, 0.008007807731628418, 0.008288000106811523, 0.008364447593688965, 0.008232704162597656, 0.008270079612731934, 0.008054783821105957, 0.007943967819213867, 0.00787663984298706, 0.007902751922607422, 0.007883423805236817, 0.007914783954620362, 0.008302335739135741, 0.008564864158630371, 0.008680224418640137, 0.008549951553344726, 0.008594143867492675, 0.008519455909729004, 0.008575072288513183, 0.008613759994506837, 0.00842959976196289, 0.008267775535583496, 0.008349920272827148, 0.008248448371887207, 0.008104063987731934, 0.008280608177185058, 0.008378656387329101, 0.00829206371307373, 0.008140800476074218, 0.008396800041198731, 0.008746623992919922, 0.008175775527954102, 0.008378591537475586, 0.008978431701660156, 0.008488960266113281, 0.007985151767730713, 0.008249279975891113, 0.008201727867126465, 0.008133184432983399, 0.008124416351318359, 0.008108256340026855, 0.008061984062194823, 0.008010496139526367, 0.007982624053955079, 0.007936480045318604, 0.007929408073425293, 0.007891200065612792, 0.007823552131652832, 0.008043999671936036, 0.008509984016418457, 
0.008179455757141114, 0.008226207733154297, 0.008029024124145507, 0.008134655952453614, 0.008047840118408204, 0.008176416397094726, 0.008130559921264649, 0.008011199951171876, 0.007973440170288086, 0.007966495990753174, 0.008197759628295899, 0.008629983901977539, 0.008559488296508789, 0.008609919548034669, 0.008513216018676759, 0.008305952072143555, 0.008380319595336914, 0.008340352058410645, 0.008226271629333495, 0.008026080131530762, 0.0080513916015625, 0.007966720104217529, 0.007896992206573486, 0.008033632278442383, 0.008243295669555664, 0.008204959869384765, 0.008069120407104492, 0.008114175796508789, 0.008463744163513184, 0.008304448127746581, 0.008116448402404785, 0.007993408203125, 0.008055359840393066, 0.008008735656738281, 0.00798528003692627, 0.007956384181976318, 0.007905759811401368, 0.007999584197998047, 0.008100064277648925, 0.008003583908081055, 0.007946368217468261, 0.007911424160003662, 0.008095711708068847, 0.007929887771606445, 0.007823359966278077, 0.007791711807250977, 0.007845920085906982, 0.007817440032958985, 0.008020832061767577, 0.008016448020935058, 0.008191264152526856, 0.008037088394165039, 0.007976960182189942, 0.00798089599609375, 0.007878431797027588, 0.007895423889160156, 0.007921664237976075, 0.008054783821105957, 0.008519680023193359, 0.008683775901794434, 0.008574720382690429, 0.00854422378540039, 0.008513567924499511, 0.008310784339904785, 0.00821350383758545, 0.008184831619262695, 0.008077280044555664, 0.00793398380279541, 0.00801091194152832, 0.008084320068359374, 0.00799948787689209, 0.007918784141540527, 0.007942751884460449, 0.008067584037780762, 0.00801529598236084, 0.009736063957214356, 0.010015135765075683, 0.008671232223510742, 0.008746272087097167, 0.008314784049987794, 0.008313023567199707, 0.00828604793548584, 0.008320863723754884, 0.00812947177886963, 0.008040224075317383, 0.008100159645080566, 0.008004960060119628, 0.008047264099121093, 0.008218015670776367, 0.00814095973968506, 0.008052767753601075, 0.007964735984802246, 0.007909887790679932, 0.007898880004882812, 0.007847936153411865, 0.00786787223815918, 0.008053088188171386, 0.008134016036987305, 0.008071904182434081, 0.008062047958374024, 0.007986239910125733, 0.008013216018676758, 0.007948639869689942, 0.00790118408203125, 0.007892288208007813, 0.007910272121429444, 0.007943999767303467, 0.008232224464416503, 0.00857795238494873, 0.0085731840133667, 0.008666303634643555, 0.008530495643615722, 0.00810086441040039, 0.008224767684936523, 0.008120256423950195, 0.008083104133605957, 0.0080797758102417, 0.008028287887573243, 0.00818284797668457, 0.008061984062194823, 0.008080256462097169, 0.008491359710693359, 0.008476799964904785, 0.00821292781829834, 0.008077312469482421, 0.008114175796508789, 0.008019935607910157, 0.008079392433166504, 0.007921664237976075, 0.007870816230773925, 0.007877823829650878, 0.007954271793365478, 0.00816316795349121, 0.0081081600189209, 0.00799180793762207, 0.007944352149963379, 0.007925439834594726, 0.007937471866607666, 0.00790822410583496, 0.008116543769836426, 0.008072319984436036, 0.008022591590881348, 0.007999167919158936, 0.00786198377609253, 0.007845536231994629, 0.007854047775268555, 0.008104415893554687, 0.008836768150329589, 0.00863702392578125, 0.00854860782623291, 0.00856454372406006, 0.008382528305053712, 0.008247424125671386, 0.008433664321899414, 0.00832431983947754, 0.008137503623962402, 0.008054847717285157, 0.008040063858032227, 0.007966432094573974, 0.007985023975372315, 0.00820911979675293, 0.007979008197784423, 0.008433759689331055, 
0.008546015739440919, 0.008310815811157227, 0.008165696144104003, 0.00819388771057129, 0.008173567771911621, 0.008044544219970704, 0.00827187156677246, 0.008140800476074218, 0.007962175846099854, 0.007987552165985108, 0.008020064353942872, 0.008130559921264649, 0.007943999767303467, 0.008036607742309571, 0.007968512058258057, 0.007925280094146728, 0.00794159984588623, 0.008189151763916016, 0.008091423988342285, 0.008025631904602051, 0.007999328136444092, 0.007933631896972656, 0.008051648139953613, 0.00819974422454834, 0.008421152114868165, 0.008501343727111817, 0.0084137601852417, 0.00841318416595459, 0.008475872039794921, 0.008506048202514648, 0.008560480117797852, 0.008425727844238281, 0.008361087799072266, 0.008130496025085449, 0.008211392402648926, 0.008130304336547852, 0.00799564790725708, 0.007981056213378907, 0.007982624053955079, 0.008571136474609374, 0.008288479804992676, 0.008104127883911133, 0.008101471900939941, 0.008231167793273925, 0.008155103683471679, 0.008447999954223634, 0.008294400215148925, 0.008199839591979981, 0.008261088371276856, 0.008442239761352539, 0.008205120086669921, 0.008139552116394043, 0.008012096405029296, 0.007983712196350098, 0.008044544219970704, 0.008105376243591308, 0.007918176174163818, 0.008075136184692383, 0.008065152168273925, 0.008039423942565918, 0.007978303909301758, 0.008029888153076172, 0.008065024375915527, 0.007975135803222656, 0.007892159938812256, 0.008155743598937988, 0.007886847972869874, 0.007906911849975586, 0.00841801643371582, 0.008842944145202637, 0.00868556785583496, 0.00871014404296875, 0.00861184024810791, 0.0085032958984375, 0.008278016090393067]",tokens/s,121.20742458031037,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 717, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 51368 has 14.71 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 4.20 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,868.368384,4804.44416,0.0,4401.922048,4400.206336,s,1,7.70624658203125,7.70624658203125,0.0,7.70624658203125,7.70624658203125,7.70624658203125,7.70624658203125,[7.70624658203125],,kWh,5.073968550004792e-06,5.522655589149401e-07,8.986118300025048e-07,6.524845938922237e-06,,MB,1190.383616,4903.010304,0.0,4490.002432,4455.927296,s,10,1.6124855651855468,0.1612485565185547,0.004990177800993483,0.1616930389404297,0.1654776123046875,0.1674062484741211,0.16894915740966798,"[0.14879971313476562, 0.16000431823730468, 0.16504902648925782, 0.1599872589111328, 0.16199977111816405, 0.1693348846435547, 0.16281391906738282, 0.1595737609863281, 0.16353660583496094, 0.1613863067626953]",tokens/s,1587.6111112383346,kWh,4.472199521275127e-06,4.930443897680599e-07,2.978028476697082e-06,7.943272387740268e-06,tokens/kWh,32228530.950935684,MB,1219.207168,4911.398912,0.0,4498.39104,4455.929856,s,10,13.609187255859371,1.3609187255859374,0.0018868683463867018,1.3605836181640625,1.3636239624023436,1.3640104064941405,1.364319561767578,"[1.35796533203125, 1.3594329833984375, 1.359353271484375, 1.361396484375, 1.359854736328125, 1.362082275390625, 1.3643968505859374, 1.3635380859375, 1.3609866943359374, 1.3601805419921875]",tokens/s,46.29225744019036,kWh,3.990777365122577e-05,4.401657985236045e-06,2.6232522669502777e-05,7.05419543059646e-05,tokens/kWh,893085.5491577032,,s,630,13.606447031021105,0.02159753496987479,0.0004371107145675281,0.02149452781677246,0.02172998161315918,0.02203306369781494,0.02436198320388794,"[0.024171360015869142, 0.022544160842895507, 0.02195612716674805, 0.021617311477661133, 0.021315584182739256, 0.021329919815063478, 0.02129007911682129, 0.02120297622680664, 0.02139779281616211, 0.021432416915893555, 0.021516799926757812, 0.02142310333251953, 0.02144358444213867, 0.021394784927368165, 0.021475200653076173, 0.02144745635986328, 0.02138015937805176, 0.02138822364807129, 0.02146908760070801, 0.021470624923706053, 0.021406400680541993, 0.02138489532470703, 0.02143449592590332, 0.02144060707092285, 0.021426271438598633, 0.02151628875732422, 0.021604352951049805, 0.02152448081970215, 0.02152169609069824, 0.02143715286254883, 0.02146451187133789, 0.02149225616455078, 0.021458080291748047, 0.021400447845458984, 0.021536415100097656, 0.02147567939758301, 0.021476543426513672, 0.021463808059692384, 0.02140985679626465, 0.021511423110961915, 0.021563520431518556, 0.02157382392883301, 0.021588415145874024, 0.021776384353637695, 0.02185968017578125, 0.021861024856567383, 0.02185215950012207, 0.021786624908447266, 0.02165283203125, 0.02150057601928711, 0.021394559860229492, 0.021400447845458984, 0.021362688064575194, 0.021313535690307618, 
0.02147327995300293, 0.02146918487548828, 0.02143846321105957, 0.0214583683013916, 0.021481727600097655, 0.02145635223388672, 0.021463008880615236, 0.021415935516357423, 0.02152332878112793, 0.024396127700805664, 0.02269593620300293, 0.022041919708251954, 0.02168681526184082, 0.021471391677856444, 0.021396799087524412, 0.02142620849609375, 0.021420703887939454, 0.021346208572387695, 0.02139334487915039, 0.021474687576293946, 0.021351200103759765, 0.021366720199584962, 0.021421728134155275, 0.02144707107543945, 0.02141993522644043, 0.02147747230529785, 0.021379072189331053, 0.021429471969604492, 0.02139321517944336, 0.02133705520629883, 0.021405696868896484, 0.021391359329223633, 0.021425216674804688, 0.021447616577148436, 0.0214466552734375, 0.021432064056396485, 0.021481727600097655, 0.02141164779663086, 0.021461183547973633, 0.021415552139282226, 0.02139936065673828, 0.021537343978881837, 0.021431455612182616, 0.02158608055114746, 0.021453088760375976, 0.021616832733154297, 0.021651199340820312, 0.02160688018798828, 0.021485567092895508, 0.02153990364074707, 0.02161964797973633, 0.021581056594848633, 0.021805631637573243, 0.021835968017578124, 0.021839679718017577, 0.02175200080871582, 0.021744960784912108, 0.021562047958374023, 0.021554719924926757, 0.02159459114074707, 0.02164531135559082, 0.02161033630371094, 0.02151030349731445, 0.021534719467163087, 0.02150601577758789, 0.02141391944885254, 0.02145894432067871, 0.021484735488891602, 0.02136262321472168, 0.02146121597290039, 0.021430944442749022, 0.021472799301147462, 0.024706655502319336, 0.02291244888305664, 0.022295488357543945, 0.021780479431152345, 0.021473087310791016, 0.02141596794128418, 0.02139468765258789, 0.021339040756225586, 0.021295103073120117, 0.021350080490112305, 0.021344575881958008, 0.021325664520263674, 0.021334175109863282, 0.02142780876159668, 0.021420448303222657, 0.02149171257019043, 0.021470975875854493, 0.02149123191833496, 0.021514976501464844, 0.02146918487548828, 0.021622016906738283, 0.021342975616455078, 0.021405567169189454, 0.02141606330871582, 0.021413888931274414, 0.021414976119995117, 0.021326784133911134, 0.021386592864990235, 0.0215251522064209, 0.02157756805419922, 0.021546527862548827, 0.02153536033630371, 0.02148761558532715, 0.02148054313659668, 0.021537696838378906, 0.021415935516357423, 0.021534624099731444, 0.02149590492248535, 0.021369888305664064, 0.021424991607666016, 0.02143449592590332, 0.02142617607116699, 0.02150399971008301, 0.021794815063476563, 0.021737375259399415, 0.02177987289428711, 0.02177299118041992, 0.02168422317504883, 0.02167193603515625, 0.021605791091918944, 0.021524383544921876, 0.021541343688964844, 0.02155084800720215, 0.021487552642822264, 0.021493824005126953, 0.021479328155517577, 0.02144723129272461, 0.021429824829101562, 0.021422527313232423, 0.021399551391601563, 0.0214400634765625, 0.021430719375610353, 0.02151420783996582, 0.02436297607421875, 0.022849599838256837, 0.022174047470092773, 0.02178451156616211, 0.021524639129638673, 0.021380096435546874, 0.021539360046386718, 0.02143075180053711, 0.021466976165771486, 0.021514400482177735, 0.021518335342407227, 0.021423328399658204, 0.021598655700683592, 0.02144700813293457, 0.021506048202514647, 0.021454048156738282, 0.02151296043395996, 0.021487648010253907, 0.021501888275146486, 0.02152774429321289, 0.021520639419555666, 0.021406080245971678, 0.021408000946044923, 0.021459999084472655, 0.021437408447265625, 0.021482559204101564, 0.02155820846557617, 0.021518335342407227, 0.021518335342407227, 
0.021491519927978514, 0.02150003242492676, 0.02149996757507324, 0.02148134422302246, 0.021381023406982422, 0.0215185604095459, 0.021424127578735352, 0.02141939163208008, 0.021477792739868166, 0.02136675262451172, 0.021459199905395507, 0.022062944412231444, 0.021524639129638673, 0.021603839874267578, 0.021698272705078125, 0.021766048431396484, 0.021707647323608397, 0.02168217658996582, 0.021590015411376954, 0.021606399536132814, 0.02162393569946289, 0.021459775924682616, 0.021469247817993163, 0.021491455078125, 0.02175820732116699, 0.02165350341796875, 0.021555200576782226, 0.02156515121459961, 0.021640575408935547, 0.021521312713623047, 0.02146249580383301, 0.0214116153717041, 0.021433088302612306, 0.02151219177246094, 0.024359552383422852, 0.022927488327026367, 0.022120256423950196, 0.021696704864501953, 0.02147123146057129, 0.021495231628417967, 0.021389919281005858, 0.021395423889160155, 0.021408927917480468, 0.021389535903930664, 0.021448511123657227, 0.021486303329467774, 0.021450048446655275, 0.021448863983154296, 0.021438144683837892, 0.021443519592285156, 0.021546911239624024, 0.021538719177246094, 0.021672128677368164, 0.02149990463256836, 0.021550687789916992, 0.021526943206787108, 0.021465087890625, 0.02145280075073242, 0.02147942352294922, 0.021399295806884766, 0.02143257522583008, 0.021391359329223633, 0.02136617660522461, 0.021408031463623047, 0.021364479064941405, 0.021590591430664063, 0.021604352951049805, 0.021507232666015626, 0.021471935272216795, 0.021452735900878907, 0.021452959060668946, 0.021479488372802735, 0.021368831634521485, 0.02142617607116699, 0.021534368515014647, 0.02144086456298828, 0.021431360244750976, 0.021567583084106445, 0.021666048049926757, 0.022022239685058592, 0.021701120376586915, 0.021714719772338867, 0.021647584915161132, 0.021647008895874023, 0.021669567108154295, 0.021576351165771484, 0.021605535507202147, 0.02150441551208496, 0.02151059150695801, 0.02151628875732422, 0.021529855728149413, 0.021350271224975587, 0.021409759521484373, 0.02140995216369629, 0.021410144805908204, 0.02145526313781738, 0.021435871124267578, 0.024627231597900392, 0.022957759857177733, 0.02235420799255371, 0.021659648895263672, 0.021565376281738283, 0.02149177551269531, 0.021489664077758788, 0.021377023696899415, 0.02144380760192871, 0.021462976455688478, 0.021418527603149416, 0.021391679763793945, 0.021421728134155275, 0.02143062400817871, 0.02146272087097168, 0.021424448013305664, 0.021432319641113282, 0.021497856140136717, 0.02146303939819336, 0.02143168067932129, 0.021676223754882814, 0.02147577667236328, 0.021493759155273438, 0.02150399971008301, 0.02150399971008301, 0.02144976043701172, 0.02143324851989746, 0.021448768615722657, 0.02152448081970215, 0.022015647888183595, 0.021420383453369142, 0.02146886444091797, 0.021473600387573243, 0.021436351776123047, 0.0214355525970459, 0.02157776069641113, 0.021590911865234375, 0.021563392639160156, 0.02153388786315918, 0.021482303619384767, 0.021552896499633788, 0.021543167114257813, 0.02147737693786621, 0.02171023941040039, 0.021725088119506835, 0.021760704040527344, 0.02171664047241211, 0.02163462448120117, 0.021539615631103515, 0.021614463806152343, 0.021690496444702147, 0.021650848388671876, 0.021729888916015624, 0.021639167785644533, 0.021639167785644533, 0.021557247161865235, 0.0215548152923584, 0.021469568252563475, 0.021489664077758788, 0.021385215759277345, 0.021435871124267578, 0.021466751098632813, 0.021515167236328125, 0.02478803253173828, 0.023114656448364256, 0.02231827163696289, 0.021926591873168946, 
0.021563520431518556, 0.02146918487548828, 0.0214234561920166, 0.021389984130859376, 0.021481472015380858, 0.02144223976135254, 0.021340192794799803, 0.021428224563598632, 0.02141404724121094, 0.021524608612060545, 0.02172313690185547, 0.021466880798339843, 0.021602527618408203, 0.02145692825317383, 0.021489664077758788, 0.021454368591308594, 0.021450784683227538, 0.022614463806152344, 0.021352352142333983, 0.021514272689819335, 0.021504064559936524, 0.021507999420166016, 0.021586015701293947, 0.021540864944458008, 0.02166988754272461, 0.02164121627807617, 0.021542623519897462, 0.0215063362121582, 0.021489664077758788, 0.021506048202514647, 0.021512096405029296, 0.021536352157592774, 0.021501951217651367, 0.021426687240600584, 0.021409503936767577, 0.0214749755859375, 0.021596607208251954, 0.021616832733154297, 0.021684160232543947, 0.021616384506225585, 0.021770336151123046, 0.021782751083374022, 0.02182143974304199, 0.02167193603515625, 0.021651391983032228, 0.021632192611694336, 0.021564287185668947, 0.021602304458618164, 0.02147532844543457, 0.02146656036376953, 0.02154310417175293, 0.02150752067565918, 0.02174620819091797, 0.021491392135620117, 0.021571840286254883, 0.021606847763061522, 0.02148316764831543, 0.021504383087158203, 0.021610496520996093, 0.024406208038330077, 0.022962335586547852, 0.022054912567138672, 0.021700288772583006, 0.021565759658813476, 0.021531999588012694, 0.02149990463256836, 0.02145756721496582, 0.02149135971069336, 0.02148700714111328, 0.021504575729370118, 0.02145712089538574, 0.021469343185424806, 0.021485567092895508, 0.021481472015380858, 0.021372928619384765, 0.021499296188354493, 0.02146361541748047, 0.021372415542602538, 0.021482015609741213, 0.021495391845703125, 0.02152057647705078, 0.02151641654968262, 0.021537952423095703, 0.021583999633789062, 0.021520992279052735, 0.021512416839599608, 0.021501951217651367, 0.02150399971008301, 0.02159971237182617, 0.02152707290649414, 0.021469247817993163, 0.021453792572021485, 0.021453792572021485, 0.023709632873535155, 0.021429855346679686, 0.02155094337463379, 0.021590656280517578, 0.021618015289306642, 0.02147315216064453, 0.02160310363769531, 0.02151375961303711, 0.021486047744750977, 0.021592063903808592, 0.02166579246520996, 0.021599584579467774, 0.021609119415283203, 0.021708799362182618, 0.021628000259399413, 0.0216912956237793, 0.021683551788330077, 0.021703327178955078, 0.021577184677124023, 0.02151273536682129, 0.02150809669494629, 0.021423200607299804, 0.021432640075683594, 0.02146774482727051, 0.021405696868896484, 0.021441696166992187, 0.021420127868652345, 0.021497695922851563, 0.021725696563720705, 0.024588895797729493, 0.022974687576293944, 0.022231039047241212, 0.021839872360229492, 0.021593439102172853, 0.02150057601928711, 0.02146303939819336, 0.021347904205322267, 0.021430719375610353, 0.021458112716674804, 0.021426528930664063, 0.021461471557617188, 0.02146246337890625, 0.02146067237854004, 0.021576576232910157, 0.02143427276611328, 0.0215, 0.021519519805908202, 0.02151487922668457, 0.0214366397857666, 0.02149990463256836, 0.021436416625976562, 0.021618112564086914, 0.0214736328125, 0.0215001277923584, 0.02147327995300293, 0.02147225570678711, 0.021445632934570313, 0.021428224563598632, 0.02165247917175293, 0.02150899124145508, 0.02150003242492676, 0.021481472015380858, 0.021544960021972655, 0.02138492774963379, 0.021440351486206054, 0.02146553611755371, 0.021391359329223633, 0.02145280075073242, 0.021428224563598632, 0.021440223693847658, 0.0215184326171875, 0.021469247817993163, 
0.021698688507080077, 0.02171219253540039, 0.02169455909729004, 0.021750368118286133, 0.02168832015991211, 0.021663616180419922, 0.02165567970275879, 0.021577728271484374, 0.02149990463256836, 0.02142576026916504, 0.021485984802246092, 0.021575551986694336, 0.02153049659729004, 0.02163737678527832, 0.021482656478881836, 0.02152534484863281, 0.02148761558532715, 0.021462047576904297, 0.021445600509643555, 0.021464864730834962, 0.024314016342163087, 0.022802400588989257, 0.022071296691894532, 0.021827423095703125, 0.021548288345336914, 0.0215350399017334, 0.021463647842407226, 0.021425472259521485, 0.02144326400756836, 0.021444351196289062, 0.021485055923461914, 0.021379840850830077, 0.021380895614624022, 0.021375200271606446, 0.02126985549926758, 0.02133468818664551, 0.021419551849365233, 0.021391839981079103, 0.021536767959594725, 0.02153267288208008, 0.021565439224243164, 0.02141798400878906, 0.021421247482299805, 0.02141676712036133, 0.021522432327270507, 0.0214835205078125, 0.021458623886108398, 0.021455167770385742, 0.02148761558532715, 0.02152992057800293, 0.021510847091674806, 0.021438047409057616, 0.021490079879760742, 0.021493600845336913, 0.02157583999633789, 0.021519392013549805, 0.02149065589904785, 0.021467103958129882, 0.02145414352416992, 0.021422815322875977, 0.021481472015380858, 0.02151219177246094, 0.021557247161865235, 0.021730815887451172, 0.021656063079833983, 0.021598207473754884, 0.021554943084716796, 0.02162073516845703, 0.021641471862792968, 0.02164735984802246, 0.021626880645751953, 0.021690336227416993, 0.021714111328125, 0.021653696060180663, 0.021531295776367188, 0.021587968826293946, 0.021454240798950194, 0.021499679565429686, 0.02152908706665039, 0.02149603271484375, 0.02152787208557129, 0.021484319686889648, 0.02149171257019043]",tokens/s,46.30158031436665,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.47264,11724.06272,0.0,11328.815104,11314.254848,s,1,7.47275341796875,7.47275341796875,0.0,7.47275341796875,7.47275341796875,7.47275341796875,7.47275341796875,[7.47275341796875],,kWh,1.2082180279158667e-05,1.3250766185224244e-06,4.895559471987387e-06,1.830281636966848e-05,,MB,1093.189632,12166.561792,0.0,11760.828416,11713.906688,s,10,4.051521362304687,0.40515213623046875,0.008479341193698546,0.40764956665039065,0.4102277038574219,0.4112820343017578,0.4121254986572266,"[0.3810806579589844, 0.4031009521484375, 0.40672128295898435, 0.4038128662109375, 0.409993408203125, 0.4085778503417969, 0.406339111328125, 0.409971435546875, 0.41233636474609375, 0.40958743286132815]",tokens/s,631.8614098442657,kWh,1.1751291458666704e-05,1.295934292239087e-06,7.827084039440102e-06,2.087430979034589e-05,tokens/kWh,12263878.545981761,MB,1097.920512,12271.419392,0.0,11865.686016,11828.952576,s,10,30.397986328125,3.0397986328125,0.004010404397557613,3.0387587890625003,3.044893994140625,3.045714453125,3.0463708203125,"[3.033953125, 3.03819580078125, 
3.038205810546875, 3.040492919921875, 3.039311767578125, 3.03443017578125, 3.03797314453125, 3.046534912109375, 3.044711669921875, 3.044177001953125]",tokens/s,20.725057021856337,kWh,8.89646126238327e-05,9.81164368429653e-06,5.905577502235942e-05,0.00015783203133048863,tokens/kWh,399158.51978159393,,s,630,30.39387789535523,0.048244250627547974,0.00035791633083775157,0.0481997127532959,0.048510780334472654,0.048626995277404786,0.050413185539245606,"[0.05025820922851563, 0.04848777770996094, 0.04789548873901367, 0.04802080154418945, 0.04798847961425781, 0.04777388763427735, 0.047736351013183596, 0.047752128601074216, 0.047713409423828124, 0.04776025772094727, 0.048244735717773435, 0.04807475280761719, 0.047892478942871096, 0.04774092864990234, 0.04773023986816406, 0.047827262878417966, 0.04793363189697265, 0.04795180892944336, 0.04808499145507812, 0.048080894470214845, 0.048648094177246096, 0.04863945770263672, 0.048427486419677736, 0.048218273162841795, 0.04809081649780273, 0.048199745178222654, 0.04799929428100586, 0.047961215972900394, 0.04790345764160156, 0.04789052963256836, 0.04802886581420898, 0.04800518417358399, 0.04820156860351563, 0.048116416931152345, 0.04798396682739258, 0.048184192657470704, 0.048148574829101565, 0.04809104156494141, 0.04803379058837891, 0.04824198532104492, 0.04828639984130859, 0.0483061752319336, 0.048287391662597656, 0.04832092666625976, 0.04823139190673828, 0.04823139190673828, 0.04825398254394531, 0.04825187301635742, 0.04818700790405273, 0.04809971237182617, 0.04806646347045898, 0.048070209503173825, 0.048142879486083985, 0.04819968032836914, 0.048336894989013675, 0.04815462493896484, 0.04833484649658203, 0.04807884979248047, 0.04811174392700195, 0.04833443069458008, 0.04824092864990234, 0.04854988861083984, 0.04851007843017578, 0.05042982482910156, 0.04864176177978516, 0.04799939346313477, 0.04797030258178711, 0.048057376861572264, 0.04798358535766602, 0.04780441665649414, 0.047728641510009766, 0.04773231887817383, 0.048008865356445315, 0.048036830902099606, 0.04782057571411133, 0.04802377700805664, 0.04803152084350586, 0.047826335906982424, 0.04792995071411133, 0.04795619201660156, 0.048086814880371094, 0.04801945495605469, 0.0485478401184082, 0.048543262481689456, 0.0483988151550293, 0.048291519165039064, 0.048360992431640625, 0.04814934539794922, 0.048083038330078126, 0.048151424407958984, 0.04804022216796875, 0.04797328186035156, 0.047903648376464845, 0.04806335830688477, 0.048060352325439454, 0.04801337432861328, 0.0482529296875, 0.048395423889160155, 0.04804473495483398, 0.04804828643798828, 0.048263168334960936, 0.04834643173217774, 0.04841036987304687, 0.04859795379638672, 0.04849868774414062, 0.04835737609863281, 0.04838195037841797, 0.0483061752319336, 0.04830003356933594, 0.048146430969238284, 0.048390209197998045, 0.04827078247070313, 0.04807727813720703, 0.04813827133178711, 0.048252609252929686, 0.048222526550292966, 0.04818473434448242, 0.04814089584350586, 0.04831027221679687, 0.04818329620361328, 0.04819148635864258, 0.04832771301269531, 0.04837295913696289, 0.048623390197753906, 0.04854508972167969, 0.04850960159301758, 0.050522113800048826, 0.048534591674804686, 0.047973312377929685, 0.048004798889160157, 0.04782070541381836, 0.04812035369873047, 0.04784332656860352, 0.04790361785888672, 0.04788326263427734, 0.04785561752319336, 0.04790288162231445, 0.04789807891845703, 0.048116031646728515, 0.04795779037475586, 0.04791116714477539, 0.04797407913208008, 0.048148448944091794, 0.0480852165222168, 0.04799488067626953, 0.0485233268737793, 
0.04866044616699219, 0.04848633575439453, 0.04825644683837891, 0.04825369644165039, 0.04816691207885742, 0.048121471405029294, 0.04831907272338867, 0.048086273193359376, 0.04797289657592774, 0.04831155014038086, 0.04797443389892578, 0.04801327896118164, 0.04802227020263672, 0.04821724700927734, 0.04820780944824219, 0.0481901741027832, 0.048156768798828124, 0.04820195388793945, 0.048178752899169924, 0.04844169616699219, 0.048465599060058595, 0.048425247192382816, 0.04848035049438477, 0.04834703826904297, 0.04825702285766602, 0.04835091018676758, 0.0483515510559082, 0.04815462493896484, 0.04808246231079102, 0.04814806365966797, 0.048208545684814454, 0.04814172744750977, 0.04812883377075195, 0.04803596878051758, 0.04808832168579102, 0.048171615600585936, 0.04814448165893555, 0.04833708953857422, 0.04837964630126953, 0.04840240097045898, 0.048459808349609376, 0.04846092987060547, 0.04851801681518555, 0.050372447967529294, 0.048853408813476565, 0.048236286163330075, 0.04806051254272461, 0.048004192352294923, 0.04792822265625, 0.047925247192382815, 0.04789433670043945, 0.04807228851318359, 0.04803039932250976, 0.04795587158203125, 0.047951873779296876, 0.047927520751953126, 0.047914783477783204, 0.048173057556152345, 0.04814438247680664, 0.04819686508178711, 0.04829782485961914, 0.0480285758972168, 0.048346656799316406, 0.04845001602172851, 0.048482017517089845, 0.048433441162109375, 0.0484983024597168, 0.04829363250732422, 0.04826995086669922, 0.04801126480102539, 0.048099552154541016, 0.04808272171020508, 0.048009407043457034, 0.04827936172485352, 0.048205825805664064, 0.04827545547485351, 0.04812595367431641, 0.04803583908081055, 0.048205825805664064, 0.048121856689453124, 0.048356830596923826, 0.048200225830078124, 0.04826726531982422, 0.04843110275268555, 0.04854988861083984, 0.04842291259765625, 0.0484453125, 0.048400062561035156, 0.04843376159667969, 0.0481769905090332, 0.04807680130004883, 0.04816252899169922, 0.04822428894042969, 0.04807500839233399, 0.048254718780517576, 0.04803631973266602, 0.04817049789428711, 0.04822367858886719, 0.0481247673034668, 0.04855398559570313, 0.04826521682739258, 0.04849868774414062, 0.04833884811401367, 0.04844348907470703, 0.048367038726806644, 0.04840300750732422, 0.05048524856567383, 0.04894502258300781, 0.04827971267700195, 0.047988895416259766, 0.04782489776611328, 0.04800511932373047, 0.04788169479370117, 0.048143009185791015, 0.04801715087890625, 0.0479725456237793, 0.04782483291625977, 0.047875358581542966, 0.04790140914916992, 0.04807884979248047, 0.047876094818115236, 0.048228351593017575, 0.048078655242919925, 0.04811747360229492, 0.04812169647216797, 0.048390785217285154, 0.04864521789550781, 0.04863001632690429, 0.04846454238891602, 0.04832665634155273, 0.048140289306640625, 0.0481954231262207, 0.04821539306640625, 0.04807267379760742, 0.04822512054443359, 0.0481743049621582, 0.047988639831542966, 0.04798137664794922, 0.048029281616210937, 0.04796057510375976, 0.04806409454345703, 0.048068191528320314, 0.04826598358154297, 0.04811779022216797, 0.04839984130859375, 0.04847875213623047, 0.04861539077758789, 0.04860742568969727, 0.04852035140991211, 0.04838576126098633, 0.048399326324462894, 0.04830003356933594, 0.04820326232910156, 0.048236446380615236, 0.048192031860351564, 0.04823046493530273, 0.04803379058837891, 0.04808201599121094, 0.04823855972290039, 0.04810028839111328, 0.048196895599365235, 0.04811849594116211, 0.048162849426269534, 0.048162849426269534, 0.04822537612915039, 0.04838896179199219, 0.048347137451171876, 
0.048402431488037106, 0.048363521575927736, 0.050638721466064456, 0.048599422454833986, 0.04799897766113281, 0.04786175918579102, 0.04782815933227539, 0.04777558517456055, 0.04788528060913086, 0.047783935546875, 0.047869697570800784, 0.0477861442565918, 0.04775740814208984, 0.04782227325439453, 0.04783161544799805, 0.04800307083129883, 0.04794572830200195, 0.047925247192382815, 0.048080192565917966, 0.0480447998046875, 0.04801059341430664, 0.0482861442565918, 0.048424991607666015, 0.04845375823974609, 0.04836284637451172, 0.04806630325317383, 0.04800723266601563, 0.047936065673828125, 0.047908958435058595, 0.04790707015991211, 0.0479536018371582, 0.0481794548034668, 0.04801945495605469, 0.04806054306030273, 0.047998847961425783, 0.04797030258178711, 0.047916961669921876, 0.048162368774414065, 0.04808348846435547, 0.048246784210205076, 0.048130046844482424, 0.048323841094970704, 0.048487327575683595, 0.04846115112304687, 0.048374080657958986, 0.04828387069702148, 0.04832662582397461, 0.048508926391601564, 0.04816876983642578, 0.048025440216064454, 0.04806009674072265, 0.0480951042175293, 0.0481280632019043, 0.04808319854736328, 0.04814281463623047, 0.04811980819702148, 0.048113662719726565, 0.048233470916748046, 0.04822732925415039, 0.04841632080078125, 0.04840275192260742, 0.04828387069702148, 0.04851004791259766, 0.04840284729003906, 0.04841292953491211, 0.05049379348754883, 0.048779422760009766, 0.048067008972167966, 0.04790182495117187, 0.047860607147216794, 0.047811969757080075, 0.04779052734375, 0.047917247772216794, 0.04789657592773437, 0.04801887893676758, 0.04807123184204101, 0.04798463821411133, 0.04791484832763672, 0.04782505416870117, 0.047905792236328126, 0.04792348861694336, 0.047943519592285155, 0.04802150344848633, 0.04800806427001953, 0.04818467330932617, 0.04860550308227539, 0.04842940902709961, 0.04838809585571289, 0.04822630310058594, 0.04813833618164062, 0.04801865768432617, 0.0480959358215332, 0.0481300163269043, 0.048265056610107424, 0.04803193664550781, 0.04801126480102539, 0.04798681640625, 0.04807462310791016, 0.04804118347167969, 0.04814313507080078, 0.04811980819702148, 0.048277408599853515, 0.04822844696044922, 0.0482979850769043, 0.04827344131469727, 0.04846588897705078, 0.048494590759277346, 0.04838195037841797, 0.048428192138671874, 0.04827827072143555, 0.04829193496704102, 0.0481710090637207, 0.048247806549072264, 0.04824335861206055, 0.04820556640625, 0.04811008071899414, 0.0483328971862793, 0.04821212768554688, 0.048166751861572266, 0.048132095336914066, 0.04828521728515625, 0.04823731231689453, 0.04836748886108398, 0.048371551513671875, 0.04851507186889648, 0.048429153442382813, 0.048607135772705076, 0.04847206497192383, 0.05049971389770508, 0.04876697540283203, 0.048233566284179685, 0.048142398834228516, 0.04804390335083008, 0.04808156967163086, 0.0479747200012207, 0.048138240814208984, 0.048121631622314455, 0.04812412643432617, 0.04811775970458984, 0.04814438247680664, 0.04803193664550781, 0.048213024139404294, 0.048183361053466794, 0.048186080932617184, 0.048129470825195315, 0.04823068618774414, 0.04825494384765625, 0.04849407958984375, 0.048651073455810545, 0.048637664794921875, 0.048498977661132814, 0.04840179061889648, 0.04821868896484375, 0.048213760375976564, 0.04818483352661133, 0.048081600189208984, 0.04815679931640625, 0.0481341438293457, 0.04832172775268555, 0.04818937683105469, 0.04828351974487305, 0.04837273788452148, 0.04830374526977539, 0.048293888092041014, 0.04819782257080078, 0.048363712310791014, 0.04839833450317383, 
0.04849478530883789, 0.04867862319946289, 0.04858848190307617, 0.048578144073486325, 0.048476993560791014, 0.04839846420288086, 0.0484466552734375, 0.04835187149047852, 0.048363582611083984, 0.0483279037475586, 0.04829990386962891, 0.04817523193359375, 0.048481056213378906, 0.04835548782348633, 0.04829964828491211, 0.04829177474975586, 0.04821430587768555, 0.04818124771118164, 0.048388031005859374, 0.04844345474243164, 0.04855795288085937, 0.048600383758544925, 0.048608062744140625, 0.048465503692626956, 0.05048566436767578, 0.04885488128662109, 0.04823484802246094, 0.04803023910522461, 0.048045921325683597, 0.04798463821411133, 0.04814233779907227, 0.048112735748291016, 0.04800604629516601, 0.047994686126708985, 0.048115455627441406, 0.04803414535522461, 0.04786118316650391, 0.048036128997802734, 0.047978527069091795, 0.04826556777954102, 0.04811161422729492, 0.04812799835205078, 0.048151775360107424, 0.048239391326904295, 0.04866457748413086, 0.048568286895751954, 0.04847824096679688, 0.04830003356933594, 0.04833280181884766, 0.048220321655273436, 0.04810736083984375, 0.04820787048339844, 0.04817824172973633, 0.04807980728149414, 0.048154048919677735, 0.04814652633666992, 0.04809366226196289, 0.04816486358642578, 0.04820787048339844, 0.048271358489990236, 0.04827859115600586, 0.04832147216796875, 0.04843270492553711, 0.048372161865234374, 0.04862118530273438, 0.04849638366699219, 0.04850956726074219, 0.04838358306884766, 0.048449951171875, 0.04844748687744141, 0.04832406234741211, 0.048364063262939454, 0.04831350326538086, 0.04825548934936524, 0.048195934295654295, 0.04824476623535156, 0.0483430061340332, 0.04852345657348633, 0.048365375518798825, 0.048467967987060545, 0.04841267013549805, 0.048457534790039065, 0.04852134323120117, 0.04848646545410156, 0.048502880096435545, 0.04852918243408203, 0.048648319244384765, 0.050347999572753904, 0.04873532867431641, 0.048140865325927734, 0.04796627044677734, 0.047962432861328126, 0.0479021110534668, 0.04787891387939453, 0.04796921539306641, 0.04789715194702148, 0.04791289520263672, 0.04789712142944336, 0.04788611221313477, 0.04799065780639648, 0.04790879821777344, 0.04801523208618164, 0.0479378547668457, 0.04794169616699219, 0.048041248321533205, 0.04804867172241211, 0.04844972610473633, 0.0486297607421875, 0.04842623901367187, 0.048445758819580076, 0.04827932739257813, 0.04811439895629883, 0.04824825668334961, 0.048187904357910157, 0.04820787048339844, 0.04819046401977539, 0.04812083053588867, 0.04815420913696289, 0.04818179321289062, 0.04826217651367187, 0.04825174331665039, 0.04816864013671875, 0.048322879791259765, 0.04829100799560547, 0.04830495834350586, 0.04839136123657226, 0.04845036697387695, 0.048642047882080076, 0.04893199920654297, 0.04854460906982422, 0.04861030578613281, 0.04846899032592773, 0.048500415802001956, 0.04844326400756836, 0.04832505416870117, 0.048358497619628904, 0.04830915069580078, 0.048256992340087894, 0.04828131103515625, 0.04831468963623047, 0.04848796844482422, 0.04844182586669922, 0.04843110275268555, 0.0483749771118164, 0.048509281158447264, 0.04844182586669922, 0.04851030349731445, 0.04864886474609375, 0.048623615264892575, 0.04879359817504883]",tokens/s,20.72785849074811,, 
float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 657, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 324, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), 
**factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 238.12 MiB is free. Process 87449 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,835.82976,3354.329088,0.0,2959.081472,2957.493248,s,1,7.60441064453125,7.60441064453125,0.0,7.60441064453125,7.60441064453125,7.60441064453125,7.60441064453125,[7.60441064453125],,kWh,1.005276507085379e-05,1.1015301663254118e-06,4.153892212005461e-06,1.5308187449184663e-05,,MB,1128.615936,3423.535104,0.0,3017.801728,2552.885248,s,10,0.5859242820739747,0.058592428207397476,0.002362301742255165,0.05809035110473633,0.05996758232116699,0.06253839855194092,0.06459505153656006,"[0.06510921478271485, 0.0585645751953125, 0.05581372833251953, 0.05712700653076172, 0.05939628982543945, 0.058537055969238284, 0.057635486602783205, 0.05768281555175781, 0.05849788665771485, 0.05756022262573242]",tokens/s,4369.1652288900905,kWh,2.154151239307669e-06,2.3743067485282434e-07,1.4369149567204602e-06,3.828496870880954e-06,tokens/kWh,66866973.8108192,MB,1138.286592,3423.535104,0.0,3017.801728,2552.887808,s,10,12.852337768554687,1.285233776855469,0.012224236546737058,1.289853271484375,1.2956299560546873,1.2969684204101561,1.298039191894531,"[1.298306884765625, 1.29533251953125, 1.294235595703125, 1.293533203125, 1.2904178466796874, 1.2892886962890624, 1.275995849609375, 1.2716734619140626, 1.2858623046875, 1.25769140625]",tokens/s,49.01831957306602,kWh,3.715348410902399e-05,4.097759460257526e-06,2.1655742406280632e-05,6.290698597556217e-05,tokens/kWh,1001478.5961049536,,s,630,12.84996529006958,0.020396770301697745,0.00037085017510198466,0.020403215408325195,0.020714876365661623,0.020847481441497805,0.02165227920532227,"[0.021066112518310545, 0.02083488082885742, 0.02062531280517578, 0.02076054382324219, 0.020461727142333984, 0.02046553611755371, 0.020426752090454102, 0.020465215682983397, 0.02067840003967285, 0.02053590393066406, 0.020598527908325195, 0.020515167236328125, 0.020353023529052734, 0.02046953582763672, 0.020455135345458984, 0.020515327453613282, 0.020817920684814452, 0.020729312896728514, 0.02049430465698242, 0.020668800354003907, 0.020670047760009767, 0.020471839904785155, 0.02052355194091797, 0.020469696044921874, 0.020738143920898438, 0.020580352783203124, 0.020473407745361327, 
0.020440927505493166, 0.020510751724243163, 0.020568639755249023, 0.0203505916595459, 0.02026697540283203, 0.020344831466674804, 0.02058608055114746, 0.02115839958190918, 0.02113260841369629, 0.022361087799072265, 0.02051584053039551, 0.020597152709960938, 0.020817695617675783, 0.021066560745239257, 0.0205614070892334, 0.02032614326477051, 0.02053606414794922, 0.020586496353149415, 0.02038374328613281, 0.020475839614868162, 0.02048543930053711, 0.020498592376708983, 0.020699743270874024, 0.020553728103637696, 0.02041651153564453, 0.020514463424682616, 0.020670816421508788, 0.020668415069580077, 0.020525056838989256, 0.020358400344848634, 0.02049305534362793, 0.020557823181152343, 0.020657760620117187, 0.020686464309692384, 0.020398880004882814, 0.020418560028076172, 0.02129913520812988, 0.02044937515258789, 0.02044313621520996, 0.020438335418701173, 0.020499135971069334, 0.020596736907958983, 0.020344831466674804, 0.020529151916503906, 0.020545536041259766, 0.02068070411682129, 0.020444320678710937, 0.020466527938842773, 0.020559871673583984, 0.020563968658447264, 0.020537343978881836, 0.020682752609252928, 0.02056188774108887, 0.020637727737426757, 0.021139455795288087, 0.020524543762207033, 0.02073855972290039, 0.02047385597229004, 0.0210984001159668, 0.020659360885620117, 0.02035807991027832, 0.02040403175354004, 0.02050886344909668, 0.020812864303588866, 0.02082419204711914, 0.020410879135131836, 0.020457855224609377, 0.02056185531616211, 0.020596736907958983, 0.020531200408935548, 0.02075823974609375, 0.020402368545532228, 0.020547679901123047, 0.02059587287902832, 0.020553728103637696, 0.02042678451538086, 0.020369535446166993, 0.0203885440826416, 0.020549631118774413, 0.02048793601989746, 0.020549888610839843, 0.020465375900268555, 0.020435136795043947, 0.020608768463134766, 0.020525407791137696, 0.020568063735961914, 0.020458656311035155, 0.0205296630859375, 0.020626943588256837, 0.02035593605041504, 0.02057027244567871, 0.020567264556884766, 0.020607616424560545, 0.020518911361694335, 0.02039193534851074, 0.020479999542236327, 0.020534751892089843, 0.020465503692626952, 0.02038240051269531, 0.02088243293762207, 0.020664575576782227, 0.02041484832763672, 0.020543872833251955, 0.020746240615844725, 0.020398080825805662, 0.020849727630615236, 0.02044380760192871, 0.020334880828857423, 0.020512767791748047, 0.020389888763427736, 0.021529727935791016, 0.020369760513305663, 0.020412960052490235, 0.020612096786499022, 0.020242719650268554, 0.020423391342163085, 0.02060633659362793, 0.020472448348999025, 0.020578304290771485, 0.020781248092651368, 0.020649791717529297, 0.02050048065185547, 0.02042265510559082, 0.020436607360839843, 0.020599168777465822, 0.020567039489746093, 0.02037820816040039, 0.02026460838317871, 0.02044326400756836, 0.02032633590698242, 0.020087135314941405, 0.02034320068359375, 0.020228031158447266, 0.02033203125, 0.020213760375976563, 0.021139968872070314, 0.020799488067626954, 0.020573631286621093, 0.020531776428222657, 0.020448671340942384, 0.02056867218017578, 0.020701183319091796, 0.02047385597229004, 0.02041609573364258, 0.020422624588012694, 0.020314815521240235, 0.02025651168823242, 0.02041651153564453, 0.020320287704467775, 0.0203504638671875, 0.020083168029785155, 0.020351999282836913, 0.02077289581298828, 0.020705408096313476, 0.020458335876464843, 0.020699392318725585, 0.022826751708984374, 0.020645696640014647, 0.02057360076904297, 0.020406911849975586, 0.020424032211303712, 0.020321088790893553, 0.02088960075378418, 0.020375520706176757, 
0.020336639404296874, 0.020813535690307618, 0.020738336563110353, 0.02062745666503906, 0.02045283126831055, 0.020380191802978516, 0.020395488739013673, 0.02068675231933594, 0.020378240585327147, 0.02040415954589844, 0.020387903213500976, 0.02037881660461426, 0.020322240829467774, 0.02026723289489746, 0.020404064178466796, 0.020703712463378907, 0.020490591049194335, 0.020297727584838866, 0.02039948844909668, 0.020417152404785158, 0.020733951568603515, 0.020858816146850586, 0.020471872329711913, 0.02045747184753418, 0.02034876823425293, 0.020319807052612306, 0.02026691246032715, 0.02046636772155762, 0.02065203285217285, 0.020466815948486327, 0.020539264678955078, 0.020494335174560546, 0.02041548728942871, 0.020412479400634766, 0.02055776023864746, 0.0204466552734375, 0.02082259178161621, 0.020447231292724608, 0.020410367965698242, 0.020369407653808593, 0.02038374328613281, 0.020798847198486327, 0.020638336181640626, 0.0203855037689209, 0.020336927413940428, 0.020356832504272462, 0.02049849510192871, 0.020762847900390624, 0.020410367965698242, 0.0204977912902832, 0.020458112716674803, 0.020467039108276366, 0.020286111831665038, 0.020400224685668947, 0.020483999252319335, 0.020504159927368162, 0.020430816650390624, 0.020422975540161134, 0.020510719299316405, 0.021272031784057618, 0.022427616119384767, 0.020874624252319337, 0.02043996810913086, 0.020322303771972656, 0.020555776596069338, 0.020563968658447264, 0.020361215591430663, 0.02037555122375488, 0.02037555122375488, 0.020426816940307617, 0.020391744613647463, 0.02035036849975586, 0.020415199279785155, 0.02046883201599121, 0.020540447235107423, 0.020559743881225587, 0.020361120223999024, 0.02067875289916992, 0.020927743911743166, 0.02074025535583496, 0.020564479827880858, 0.02065417671203613, 0.020415552139282228, 0.020495296478271485, 0.02044313621520996, 0.020551263809204103, 0.020318784713745118, 0.020387104034423828, 0.020437568664550782, 0.020215808868408205, 0.020375455856323242, 0.020284799575805663, 0.020392671585083007, 0.020576255798339844, 0.020168319702148437, 0.02047433662414551, 0.020367040634155273, 0.020367584228515624, 0.020745311737060547, 0.020517791748046875, 0.02040233612060547, 0.02052079963684082, 0.020455423355102538, 0.020440927505493166, 0.020528703689575194, 0.020400480270385744, 0.020340959548950197, 0.02033433532714844, 0.020318496704101564, 0.021116895675659178, 0.020486175537109377, 0.02063564872741699, 0.020516511917114257, 0.020412832260131835, 0.020492223739624022, 0.020350976943969725, 0.020206975936889648, 0.020854560852050782, 0.020509536743164063, 0.02059676742553711, 0.02047792053222656, 0.020379520416259764, 0.020434240341186523, 0.020489023208618163, 0.02088640022277832, 0.020379520416259764, 0.020236415863037108, 0.020536352157592773, 0.020687135696411132, 0.020585119247436525, 0.020477983474731447, 0.020431968688964845, 0.020341663360595702, 0.02047385597229004, 0.020628639221191406, 0.020433536529541017, 0.020408384323120116, 0.02032451248168945, 0.020545440673828123, 0.02067465591430664, 0.020461856842041017, 0.02064067268371582, 0.020521631240844728, 0.02047369575500488, 0.020277568817138672, 0.020316160202026368, 0.020928512573242186, 0.020516159057617188, 0.020480287551879882, 0.020394176483154298, 0.020347103118896485, 0.02036636734008789, 0.02031622314453125, 0.0204705924987793, 0.0208155517578125, 0.020304288864135742, 0.020307968139648438, 0.020391424179077147, 0.02025507164001465, 0.020297887802124024, 0.020365312576293947, 0.020338687896728515, 0.020197376251220703, 0.02053638458251953, 
0.020701887130737305, 0.020522335052490234, 0.020585023880004882, 0.020396095275878906, 0.0204716796875, 0.02042255973815918, 0.02035353660583496, 0.02069708824157715, 0.020436992645263673, 0.020387840270996094, 0.02026905632019043, 0.020415552139282228, 0.02056604766845703, 0.020157344818115236, 0.02027724838256836, 0.020641183853149413, 0.02080214309692383, 0.020518911361694335, 0.0205897274017334, 0.02038256072998047, 0.02049238395690918, 0.020385696411132814, 0.02022604751586914, 0.02081875228881836, 0.020418336868286133, 0.02046175956726074, 0.020223648071289062, 0.02026473617553711, 0.02034262466430664, 0.020339424133300782, 0.02002943992614746, 0.020116607666015626, 0.01990950393676758, 0.020244768142700195, 0.020332256317138673, 0.02036735916137695, 0.020353023529052734, 0.020308063507080077, 0.020321279525756835, 0.020239263534545898, 0.020141439437866213, 0.02008127975463867, 0.020015104293823242, 0.019920896530151368, 0.019986080169677733, 0.020010879516601562, 0.01982512092590332, 0.021702335357666015, 0.02112544059753418, 0.020215808868408205, 0.020246143341064452, 0.02060736083984375, 0.02007859230041504, 0.020531200408935548, 0.02005731201171875, 0.020046144485473632, 0.019919008255004884, 0.020033855438232422, 0.02020966339111328, 0.020714559555053712, 0.020298688888549805, 0.020106592178344727, 0.02007107162475586, 0.020136959075927736, 0.020173824310302735, 0.02011039924621582, 0.020013311386108398, 0.02024323272705078, 0.020260768890380858, 0.020298912048339845, 0.020278112411499023, 0.020484256744384765, 0.020156160354614257, 0.020084320068359376, 0.020166976928710938, 0.019982143402099608, 0.020182432174682616, 0.020291872024536133, 0.020503231048583984, 0.020256767272949217, 0.020186656951904296, 0.020084768295288085, 0.020695232391357423, 0.02007046318054199, 0.019983936309814453, 0.020088544845581056, 0.020717727661132813, 0.020153375625610353, 0.02018604850769043, 0.020221439361572266, 0.0199616641998291, 0.019862016677856444, 0.020035776138305664, 0.020262912750244142, 0.020312000274658203, 0.020231231689453125, 0.020261215209960937, 0.02011408042907715, 0.019976192474365235, 0.019995744705200196, 0.02014668846130371, 0.020264991760253905, 0.01996633529663086, 0.01989017677307129, 0.02005638313293457, 0.02026652717590332, 0.019949216842651368, 0.019876224517822266, 0.01999488067626953, 0.019996543884277344, 0.019900224685668946, 0.019879552841186525, 0.019929664611816406, 0.01988096046447754, 0.019993024826049803, 0.020518943786621092, 0.019972192764282228, 0.02005990409851074, 0.019971839904785155, 0.02012460708618164, 0.02036729621887207, 0.02080508804321289, 0.02080419158935547, 0.020658079147338866, 0.020574304580688478, 0.020661279678344725, 0.02052102470397949, 0.020721759796142578, 0.02085487937927246, 0.020675296783447265, 0.02048409652709961, 0.020155967712402342, 0.02002579116821289, 0.02020672035217285, 0.01999875259399414, 0.019850080490112304, 0.01988582420349121, 0.019799808502197265, 0.0199869441986084, 0.019927040100097656, 0.019869632720947265, 0.020035648345947267, 0.020189184188842774, 0.020275136947631837, 0.020230207443237305, 0.0202128963470459, 0.020095775604248047, 0.020451391220092773, 0.020184320449829103, 0.021155391693115234, 0.02065043258666992, 0.02047529602050781, 0.020656864166259767, 0.020289535522460937, 0.020059808731079102, 0.020040031433105468, 0.02002707290649414, 0.020166976928710938, 0.020197376251220703, 0.020105215072631837, 0.020153440475463868, 0.02010745620727539, 0.02019606399536133, 0.0200392951965332, 
0.020093311309814454, 0.020641759872436525, 0.021180448532104493, 0.020551679611206054, 0.020414464950561522, 0.02025494384765625, 0.020300575256347656, 0.020220928192138672, 0.019963903427124022, 0.01983692741394043, 0.019812351226806642, 0.020319520950317384, 0.022896383285522463, 0.020020191192626952, 0.020242240905761717, 0.020376928329467775, 0.02020796775817871, 0.020017759323120117, 0.020375200271606445, 0.020583776473999022, 0.020418495178222657, 0.02068070411682129, 0.021214176177978515, 0.02055276870727539, 0.020251583099365235, 0.020213056564331054, 0.021889759063720704, 0.023609024047851562, 0.020303808212280273, 0.02025904083251953, 0.020203039169311522, 0.020273632049560546, 0.020357248306274413, 0.020189184188842774, 0.019868736267089845, 0.020318464279174806, 0.020626111984252928, 0.02025267219543457, 0.0202259521484375, 0.0200479679107666, 0.01987993621826172, 0.019869695663452147, 0.019916479110717773, 0.0198590087890625, 0.020060928344726562, 0.020844736099243165, 0.020325439453125, 0.020484384536743165, 0.020815040588378905, 0.020240703582763673, 0.020048383712768555, 0.020106719970703124, 0.01999523162841797, 0.019959487915039063, 0.01983513641357422, 0.019748544692993163, 0.019869888305664062, 0.020027103424072264, 0.0198756160736084, 0.01991468811035156, 0.019755712509155275, 0.01980191993713379, 0.01981273651123047, 0.019928159713745116, 0.0199769287109375, 0.01981439971923828, 0.019974143981933593, 0.01995110321044922, 0.01984355163574219, 0.019891616821289062, 0.020498176574707032, 0.019849279403686523, 0.019995456695556642, 0.01989414405822754, 0.020402399063110352, 0.01985526466369629, 0.01974675178527832, 0.020035648345947267, 0.02002124786376953, 0.019998655319213868, 0.019843135833740234, 0.01981955146789551, 0.02006524848937988, 0.020015104293823242, 0.020245983123779298, 0.019849760055541992, 0.01994710350036621, 0.019967744827270508, 0.02000543975830078, 0.01996735954284668, 0.019847904205322266, 0.019860992431640623, 0.019906335830688477, 0.019872095108032225, 0.019827072143554687, 0.019736576080322265, 0.01998361587524414, 0.019966720581054687, 0.01983283233642578, 0.019861631393432617, 0.019925952911376953, 0.01978191947937012, 0.01995779228210449, 0.020001312255859376, 0.019867551803588866, 0.019775680541992188, 0.020377023696899414, 0.01984979248046875, 0.020045440673828126, 0.01998476791381836, 0.019994623184204103]",tokens/s,49.027369784948945,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,835.735552,8760.786944,0.0,8365.539328,8230.228992,s,1,7.5355458984375,7.5355458984375,0.0,7.5355458984375,7.5355458984375,7.5355458984375,7.5355458984375,[7.5355458984375],,kWh,1.1459933887461678e-05,1.2341689698212937e-06,3.4725027780041495e-06,1.616660563528712e-05,,MB,1149.796352,8951.627776,0.0,8545.8944,8499.295232,s,10,2.679052764892578,0.2679052764892578,0.010860641831617654,0.2712129821777344,0.27328599548339844,0.2738173904418945,0.2742425064086914,"[0.2356790771484375, 0.27014712524414064, 0.27174835205078124, 0.27090145874023436, 0.26872930908203124, 0.2731679077148437, 0.2715245056152344, 0.2729480285644531, 0.27434878540039065, 0.26985821533203125]",tokens/s,955.5616199678128,kWh,7.3679221284370055e-06,8.121571587166401e-07,4.902892811200002e-06,1.3082972098353649e-05,tokens/kWh,19567419.243538313,MB,1154.797568,8953.724928,0.0,8547.991552,8499.297792,s,10,18.936921508789062,1.8936921508789062,0.005111415589837481,1.89559033203125,1.8990034301757812,1.8996725524902343,1.900207850341797,"[1.8848922119140625, 1.8963909912109376, 1.894943603515625, 1.892427734375, 1.885875732421875, 1.898854736328125, 1.8976529541015625, 1.896237060546875, 1.8893048095703124, 1.9003416748046875]",tokens/s,33.26834299374386,kWh,5.593598999989695e-05,6.169981117973483e-06,3.6891668402201307e-05,9.899763952007175e-05,tokens/kWh,636378.8096909802,,s,630,18.934200822830196,0.030054287020365396,0.000419725913338335,0.029986000061035155,0.030328757858276368,0.03050526542663574,0.0327036884689331,"[0.03242416000366211, 0.030667360305786134, 0.030141727447509765, 0.02985203170776367, 0.029776224136352537, 0.02967897605895996, 0.02964748764038086, 0.02971238327026367, 0.029749248504638674, 0.02972390365600586, 0.029741855621337892, 0.029601503372192382, 0.02968310356140137, 0.029616992950439455, 0.029657087326049804, 0.02959974479675293, 0.02976518440246582, 0.029729215621948243, 0.029764671325683594, 0.02981091117858887, 0.029688543319702148, 0.029853696823120116, 0.02996428871154785, 0.02980454444885254, 0.029784063339233398, 0.02978816032409668, 0.029814016342163085, 0.029776639938354492, 0.0298024959564209, 0.029878271102905272, 0.029820928573608397, 0.029859840393066408, 0.03020595169067383, 0.03010527992248535, 0.030054719924926757, 0.02996553611755371, 0.030034719467163087, 0.02991923141479492, 0.029914335250854494, 0.02989481544494629, 0.02987446403503418, 0.029851999282836914, 0.029911039352416992, 0.030033119201660158, 0.030029951095581056, 0.029911712646484376, 0.029937664031982423, 0.029884416580200194, 0.029866016387939454, 0.0299233283996582, 0.029976287841796876, 0.02992767906188965, 0.030025279998779297, 0.029931968688964843, 0.02997248077392578, 0.02997452735900879, 0.029962175369262694, 0.029883615493774413, 0.029870559692382812, 0.029886848449707033, 0.02982809638977051, 0.029907264709472657, 0.029999807357788087, 0.03290230560302734, 0.03099091148376465, 0.03013043212890625, 0.02987932777404785, 0.029702239990234375, 0.029717536926269533, 0.029621728897094725, 0.029688192367553712, 0.029620223999023438, 0.029624319076538085, 0.02974652862548828, 0.029745311737060548, 0.029775455474853517, 0.029754495620727538, 0.02989641571044922, 0.029706144332885744, 0.02975257682800293, 0.029741376876831056, 0.029698047637939453, 0.029844127655029296, 0.02996832084655762, 0.030048255920410157, 0.03000934410095215, 0.030011392593383788, 0.030007167816162108, 0.029967487335205076, 0.030071807861328126, 0.030019584655761718, 
0.029916351318359374, 0.030004032135009767, 0.03016703987121582, 0.03041279983520508, 0.030543872833251953, 0.030498559951782227, 0.03054751968383789, 0.03041299247741699, 0.030374176025390626, 0.030316768646240236, 0.03024460792541504, 0.029997312545776367, 0.03013222312927246, 0.030242816925048828, 0.02996643257141113, 0.03014851188659668, 0.030123008728027343, 0.030043136596679686, 0.029932863235473634, 0.03013497543334961, 0.030253055572509766, 0.029961536407470703, 0.030270015716552735, 0.03019379234313965, 0.030119935989379884, 0.030228479385375977, 0.03021993637084961, 0.03014486312866211, 0.03008064079284668, 0.03000556755065918, 0.029921279907226563, 0.03004195213317871, 0.0303287353515625, 0.030200128555297853, 0.030318431854248047, 0.03271088027954101, 0.030930816650390627, 0.030321407318115234, 0.029898752212524415, 0.02976483154296875, 0.02975823974609375, 0.029944896697998047, 0.0296847038269043, 0.02999465560913086, 0.03007689666748047, 0.0297903995513916, 0.030206111907958983, 0.029803871154785156, 0.029818559646606447, 0.029751903533935548, 0.029932960510253907, 0.029704704284667968, 0.029819488525390625, 0.029714303970336912, 0.02979840087890625, 0.02978201675415039, 0.030007295608520508, 0.029859840393066408, 0.029837312698364257, 0.02976153564453125, 0.029734912872314452, 0.030007295608520508, 0.029826400756835937, 0.03021683120727539, 0.02993769645690918, 0.03018060874938965, 0.030399168014526367, 0.030398527145385743, 0.030492671966552733, 0.030305791854858398, 0.030429695129394533, 0.030385215759277343, 0.030120351791381835, 0.030062911987304687, 0.029926847457885743, 0.030183263778686523, 0.03015776062011719, 0.030044160842895507, 0.030097408294677733, 0.030070783615112305, 0.030005247116088866, 0.02993152046203613, 0.02992265510559082, 0.030134944915771483, 0.030225791931152345, 0.030245216369628906, 0.030140703201293945, 0.030291967391967774, 0.030013439178466796, 0.029976160049438476, 0.030105472564697266, 0.02993404769897461, 0.030019039154052733, 0.030037631988525392, 0.029876319885253907, 0.02994470405578613, 0.030137760162353516, 0.030101503372192383, 0.032686080932617184, 0.03074835205078125, 0.03022265625, 0.029915391921997072, 0.029895679473876953, 0.029819648742675783, 0.029845312118530275, 0.029944255828857423, 0.029928895950317384, 0.02967977523803711, 0.029718687057495117, 0.0296342716217041, 0.03002191925048828, 0.02975948715209961, 0.02978358459472656, 0.029948383331298827, 0.029847103118896483, 0.029849151611328124, 0.03005939292907715, 0.029984256744384766, 0.029870464324951173, 0.030093439102172853, 0.029841407775878907, 0.02986400032043457, 0.030054336547851564, 0.029869504928588867, 0.029872703552246093, 0.029870080947875976, 0.02983526420593262, 0.030052352905273437, 0.030082944869995118, 0.030328960418701173, 0.03038617515563965, 0.03034217643737793, 0.030172096252441407, 0.030150047302246095, 0.03005504035949707, 0.030192703247070313, 0.03009836769104004, 0.030039552688598634, 0.030101024627685546, 0.02992438316345215, 0.029999040603637697, 0.030089216232299806, 0.030052352905273437, 0.02998240089416504, 0.02999942398071289, 0.02992505645751953, 0.029970048904418945, 0.02995065689086914, 0.0299005126953125, 0.029832576751708983, 0.030040512084960936, 0.030007360458374023, 0.03006096076965332, 0.02997452735900879, 0.029969919204711915, 0.029929983139038087, 0.030109695434570313, 0.030003200531005858, 0.029990943908691406, 0.03002774429321289, 0.02998681640625, 0.032603233337402344, 0.030684064865112305, 0.03004140853881836, 
0.029840063095092774, 0.02968329620361328, 0.029661600112915038, 0.02966281509399414, 0.02962499237060547, 0.029615583419799803, 0.02959347152709961, 0.02964521598815918, 0.029741056442260744, 0.029663103103637695, 0.029699935913085937, 0.029720863342285155, 0.029724128723144533, 0.02969215965270996, 0.02972217559814453, 0.029764320373535155, 0.029749248504638674, 0.029683391571044923, 0.029750783920288085, 0.02972329521179199, 0.02967302322387695, 0.029723039627075197, 0.029726911544799804, 0.029710336685180663, 0.029766975402832033, 0.02976633644104004, 0.029838560104370117, 0.030062400817871093, 0.03022332763671875, 0.030457632064819336, 0.030341344833374022, 0.03026460838317871, 0.03008995246887207, 0.03002572822570801, 0.030078975677490235, 0.029988224029541016, 0.029864576339721678, 0.029861440658569337, 0.02985004806518555, 0.02983103942871094, 0.029830400466918944, 0.02986073684692383, 0.029867488861083983, 0.029866527557373047, 0.029865983963012696, 0.029999103546142578, 0.029935039520263673, 0.02998963165283203, 0.029988319396972656, 0.029974880218505858, 0.02994175910949707, 0.029963903427124024, 0.029892192840576173, 0.029883167266845704, 0.030040063858032227, 0.029994047164916993, 0.03010825538635254, 0.030009183883666992, 0.030086816787719725, 0.03007369613647461, 0.03281955337524414, 0.03074662399291992, 0.030135583877563477, 0.029820959091186525, 0.02976838493347168, 0.029669376373291017, 0.029577215194702147, 0.029666784286499024, 0.02970889663696289, 0.029832927703857422, 0.030029504776000977, 0.029959903717041016, 0.030077760696411132, 0.030029823303222656, 0.029988704681396486, 0.030019744873046875, 0.02986537551879883, 0.03053219223022461, 0.030080320358276368, 0.029802431106567384, 0.0299400634765625, 0.03007263946533203, 0.029846176147460938, 0.030010911941528322, 0.03023094367980957, 0.029868032455444334, 0.03013327980041504, 0.02998179244995117, 0.030080896377563476, 0.030143680572509764, 0.03019830322265625, 0.030246463775634766, 0.030637983322143555, 0.030320671081542967, 0.03040336036682129, 0.03040870475769043, 0.030295040130615233, 0.030166015625, 0.030208000183105467, 0.030064640045166017, 0.03004787254333496, 0.029991296768188475, 0.030097408294677733, 0.030220287322998047, 0.030228479385375977, 0.030211231231689454, 0.03019206428527832, 0.03019817543029785, 0.030150400161743165, 0.030161151885986327, 0.030173248291015625, 0.03008505630493164, 0.030076927185058593, 0.030192640304565428, 0.03001241683959961, 0.03015065574645996, 0.03026915168762207, 0.03019411277770996, 0.03006857681274414, 0.029982591629028322, 0.030068864822387697, 0.030271488189697264, 0.03012777519226074, 0.03287305450439453, 0.0310064640045166, 0.030153087615966797, 0.030005247116088866, 0.029834400177001952, 0.029749536514282228, 0.029909568786621092, 0.02979430389404297, 0.029931167602539062, 0.029920927047729494, 0.029889215469360353, 0.030170591354370117, 0.029907487869262697, 0.02982649612426758, 0.029801023483276366, 0.029863199234008788, 0.029909727096557617, 0.029963327407836915, 0.029929695129394532, 0.03004899215698242, 0.029968095779418946, 0.029905055999755858, 0.029780096054077148, 0.029824447631835938, 0.02997920036315918, 0.029880319595336914, 0.03091654396057129, 0.030005216598510742, 0.02981488037109375, 0.03011686325073242, 0.030108671188354492, 0.0304202880859375, 0.03053228759765625, 0.03054128074645996, 0.030340639114379883, 0.03015920066833496, 0.030106239318847657, 0.030010976791381837, 0.02999545669555664, 0.029995008468627928, 0.030051776885986328, 
0.029990976333618163, 0.03012444877624512, 0.029941408157348633, 0.030095008850097655, 0.03025315284729004, 0.03013907241821289, 0.02993561553955078, 0.030212095260620117, 0.030166816711425782, 0.029878496170043945, 0.03001651191711426, 0.030071456909179686, 0.030226783752441408, 0.030082975387573242, 0.030163040161132814, 0.030067840576171876, 0.029979103088378905, 0.030257568359375, 0.030275583267211914, 0.030216192245483397, 0.03019161605834961, 0.03012403106689453, 0.03308038330078125, 0.03097078323364258, 0.03037593650817871, 0.029882368087768556, 0.029797439575195313, 0.03003049659729004, 0.030408992767333984, 0.029917184829711913, 0.029773279190063475, 0.029830911636352538, 0.02995631980895996, 0.030059072494506837, 0.029949951171875, 0.030040063858032227, 0.029937599182128908, 0.02988425636291504, 0.02980067253112793, 0.030151775360107422, 0.02994883155822754, 0.029895967483520507, 0.029985504150390627, 0.0301527042388916, 0.029839359283447265, 0.030089216232299806, 0.030035968780517577, 0.02997657585144043, 0.029869407653808595, 0.029860511779785156, 0.029824256896972656, 0.029934335708618164, 0.03021004867553711, 0.030216192245483397, 0.03031804847717285, 0.030484672546386718, 0.030242399215698244, 0.030431999206542968, 0.030390335083007813, 0.030268928527832032, 0.030029951095581056, 0.029960512161254883, 0.030228479385375977, 0.029945951461791992, 0.029902624130249023, 0.029919359207153322, 0.029949951171875, 0.029965599060058593, 0.02992201614379883, 0.030099456787109374, 0.030189535140991212, 0.029957792282104493, 0.03001910400390625, 0.029997919082641603, 0.02994528007507324, 0.029986495971679687, 0.030022527694702147, 0.029952096939086913, 0.0299703369140625, 0.029988000869750977, 0.029958976745605468, 0.0299704647064209, 0.030044000625610353, 0.03005766487121582, 0.030186464309692383, 0.03293356704711914, 0.031051231384277345, 0.030247711181640626, 0.029863967895507812, 0.029722015380859376, 0.02969251251220703, 0.029677568435668947, 0.029628416061401368, 0.029765695571899415, 0.02968364715576172, 0.029885759353637697, 0.029809343338012696, 0.029818431854248047, 0.029802944183349608, 0.029773567199707033, 0.029773759841918945, 0.029729087829589843, 0.02975103950500488, 0.029899072647094727, 0.02981216049194336, 0.029796512603759765, 0.02971683120727539, 0.029829120635986327, 0.02978611183166504, 0.029841312408447264, 0.029800287246704103, 0.029804319381713868, 0.029765504837036133, 0.029790815353393556, 0.029845504760742186, 0.029930559158325196, 0.030208959579467773, 0.030510751724243165, 0.03034761619567871, 0.030334304809570313, 0.030233247756958008, 0.030117151260375976, 0.03006057548522949, 0.030079679489135744, 0.029976415634155273, 0.02997395133972168, 0.029997791290283203, 0.029951007843017578, 0.029905311584472655, 0.02990342330932617, 0.029892032623291015, 0.029827648162841797, 0.029855743408203125, 0.03000044822692871, 0.0298187198638916, 0.029948415756225585, 0.02991958427429199, 0.02997039985656738, 0.029968416213989258, 0.029906944274902345, 0.02991823959350586, 0.029927839279174806, 0.029976543426513673, 0.030095840454101564, 0.029976703643798827, 0.030035295486450196, 0.030077472686767578, 0.030111295700073242, 0.03284787368774414, 0.030930944442749023, 0.030246496200561523, 0.029925119400024413, 0.029825696945190428, 0.02974048042297363, 0.029772287368774415, 0.029750879287719727, 0.02976358413696289, 0.029780288696289063, 0.029792192459106446, 0.029762943267822264, 0.029709152221679688, 0.02976972770690918, 0.02976563262939453, 0.02976972770690918, 
0.02977996826171875, 0.029838399887084963, 0.029847679138183595, 0.029764608383178712, 0.02987615966796875, 0.029941631317138673, 0.03006073570251465, 0.0300664005279541, 0.030038112640380858, 0.030145856857299806, 0.03008787155151367, 0.03019161605834961, 0.030012639999389648, 0.030761760711669923, 0.030089216232299806, 0.03032035255432129, 0.030791967391967774, 0.030568384170532228, 0.030522687911987305, 0.030348255157470704, 0.030376928329467773, 0.030362432479858398, 0.030300159454345704, 0.030242816925048828, 0.0303636474609375, 0.030300159454345704, 0.029994848251342774, 0.030375648498535156, 0.03017568016052246, 0.030074527740478516, 0.030249311447143556, 0.030121984481811522, 0.030176448822021484, 0.030276416778564453, 0.03013222312927246, 0.030121376037597656, 0.03034396743774414, 0.03032806396484375, 0.03026531219482422, 0.030003583908081055, 0.030066911697387694, 0.03020185661315918, 0.030023263931274413, 0.030156383514404295, 0.03029203224182129, 0.03025177574157715, 0.0302259521484375]",tokens/s,33.273123375789275,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.362624,718.209024,0.0,322.961408,314.743808,s,1,7.12930908203125,7.12930908203125,0.0,7.12930908203125,7.12930908203125,7.12930908203125,7.12930908203125,[7.12930908203125],,kWh,4.444330741659997e-06,4.831028945992709e-07,9.975007979889927e-07,5.92493443424826e-06,,MB,1093.103616,810.483712,0.0,404.750336,391.119872,s,33,0.2363698244094849,0.007162721951802572,0.00012241536813872747,0.007113791942596435,0.007291059112548828,0.007445760154724121,0.007545480937957764,"[0.007535232067108154, 0.0070817599296569824, 0.0070991358757019046, 0.007550303936004639, 0.00729257583618164, 0.007106944084167481, 0.007197567939758301, 0.007075488090515137, 0.0072849922180175785, 0.007142528057098389, 0.007201119899749756, 0.007128448009490967, 0.007084000110626221, 0.007113791942596435, 0.007054240226745606, 0.007386112213134765, 0.007106272220611573, 0.00710313606262207, 0.007114751815795899, 0.007225279808044434, 0.007111711978912354, 0.007164415836334229, 0.007107615947723389, 0.0071018881797790525, 0.007068384170532227, 0.007080031871795654, 0.007123487949371338, 0.007161632061004639, 0.007251999855041504, 0.007114175796508789, 0.007083936214447022, 0.007061151981353759, 0.0070557122230529784]",tokens/s,35740.60276562529,kWh,2.3613727125809281e-07,2.6041602155796568e-08,1.5620697501942162e-07,4.1838584843331104e-07,tokens/kWh,611875380.007757,MB,1103.044608,825.163776,0.0,419.4304,391.122432,s,33,9.878880798339848,0.2993600241921165,0.024886758566057144,0.29488934326171873,0.30042205810546874,0.3044090698242187,0.3966516906738281,"[0.3088450927734375, 0.4379724426269531, 0.2999659729003906, 0.30038250732421873, 0.29915423583984374, 0.30145172119140623, 0.30043194580078125, 0.29284234619140626, 0.2969441223144531, 0.297340087890625, 0.29619937133789065, 0.2939107666015625, 0.29546102905273436, 
0.2950823974609375, 0.29575534057617187, 0.29818655395507815, 0.29488934326171873, 0.2978695983886719, 0.2899470825195313, 0.29282321166992187, 0.29323248291015624, 0.29367416381835937, 0.2936785888671875, 0.2941048278808594, 0.29431427001953125, 0.29127127075195314, 0.29567034912109375, 0.29308224487304685, 0.29094442749023436, 0.28820068359375, 0.29039175415039065, 0.2868695068359375, 0.28799105834960936]",tokens/s,210.44894076962422,kWh,8.313185671241867e-06,9.168013610817225e-07,3.856812108409e-06,1.3086799140732585e-05,tokens/kWh,4814011.3806677805,,s,2079,9.863568037509914,0.004744380970423241,0.002985095676152631,0.00461740779876709,0.0049251838684082035,0.005016031885147094,0.005280369215011594,"[0.0054271998405456545, 0.00501145601272583, 0.005853248119354248, 0.004840735912322998, 0.004856544017791748, 0.004894815921783448, 0.004773024082183838, 0.00497705602645874, 0.005105343818664551, 0.004977344036102295, 0.004967455863952637, 0.005010528087615967, 0.005041728019714356, 0.00487446403503418, 0.004938943862915039, 0.004944704055786133, 0.004922976016998291, 0.004884031772613526, 0.004895391941070556, 0.005371776103973389, 0.004817344188690185, 0.004728415966033936, 0.004704192161560059, 0.004755807876586914, 0.0048558077812194825, 0.004992320060729981, 0.0050817918777465824, 0.004976640224456787, 0.004951231956481934, 0.0050020480155944826, 0.00499507188796997, 0.005085184097290039, 0.005083392143249512, 0.005064703941345215, 0.004931327819824219, 0.0049268479347229005, 0.004891039848327637, 0.004878560066223145, 0.004812640190124512, 0.004823200225830078, 0.0047309122085571285, 0.004699456214904785, 0.004698783874511719, 0.00466534423828125, 0.0046976637840271, 0.004804800033569336, 0.00509772777557373, 0.004915520191192627, 0.0050438718795776365, 0.004933055877685547, 0.004915711879730225, 0.004849023818969726, 0.004784800052642822, 0.004679999828338623, 0.004675392150878906, 0.004644192218780517, 0.004754015922546387, 0.004597760200500488, 0.004640768051147461, 0.004640768051147461, 0.004712448120117187, 0.004659488201141358, 0.004640480041503906, 0.004346464157104492, 0.004624256134033203, 0.004651008129119873, 0.0046629438400268555, 0.004768095970153809, 0.004857215881347656, 0.0050267200469970704, 0.0048947839736938475, 0.004871327877044678, 0.004870336055755615, 0.004734720230102539, 0.004693568229675293, 0.0046622719764709475, 0.004664735794067383, 0.140567138671875, 0.005075200080871582, 0.004854847908020019, 0.004851456165313721, 0.004910272121429443, 0.0050009598731994625, 0.004939072132110596, 0.004868224143981934, 0.004806335926055908, 0.00479091215133667, 0.0047801599502563475, 0.004822432041168213, 0.004737567901611328, 0.0047391681671142575, 0.004729951858520508, 0.004794943809509278, 0.004886047840118408, 0.004967296123504638, 0.004894336223602295, 0.004933055877685547, 0.004927999973297119, 0.004934112071990967, 0.005045536041259765, 0.005038815975189209, 0.004927135944366455, 0.004842016220092773, 0.004803487777709961, 0.00483571195602417, 0.0048781437873840336, 0.004803296089172363, 0.004674784183502197, 0.004645760059356689, 0.004601759910583496, 0.00459980821609497, 0.004632607936859131, 0.004689888000488281, 0.004840479850769043, 0.004864992141723633, 0.004793983936309814, 0.004798079967498779, 0.004684544086456298, 0.00467683219909668, 0.004678207874298096, 0.004645088195800781, 0.004609951972961426, 0.004647264003753662, 0.004709695816040039, 0.004702720165252685, 0.004698016166687012, 0.00436633586883545, 0.004623360157012939, 0.004641791820526123, 
0.004632575988769531, 0.004603903770446777, 0.004593311786651611, 0.004701695919036865, 0.004627295970916748, 0.004640768051147461, 0.004750720024108887, 0.004719232082366943, 0.004966015815734863, 0.005053919792175293, 0.0050861120223999025, 0.005030111789703369, 0.005089216232299805, 0.005037951946258545, 0.005050528049468994, 0.005042272090911865, 0.004902400016784668, 0.004804831981658935, 0.0047223038673400875, 0.004736767768859863, 0.004696127891540528, 0.004724607944488525, 0.004743872165679931, 0.004793856143951416, 0.004770304203033447, 0.004709983825683594, 0.004679999828338623, 0.004659296035766602, 0.004631711959838867, 0.004653952121734619, 0.00465503978729248, 0.004622208118438721, 0.004608160018920899, 0.004673056125640869, 0.004641248226165772, 0.004605247974395752, 0.0046906242370605465, 0.0046564159393310545, 0.004661983966827392, 0.0046284799575805665, 0.004689792156219483, 0.004696191787719727, 0.004817152023315429, 0.004884223937988281, 0.004910816192626953, 0.004743743896484375, 0.004673408031463623, 0.004665184020996094, 0.004640384197235108, 0.004741504192352295, 0.004614143848419189, 0.004610047817230224, 0.004609504222869873, 0.004592160224914551, 0.004642303943634033, 0.0047779521942138675, 0.004925439834594727, 0.005044767856597901, 0.005058623790740967, 0.005099455833435059, 0.004639071941375732, 0.00496614408493042, 0.00498803186416626, 0.004862143993377686, 0.004855904102325439, 0.004809311866760254, 0.004765567779541016, 0.0047842559814453125, 0.0046910080909729, 0.004682688236236572, 0.004636672019958496, 0.00473302412033081, 0.0047634878158569335, 0.004751423835754395, 0.004689919948577881, 0.004644864082336426, 0.004615488052368164, 0.004622208118438721, 0.0045773439407348635, 0.004598176002502442, 0.004591968059539795, 0.004663296222686767, 0.004601856231689453, 0.004599232196807862, 0.004616767883300781, 0.004734079837799072, 0.004846720218658447, 0.004667136192321777, 0.004623360157012939, 0.004663936138153076, 0.004721280097961426, 0.004754303932189941, 0.004710944175720215, 0.004663392066955567, 0.004663648128509522, 0.004683680057525634, 0.00467142391204834, 0.0046633281707763674, 0.0046633281707763674, 0.004810751914978028, 0.004825183868408203, 0.004755360126495361, 0.004773183822631836, 0.004721343994140625, 0.004726784229278564, 0.004724736213684082, 0.0048204798698425295, 0.0050078721046447755, 0.004905055999755859, 0.004833183765411377, 0.0048446722030639645, 0.004829696178436279, 0.004823616027832031, 0.004742847919464111, 0.00477836799621582, 0.0048371200561523435, 0.004908703804016114, 0.004950143814086914, 0.004929183959960937, 0.004976672172546387, 0.005000927925109863, 0.004964896202087402, 0.0048941121101379395, 0.0045896959304809574, 0.004904160022735596, 0.004854112148284912, 0.004882688045501709, 0.004801631927490235, 0.004753568172454834, 0.004736959934234619, 0.004745279788970947, 0.004731135845184326, 0.004684447765350342, 0.0046386241912841795, 0.004667136192321777, 0.004665599822998047, 0.004724063873291016, 0.0048196158409118655, 0.004843776226043701, 0.0048596482276916505, 0.004803967952728271, 0.0049251198768615724, 0.004864448070526123, 0.004831744194030762, 0.004773312091827393, 0.004749824047088623, 0.004779679775238037, 0.004709983825683594, 0.004707136154174805, 0.004679200172424317, 0.00466377592086792, 0.004620287895202637, 0.004640768051147461, 0.004643167972564697, 0.004730207920074463, 0.004595424175262451, 0.004658783912658691, 0.00464793586730957, 0.004785408020019531, 0.004794112205505371, 0.004975264072418213, 
0.004994815826416016, 0.004855455875396729, 0.004934783935546875, 0.004947328090667725, 0.004945536136627197, 0.004858047962188721, 0.004784992218017578, 0.004834784030914307, 0.004784448146820068, 0.004727968215942383, 0.004674399852752685, 0.004639935970306396, 0.004651936054229736, 0.004636352062225342, 0.004661471843719483, 0.004644864082336426, 0.004644768238067627, 0.0046449599266052246, 0.00466326379776001, 0.004628064155578614, 0.00462278413772583, 0.004571231842041015, 0.004614048004150391, 0.004579328060150147, 0.0046769919395446774, 0.0043779520988464354, 0.004636384010314941, 0.004629248142242431, 0.0046386241912841795, 0.0045825281143188476, 0.004565887928009034, 0.004624383926391602, 0.004632575988769531, 0.004611839771270752, 0.00461030387878418, 0.004620160102844238, 0.004642111778259277, 0.004612448215484619, 0.004655295848846435, 0.00466921615600586, 0.005441823959350586, 0.0048949441909790035, 0.00497049617767334, 0.005312511920928955, 0.005462016105651855, 0.005805312156677246, 0.004981503963470459, 0.00506060791015625, 0.005040256023406983, 0.005023359775543213, 0.004972383975982666, 0.00495465612411499, 0.0049847040176391605, 0.004952095985412598, 0.004825056076049805, 0.0047964158058166504, 0.004769536018371582, 0.004860447883605957, 0.0046566081047058104, 0.0046369280815124515, 0.004639776229858399, 0.0046080961227416995, 0.004614496231079101, 0.004757535934448242, 0.004717152118682861, 0.004672544002532959, 0.004721536159515381, 0.004693088054656983, 0.004660128116607666, 0.004730879783630371, 0.004724991798400879, 0.0046787199974060055, 0.004856063842773438, 0.004811264038085937, 0.004687424182891846, 0.004661632061004639, 0.004579328060150147, 0.004597760200500488, 0.004663296222686767, 0.004599232196807862, 0.004588096141815185, 0.004589824199676513, 0.004573984146118164, 0.004588511943817139, 0.004657120227813721, 0.0050802559852600095, 0.004856448173522949, 0.004821216106414795, 0.00451584005355835, 0.005016608238220215, 0.005290527820587158, 0.005105855941772461, 0.004919551849365235, 0.004872191905975342, 0.004913023948669434, 0.004888703823089599, 0.004745215892791748, 0.004716639995574951, 0.004701759815216065, 0.004692512035369873, 0.0046590080261230465, 0.004670656204223632, 0.004676415920257569, 0.00461033582687378, 0.004638175964355468, 0.004595967769622802, 0.004566976070404053, 0.004601920127868652, 0.004869728088378906, 0.004843999862670898, 0.004688864231109619, 0.004625376224517822, 0.00459884786605835, 0.004724800109863281, 0.004631423950195312, 0.004614143848419189, 0.004603903770446777, 0.004618336200714111, 0.004582784175872803, 0.004571135997772217, 0.004546271800994873, 0.004567935943603516, 0.004583360195159912, 0.004636672019958496, 0.0046382398605346676, 0.004717023849487305, 0.004863967895507812, 0.00497983980178833, 0.0049919037818908696, 0.0050087041854858395, 0.005182144165039063, 0.005055615901947022, 0.004999135971069336, 0.004893599987030029, 0.004996511936187744, 0.0050653119087219235, 0.004980735778808594, 0.0049409279823303225, 0.004889760017395019, 0.00478384017944336, 0.004758624076843262, 0.004677599906921387, 0.004656064033508301, 0.004601984024047851, 0.0047717118263244625, 0.004777984142303467, 0.004662975788116455, 0.004692287921905518, 0.004598944187164306, 0.004541279792785645, 0.004572800159454346, 0.0043023681640625, 0.00457366418838501, 0.004571135997772217, 0.004585023880004883, 0.0045879678726196286, 0.004567039966583252, 0.004579328060150147, 0.00460364818572998, 0.004598015785217285, 0.0045640959739685055, 
0.004565599918365478, 0.004593215942382813, 0.004567264080047607, 0.004572800159454346, 0.00457532787322998, 0.00461407995223999, 0.0047829442024230956, 0.004877664089202881, 0.004852384090423584, 0.004838496208190918, 0.004744095802307129, 0.004800320148468018, 0.00466323184967041, 0.0046657600402832036, 0.004662303924560547, 0.00474399995803833, 0.004709983825683594, 0.0046739521026611325, 0.004646560192108154, 0.004661600112915039, 0.0047894401550292965, 0.005268288135528564, 0.004748288154602051, 0.004684544086456298, 0.004663008213043213, 0.004622464179992676, 0.004601823806762695, 0.004565440177917481, 0.004597760200500488, 0.004596799850463867, 0.004586143970489502, 0.004583712100982666, 0.004595104217529297, 0.004618847846984863, 0.0046178560256958006, 0.004640927791595459, 0.004601151943206787, 0.004554687976837158, 0.004932576179504394, 0.004599199771881103, 0.004554463863372803, 0.004598656177520752, 0.004681727886199951, 0.004655104160308838, 0.004554751873016357, 0.004577280044555664, 0.00456492805480957, 0.004556384086608887, 0.004553184032440185, 0.0045240321159362796, 0.004540319919586182, 0.004554848194122315, 0.0046284799575805665, 0.004928127765655517, 0.0050787200927734375, 0.005093696117401123, 0.005097472190856934, 0.00505241584777832, 0.0050769920349121095, 0.005005311965942383, 0.004978303909301758, 0.005055007934570313, 0.004829279899597168, 0.00483622407913208, 0.004807295799255371, 0.00471011209487915, 0.004667200088500976, 0.0046286721229553225, 0.0046044478416442875, 0.004605984210968018, 0.004560863971710205, 0.00459980821609497, 0.00459555196762085, 0.004581151962280273, 0.004577600002288819, 0.004581408023834228, 0.004581247806549072, 0.0045610561370849605, 0.004582431793212891, 0.0045577921867370605, 0.004575232028961182, 0.004552256107330322, 0.004608448028564453, 0.0049946560859680175, 0.004942399978637695, 0.004848671913146973, 0.004757952213287353, 0.004659167766571045, 0.0046063680648803715, 0.00457040023803711, 0.004754303932189941, 0.004581215858459472, 0.00459980821609497, 0.0045649919509887695, 0.004591616153717041, 0.00466534423828125, 0.004846752166748047, 0.0047329277992248535, 0.004711391925811767, 0.0046854400634765625, 0.004663551807403564, 0.004612095832824707, 0.004593215942382813, 0.004595295906066894, 0.00464572811126709, 0.004607840061187744, 0.004580575942993164, 0.004642879962921143, 0.004647039890289307, 0.0046128640174865725, 0.0045875201225280765, 0.00457260799407959, 0.004559423923492432, 0.004570432186126709, 0.004625088214874268, 0.004681727886199951, 0.004541567802429199, 0.004821887969970703, 0.004742591857910156, 0.004809216022491455, 0.004694079875946045, 0.004673535823822021, 0.004632063865661621, 0.004636159896850586, 0.004684160232543946, 0.004589983940124512, 0.004579552173614502, 0.004616447925567627, 0.00465670394897461, 0.004628320217132568, 0.0048925762176513675, 0.00461568021774292, 0.004567391872406006, 0.0049725441932678225, 0.004581471920013428, 0.00457369613647461, 0.004627871990203858, 0.0046962881088256836, 0.005204351902008056, 0.005133440017700195, 0.0050672321319580075, 0.005015967845916748, 0.005072127819061279, 0.005046527862548828, 0.005003776073455811, 0.005041728019714356, 0.004941887855529785, 0.004917632102966309, 0.004840767860412597, 0.0047942399978637695, 0.004639039993286133, 0.004611968040466308, 0.004579967975616455, 0.004606304168701172, 0.004592607975006104, 0.00458409595489502, 0.004607935905456543, 0.00458351993560791, 0.004605184078216553, 0.004647679805755615, 0.004593440055847168, 
0.004546783924102783, 0.004562335968017578, 0.004546144008636475, 0.004565695762634278, 0.004577216148376465, 0.004555136203765869, 0.004701791763305664, 0.004660639762878418, 0.004631552219390869, 0.004589568138122559, 0.004646719932556153, 0.004720287799835205, 0.004699935913085937, 0.004646815776824951, 0.004621151924133301, 0.004624383926391602, 0.004769792079925537, 0.004640768051147461, 0.00467145586013794, 0.004927487850189209, 0.0049205441474914555, 0.004856095790863037, 0.0047291841506958004, 0.004773952007293701, 0.004746431827545166, 0.004704736232757569, 0.004736447811126709, 0.004672287940979004, 0.004614016056060791, 0.004632927894592285, 0.004577280044555664, 0.004575168132781983, 0.004649024009704589, 0.004585472106933594, 0.004589600086212158, 0.004653056144714355, 0.004753376007080078, 0.0046696319580078124, 0.004597568035125733, 0.004582752227783203, 0.004571904182434082, 0.0045559039115905765, 0.004685855865478515, 0.00460649585723877, 0.004581600189208984, 0.004589568138122559, 0.0046284799575805665, 0.004685184001922607, 0.004621151924133301, 0.004572319984436035, 0.004553343772888184, 0.004581344127655029, 0.004630144119262695, 0.0046473278999328614, 0.004576704025268555, 0.0045809922218322755, 0.004616608142852783, 0.0045873279571533206, 0.004676320075988769, 0.004573152065277099, 0.004579360008239746, 0.004534272193908692, 0.004564064025878906, 0.0045454401969909665, 0.004533984184265137, 0.00454915189743042, 0.004673279762268066, 0.004794367790222168, 0.005001215934753418, 0.005238751888275147, 0.00516918420791626, 0.005040128231048584, 0.004908063888549805, 0.004940767765045166, 0.00499507188796997, 0.004900320053100586, 0.004837600231170655, 0.004718368053436279, 0.004676127910614013, 0.00459065580368042, 0.0046150717735290525, 0.0043361282348632815, 0.0046284799575805665, 0.0045853757858276365, 0.004814112186431884, 0.004553120136260986, 0.004599711894989014, 0.004581888198852539, 0.004589568138122559, 0.004612127780914307, 0.004566207885742187, 0.004561696052551269, 0.004554751873016357, 0.0045424637794494625, 0.004668831825256348, 0.004602079868316651, 0.0049155840873718265, 0.00501910400390625, 0.00490777587890625, 0.0049641280174255375, 0.004929535865783692, 0.004743167877197266, 0.00471449613571167, 0.004614304065704346, 0.004636127948760987, 0.004603456020355225, 0.004616960048675537, 0.004583487987518311, 0.00457696008682251, 0.004554143905639648, 0.0047093119621276856, 0.004579616069793701, 0.004566400051116943, 0.004594240188598633, 0.004620351791381836, 0.004588255882263184, 0.004592512130737305, 0.00468828821182251, 0.0045965762138366695, 0.004561759948730469, 0.004562943935394287, 0.004550367832183838, 0.004550943851470948, 0.0045240321159362796, 0.004532383918762207, 0.004581344127655029, 0.004583295822143555, 0.004595263957977295, 0.004676032066345215, 0.004668416023254395, 0.004669536113739014, 0.004693120002746582, 0.004761375904083252, 0.004667712211608887, 0.0046711678504943845, 0.004655104160308838, 0.004683775901794434, 0.004618239879608154, 0.0046694397926330565, 0.004603616237640381, 0.004569375991821289, 0.0055808000564575196, 0.004869503974914551, 0.004661888122558594, 0.004327424049377441, 0.004560544013977051, 0.004563168048858642, 0.004561024188995361, 0.004534272193908692, 0.004536320209503173, 0.004567039966583252, 0.004562367916107177, 0.004577760219573975, 0.004596831798553466, 0.004673823833465576, 0.004940415859222412, 0.005058656215667725, 0.005117951869964599, 0.00506060791015625, 0.0050566082000732425, 0.004956096172332764, 
0.0049417920112609865, 0.004884479999542236, 0.0049541440010070804, 0.004840703964233398, 0.004762527942657471, 0.004677248001098633, 0.004622528076171875, 0.004585536003112793, 0.004567999839782715, 0.004577727794647217, 0.0045994877815246586, 0.004639840126037598, 0.004595424175262451, 0.004586559772491455, 0.004570079803466797, 0.004581408023834228, 0.004652895927429199, 0.004716703891754151, 0.004593887805938721, 0.004595136165618897, 0.004589087963104248, 0.004600607872009278, 0.004567423820495606, 0.004904607772827149, 0.0047288317680358885, 0.0047636480331420894, 0.004603903770446777, 0.004582592010498047, 0.0045617280006408694, 0.004564544200897217, 0.004599743843078613, 0.004871679782867432, 0.004821407794952393, 0.004911712169647217, 0.004806848049163819, 0.004814112186431884, 0.004620607852935791, 0.004585887908935547, 0.004589568138122559, 0.004584671974182129, 0.00457747220993042, 0.004612512111663818, 0.004568480014801026, 0.004611711978912353, 0.004604159832000732, 0.0045718722343444824, 0.004315904140472412, 0.004589791774749756, 0.004595776081085205, 0.004628416061401367, 0.004621568202972412, 0.004604671955108643, 0.004611264228820801, 0.004642975807189941, 0.004596384048461914, 0.004591296195983887, 0.004580927848815918, 0.0046806402206420895, 0.004656960010528564, 0.004595967769622802, 0.004564383983612061, 0.004626783847808838, 0.004583424091339112, 0.004688960075378418, 0.004572095870971679, 0.0045875201225280765, 0.004631743907928467, 0.004618368148803711, 0.004593887805938721, 0.00455072021484375, 0.0045428800582885745, 0.004558976173400879, 0.0045443840026855465, 0.004546559810638427, 0.004581600189208984, 0.004562719821929932, 0.005248672008514405, 0.004553055763244629, 0.004534207820892334, 0.004572927951812744, 0.004554719924926758, 0.004683231830596924, 0.005129407882690429, 0.0048512001037597655, 0.004702400207519531, 0.004882368087768554, 0.005003007888793945, 0.0051653761863708495, 0.005128191947937012, 0.0050421757698059086, 0.005015583992004394, 0.004923359870910644, 0.004878335952758789, 0.004821280002593994, 0.004742847919464111, 0.00471395206451416, 0.00467628812789917, 0.0046826558113098144, 0.004608992099761963, 0.004593664169311523, 0.004562431812286377, 0.0045593600273132326, 0.0045281281471252445, 0.00455679988861084, 0.004550496101379395, 0.004610208034515381, 0.004713535785675049, 0.004615104198455811, 0.004581376075744629, 0.004315199851989746, 0.004556032180786132, 0.004541183948516846, 0.004544544219970703, 0.004671743869781494, 0.0049927358627319335, 0.004935808181762695, 0.0047628159523010255, 0.004868800163269043, 0.004576543807983399, 0.004557472229003906, 0.004773087978363037, 0.004786399841308594, 0.004589471817016601, 0.004573056221008301, 0.004684639930725098, 0.00731056022644043, 0.0051495041847229, 0.00467964792251587, 0.004637887954711914, 0.004588607788085937, 0.004558591842651367, 0.004574240207672119, 0.004803743839263916, 0.004593247890472412, 0.0046061758995056155, 0.004589568138122559, 0.004601600170135498, 0.004609568119049072, 0.004611008167266846, 0.004570496082305908, 0.004565408229827881, 0.004546271800994873, 0.0045632319450378414, 0.004537568092346191, 0.004543263912200928, 0.004556447982788086, 0.004570943832397461, 0.004586016178131104, 0.004871520042419433, 0.0048393278121948245, 0.004804416179656983, 0.0047441282272338865, 0.004744575977325439, 0.004663936138153076, 0.004630527973175049, 0.004590720176696777, 0.0045866560935974125, 0.00461740779876709, 0.004629119873046875, 0.004603231906890869, 
0.004563360214233398, 0.0045610561370849605, 0.004540224075317383, 0.004533472061157226, 0.00454911994934082, 0.004530655860900879, 0.0045240321159362796, 0.0045281281471252445, 0.004523488044738769, 0.004566559791564942, 0.004754144191741944, 0.004999199867248535, 0.005045760154724121, 0.0050488319396972655, 0.00496127986907959, 0.00496127986907959, 0.004919072151184082, 0.004845791816711425, 0.004921599864959717, 0.0048865280151367185, 0.004755008220672608, 0.004653151988983154, 0.00461033582687378, 0.004642623901367188, 0.0053779840469360355, 0.006050015926361084, 0.00570527982711792, 0.004651296138763428, 0.004655104160308838, 0.004640768051147461, 0.004585696220397949, 0.004599584102630615, 0.004593760013580322, 0.00457692813873291, 0.004542943954467774, 0.0046113600730896, 0.004665855884552002, 0.004634463787078857, 0.004582719802856445, 0.004561791896820069, 0.0045977277755737305, 0.004597439765930176, 0.004583327770233154, 0.004731296062469482, 0.004689919948577881, 0.004642816066741944, 0.004591263771057129, 0.004575583934783935, 0.00454860782623291, 0.0045500478744506835, 0.004534880161285401, 0.004541888236999512, 0.004575808048248291, 0.0047205758094787595, 0.004916768074035644, 0.004830783843994141, 0.004729951858520508, 0.004783999919891357, 0.004700096130371094, 0.004667232036590576, 0.004618463993072509, 0.004571135997772217, 0.004591360092163086, 0.004618495941162109, 0.004614143848419189, 0.00460368013381958, 0.00471289587020874, 0.004770912170410156, 0.0046926078796386715, 0.004621471881866455, 0.004571616172790527, 0.004548863887786865, 0.004608191967010498, 0.004641791820526123, 0.004570112228393554, 0.004348639965057373, 0.004595263957977295, 0.004600128173828125, 0.004592127799987793, 0.004575071811676025, 0.004599711894989014, 0.004631648063659668, 0.004609024047851563, 0.004593599796295166, 0.004578976154327392, 0.004575647830963135, 0.004562367916107177, 0.0045874881744384765, 0.004545119762420655, 0.004554751873016357, 0.004579328060150147, 0.004550655841827392, 0.004584640026092529, 0.00466377592086792, 0.004841824054718017, 0.004941535949707031, 0.005077151775360107, 0.004902304172515869, 0.0048585920333862305, 0.004970176219940186, 0.0050302081108093265, 0.005031199932098389, 0.005069536209106445, 0.0049500160217285155, 0.0049235520362854, 0.004853536128997802, 0.004826655864715576, 0.004848159790039062, 0.004761792182922363, 0.004699647903442383, 0.0047288317680358885, 0.004651328086853027, 0.004632575988769531, 0.004601215839385987, 0.0045756158828735355, 0.004563168048858642, 0.00455404806137085, 0.004601664066314697, 0.004756383895874024, 0.004734975814819336, 0.00466534423828125, 0.004599199771881103, 0.0045840320587158204, 0.004576735973358155, 0.0045716800689697265, 0.004587456226348877, 0.004562560081481933, 0.004548575878143311, 0.004530655860900879, 0.0045649919509887695, 0.004583392143249512, 0.004560351848602295, 0.0045717120170593265, 0.004562943935394287, 0.0045418238639831545, 0.0045512962341308595, 0.004648320198059082, 0.004676224231719971, 0.004329631805419922, 0.004621920108795166, 0.004598112106323242, 0.004618175983428955, 0.004728096008300781, 0.0048873920440673825, 0.004847616195678711, 0.00473199987411499, 0.004670368194580078, 0.004631807804107666, 0.004604671955108643, 0.0046592001914978025, 0.00475551986694336, 0.004724063873291016, 0.004672095775604248, 0.004712448120117187, 0.0046542401313781736, 0.004627295970916748, 0.004650176048278809, 0.00467142391204834, 0.004598656177520752, 0.004632575988769531, 0.004652383804321289, 
0.004632575988769531, 0.004581503868103028, 0.0045716800689697265, 0.004551712036132812, 0.00456928014755249, 0.004606751918792724, 0.004583168029785156, 0.004587584018707276, 0.0045651841163635255, 0.004557951927185059, 0.0045577921867370605, 0.005312255859375, 0.005375487804412842, 0.00538316822052002, 0.0046501121520996095, 0.0047803521156311035, 0.004970719814300537, 0.00495033597946167, 0.004912831783294678, 0.004779007911682129, 0.0047562880516052244, 0.004681280136108398, 0.004700352191925048, 0.004682047843933106, 0.004632031917572022, 0.004587264060974121, 0.004565919876098633, 0.004583424091339112, 0.0046267518997192385, 0.004806335926055908, 0.0049780158996582035, 0.004942431926727295, 0.004894527912139892, 0.004835775852203369, 0.004759359836578369, 0.004777120113372803, 0.004789087772369385, 0.004775296211242676, 0.004678271770477295, 0.004624383926391602, 0.004498591899871826, 0.004705215930938721, 0.004601856231689453, 0.004549888134002686, 0.004571904182434082, 0.004562655925750733, 0.004561183929443359, 0.004583263874053955, 0.004554912090301514, 0.0045359678268432615, 0.004528480052947998, 0.004527200222015381, 0.004561823844909668, 0.004627840042114258, 0.004659840106964111, 0.004663584232330323, 0.004622399806976318, 0.004564640045166016, 0.004583712100982666, 0.00457862377166748, 0.004569119930267334, 0.00454204797744751, 0.004534656047821045, 0.004569375991821289, 0.0045686402320861816, 0.004592351913452149, 0.004556640148162842, 0.00456444787979126, 0.004571904182434082, 0.004648736000061035, 0.0046459841728210445, 0.0046843838691711425, 0.004701759815216065, 0.004647744178771973, 0.004575168132781983, 0.004569087982177734, 0.004562943935394287, 0.004579135894775391, 0.004798655986785889, 0.004659135818481445, 0.004644927978515625, 0.004611680030822754, 0.0045931520462036135, 0.004577824115753174, 0.004585279941558838, 0.0045799040794372555, 0.004585472106933594, 0.004579328060150147, 0.004601024150848389, 0.004573760032653809, 0.004574783802032471, 0.004610752105712891, 0.004579328060150147, 0.0046459841728210445, 0.00458025598526001, 0.004560031890869141, 0.0045392317771911625, 0.0045519680976867675, 0.004617152214050293, 0.004699935913085937, 0.004634624004364014, 0.004562943935394287, 0.004601535797119141, 0.004674015998840332, 0.005090752124786377, 0.005066912174224853, 0.005022496223449707, 0.004954976081848144, 0.004960800170898437, 0.004839839935302734, 0.004761568069458008, 0.004729919910430909, 0.004731455802917481, 0.004665631771087647, 0.004618080139160157, 0.004608255863189697, 0.0045649919509887695, 0.004793407917022705, 0.004585792064666748, 0.004679999828338623, 0.004575551986694336, 0.004536320209503173, 0.004571135997772217, 0.00460316801071167, 0.004637407779693604, 0.004596928119659424, 0.004569920063018798, 0.004579328060150147, 0.004550591945648193, 0.004556863784790039, 0.0045640959739685055, 0.004553408145904541, 0.0045426559448242185, 0.004603903770446777, 0.004585472106933594, 0.0045378880500793455, 0.004581855773925781, 0.004552095890045166, 0.004560959815979004, 0.004588064193725586, 0.004726784229278564, 0.00466534423828125, 0.004615968227386475, 0.004591392040252685, 0.004573472023010254, 0.004656832218170166, 0.004660768032073975, 0.004611008167266846, 0.004595647811889649, 0.0046590399742126466, 0.004609248161315918, 0.004590591907501221, 0.004583424091339112, 0.004567359924316406, 0.004570015907287598, 0.004555552005767822, 0.00456876802444458, 0.004540736198425293, 0.0045567359924316405, 0.004545983791351319, 0.004694015979766846, 
0.004577919960021972, 0.004554240226745605, 0.00459007978439331, 0.004567039966583252, 0.0045763840675354, 0.004323328018188476, 0.004667327880859375, 0.004732895851135254, 0.004680064201354981, 0.004619999885559082, 0.0045649919509887695, 0.004558847904205322, 0.004530176162719727, 0.0045240001678466795, 0.004555967807769776, 0.0045389761924743656, 0.004554880142211914, 0.004558976173400879, 0.004552703857421875, 0.004562784194946289, 0.004552224159240723, 0.004579967975616455, 0.004530176162719727, 0.004673535823822021, 0.004545567989349365, 0.0045349440574646, 0.004520256042480469, 0.004517888069152832, 0.004558847904205322, 0.00459980821609497, 0.004868192195892334, 0.00491100788116455, 0.005000895977020263, 0.004931903839111328, 0.004906432151794433, 0.004871903896331787, 0.0048607678413391114, 0.004890016078948975, 0.0049560642242431644, 0.004829887866973877, 0.0047381119728088375, 0.004650944232940673, 0.004677663803100586, 0.004672800064086914, 0.004675263881683349, 0.004613632202148437, 0.004591104030609131, 0.004572192192077637, 0.004558815956115723, 0.004552127838134766, 0.004601888179779053, 0.0046003518104553226, 0.004591839790344238, 0.0045504322052001955, 0.004553760051727295, 0.004555744171142578, 0.004552864074707032, 0.0046525120735168455, 0.004730559825897217, 0.004639423847198486, 0.004675583839416504, 0.004574687957763672, 0.004563488006591797, 0.004580416202545166, 0.00456390380859375, 0.004689919948577881, 0.004777984142303467, 0.004757503986358643, 0.004428671836853027, 0.004574592113494873, 0.004565631866455078, 0.004580863952636719, 0.004653567790985107, 0.004648255825042725, 0.004612607955932617, 0.004663487911224365, 0.004642816066741944, 0.0046451201438903805, 0.004636223793029785, 0.004581567764282227, 0.004593664169311523, 0.004664480209350586, 0.004829535961151123, 0.00470633602142334, 0.004632895946502686, 0.004759712219238281, 0.004705535888671875, 0.004550879955291748, 0.004604127883911133, 0.004560927867889405, 0.004565279960632324, 0.004576511859893799, 0.004567808151245118, 0.004539455890655518, 0.0045598077774047854, 0.0046319360733032226, 0.0045905599594116215, 0.00460972785949707, 0.004583104133605957, 0.004648447990417481, 0.00460041618347168, 0.004589151859283447, 0.004763743877410889, 0.004561408042907715, 0.004569087982177734, 0.004571455955505371, 0.00456057596206665, 0.004535776138305664, 0.004565087795257569, 0.004571584224700928, 0.004538368225097656, 0.004566207885742187, 0.004540544033050537, 0.0045203838348388675, 0.004524288177490235, 0.004537568092346191, 0.004569888114929199, 0.004556479930877685, 0.004664639949798584, 0.004975615978240967, 0.005093376159667969, 0.005156864166259765, 0.0050094079971313476, 0.004920576095581055, 0.004865920066833496, 0.005006207942962647, 0.004878015995025635, 0.004720831871032715, 0.0046859197616577145, 0.004632607936859131, 0.004661087989807129, 0.004303264141082764, 0.00460364818572998, 0.004556320190429688, 0.004555776119232178, 0.004532192230224609, 0.004535808086395264, 0.004620800018310547, 0.0046731200218200685, 0.004602272033691407, 0.004566559791564942, 0.004540895938873291, 0.00456492805480957, 0.004565055847167969, 0.004543903827667236, 0.004561183929443359, 0.004573503971099854, 0.004612095832824707, 0.004677375793457031, 0.004582816123962402, 0.004559679985046387, 0.004552735805511475, 0.004595392227172852, 0.004543968200683594, 0.004565536022186279, 0.004575039863586426, 0.00454915189743042, 0.004661119937896728, 0.004867455959320068, 0.004848351955413818, 0.0048692159652709965, 
0.004738239765167236, 0.004691679954528808, 0.00458735990524292, 0.004778143882751465, 0.004667391777038574, 0.004684095859527588, 0.004644544124603272, 0.0046547198295593265, 0.004725344181060791, 0.004617440223693848, 0.004634624004364014, 0.004630815982818603, 0.004613471984863281, 0.005613823890686035, 0.004693759918212891, 0.004651743888854981, 0.0047017278671264645, 0.0049749441146850586, 0.005277503967285156, 0.004643360137939453, 0.004682975769042969, 0.004692255973815918, 0.0046228480339050295, 0.004577280044555664, 0.004605823993682861, 0.004640672206878662, 0.004632800102233887, 0.004607999801635742, 0.004591584205627441, 0.004558144092559814, 0.004565728187561035, 0.004585472106933594, 0.004664415836334228, 0.0043786239624023435, 0.00466534423828125, 0.004576704025268555, 0.004544960021972656, 0.004538496017456054, 0.004550399780273437, 0.004560448169708252, 0.0045303359031677246, 0.0045483198165893555, 0.004528160095214844, 0.004540607929229736, 0.004687776088714599, 0.004887296199798584, 0.00517523193359375, 0.005143648147583008, 0.005184447765350342, 0.005152991771697998, 0.005060351848602295, 0.0048558077812194825, 0.004846752166748047, 0.004794303894042969, 0.004700255870819092, 0.004663871765136719, 0.004604095935821533, 0.004588863849639893, 0.004565087795257569, 0.004530399799346924, 0.004571584224700928, 0.004554944038391113, 0.004613952159881592, 0.0045236158370971676, 0.004536736011505127, 0.004545760154724121, 0.004660192012786865, 0.004655136108398438, 0.004629951953887939, 0.004661600112915039, 0.004575104236602783, 0.004556287765502929, 0.004544223785400391, 0.004571392059326172, 0.004545184135437011, 0.004619584083557129, 0.004598176002502442, 0.004615647792816162, 0.004639008045196533, 0.004577824115753174, 0.004596960067749023, 0.0045939841270446775, 0.004676064014434814, 0.004759552001953125, 0.004709568023681641, 0.004663871765136719, 0.0045775361061096195, 0.004558784008026123, 0.0045784001350402835, 0.004551648139953613, 0.004568128108978271, 0.004627232074737549, 0.004806816101074219, 0.004821248054504394, 0.004718400001525879, 0.004651072025299072, 0.004319583892822266, 0.004555103778839112, 0.004560512065887451, 0.004581503868103028, 0.004583295822143555, 0.004632575988769531, 0.004626431941986084, 0.004602176189422607, 0.004554431915283203, 0.004558847904205322, 0.004562111854553222, 0.004685952186584473, 0.00478275203704834, 0.004697984218597412, 0.0045998401641845705, 0.004577407836914063, 0.004613120079040528, 0.004659679889678955, 0.004716959953308106, 0.004710495948791504, 0.00470198392868042, 0.004858304023742676, 0.004687679767608643, 0.004648096084594726, 0.00463753604888916, 0.0045874881744384765, 0.004582655906677246, 0.004579967975616455, 0.004710559844970703, 0.004664512157440186, 0.004662208080291748, 0.004592735767364502, 0.004584224224090576, 0.004560224056243897, 0.0045511040687561035, 0.004542751789093018, 0.004572896003723145, 0.004544735908508301, 0.004577280044555664, 0.004538464069366455, 0.004530079841613769, 0.00454204797744751, 0.004724512100219727, 0.004912831783294678, 0.005063072204589843, 0.0051140799522399905, 0.005103040218353272, 0.004967296123504638, 0.004891871929168701, 0.004944287776947022, 0.004966400146484375, 0.004835072040557861, 0.004722847938537598, 0.004653535842895508, 0.004638720035552979, 0.004630527973175049, 0.004609312057495117, 0.0045677762031555175, 0.004611839771270752, 0.004548863887786865, 0.0045649919509887695, 0.00458128023147583, 0.004579423904418945, 0.00434003210067749, 0.004636672019958496, 
0.004583104133605957, 0.0045914239883422855, 0.004566592216491699, 0.004520319938659668, 0.0045749440193176266, 0.004722879886627197, 0.004649631977081299, 0.004572319984436035, 0.004577695846557617, 0.004547008037567139, 0.0045483198165893555, 0.004550943851470948, 0.004550655841827392, 0.004577216148376465, 0.004535744190216065, 0.004567552089691162, 0.004626560211181641, 0.0047226881980895995, 0.004663296222686767, 0.0046267518997192385, 0.004599040031433106, 0.004596096038818359, 0.004571199893951416, 0.004561984062194825, 0.004587584018707276, 0.0048336639404296874, 0.004863679885864258, 0.0047890558242797854, 0.004681727886199951, 0.004628064155578614, 0.004630623817443847, 0.004650464057922363, 0.004616799831390381, 0.004616447925567627, 0.004596928119659424, 0.004618080139160157, 0.004589856147766113, 0.004565055847167969, 0.0046143999099731445, 0.004671103954315185, 0.004791391849517822, 0.004652416229248047, 0.0045632319450378414, 0.00455679988861084, 0.00455452823638916, 0.004587615966796875, 0.004613376140594482, 0.004632575988769531, 0.004656000137329102, 0.00459980821609497, 0.0045994877815246586, 0.004624671936035156, 0.004581408023834228, 0.0046592001914978025, 0.004636159896850586, 0.004661375999450683, 0.0046391038894653324, 0.004741087913513183, 0.004600992202758789, 0.004589759826660157, 0.004565695762634278, 0.004333568096160889, 0.004584544181823731, 0.00456387186050415, 0.004560895919799805, 0.004675136089324951, 0.004827744007110596, 0.004904255867004394, 0.004909408092498779, 0.00498092794418335, 0.004904160022735596, 0.004987328052520752, 0.005027711868286133, 0.004995552062988282, 0.004902912139892578, 0.0048364481925964355, 0.004758431911468506, 0.004761536121368409, 0.004775392055511475, 0.004686431884765625, 0.00466534423828125, 0.004612095832824707, 0.004667744159698486, 0.004599199771881103, 0.004583680152893067, 0.004585472106933594, 0.004533472061157226, 0.004563744068145752, 0.004577280044555664, 0.004570752143859863, 0.004574816226959228, 0.004547359943389892, 0.004583424091339112, 0.004595071792602539, 0.004593472003936768, 0.004792640209197998, 0.004699935913085937, 0.00462499189376831, 0.004632031917572022, 0.00458409595489502, 0.004644576072692871, 0.00481331205368042, 0.004923168182373047, 0.004837376117706299, 0.004709824085235596, 0.004678336143493653, 0.004651999950408935, 0.004596735954284668, 0.004635680198669434, 0.004762495994567871, 0.004780032157897949, 0.004705887794494629, 0.00468620777130127, 0.004642848014831543, 0.004569087982177734, 0.004572512149810791, 0.00457206392288208, 0.004570943832397461, 0.004639999866485596, 0.004633279800415039, 0.004634624004364014, 0.004591360092163086, 0.004581632137298584, 0.004627679824829102, 0.004416031837463379, 0.004639039993286133, 0.004542367935180664, 0.004536608219146729, 0.0045483198165893555, 0.004565343856811523, 0.004576863765716553, 0.004526144027709961, 0.004538368225097656, 0.004646912097930909, 0.004631775856018067, 0.004655903816223145, 0.0047185921669006346, 0.004628736019134522, 0.004629248142242431, 0.00460214376449585, 0.004604640007019043, 0.004554751873016357, 0.004550655841827392, 0.004591616153717041, 0.004593664169311523, 0.004621376037597656, 0.004570047855377197, 0.004569024085998535, 0.004536384105682373, 0.004539743900299072, 0.004563551902770996, 0.004524159908294678, 0.004577216148376465, 0.0045424637794494625, 0.0045829439163208, 0.004624864101409912, 0.004812352180480957, 0.00506873607635498, 0.005022496223449707, 0.004994783878326416, 0.004984831809997559, 
0.004946176052093506, 0.0049697279930114744, 0.004861855983734131, 0.00473967981338501, 0.004707712173461914, 0.004971136093139648, 0.004643104076385498, 0.0046293439865112305, 0.004589632034301758, 0.004592383861541748, 0.00461027193069458, 0.004581215858459472, 0.004578976154327392, 0.004560927867889405, 0.004567359924316406, 0.004553791999816895, 0.004539328098297119, 0.004595200061798096, 0.0045409278869628904, 0.004568448066711426, 0.004663936138153076, 0.004671487808227539, 0.004590879917144775, 0.004581727981567383, 0.004596096038818359, 0.004673535823822021, 0.00449894380569458, 0.004710912227630615, 0.004632160186767578, 0.004581215858459472, 0.004620863914489746, 0.004616191864013672, 0.004593408107757569, 0.004614016056060791, 0.004610432147979736, 0.004603392124176026, 0.004563488006591797, 0.00462230396270752, 0.004640992164611816, 0.0046529917716979985, 0.0046262722015380855, 0.004652800083160401, 0.004675839900970459, 0.00462662410736084, 0.004611904144287109, 0.004611104011535644, 0.00479695987701416, 0.004804287910461426, 0.00465177583694458, 0.004591872215270996, 0.004599552154541015, 0.004567039966583252, 0.00451584005355835, 0.004532224178314209, 0.004544511795043945, 0.004544159889221191, 0.004567391872406006, 0.004543968200683594, 0.0045081920623779295, 0.004557151794433594, 0.0045519680976867675, 0.004626560211181641, 0.004606207847595215, 0.00462063980102539, 0.004600927829742431, 0.004563519954681397, 0.004579135894775391, 0.004546751976013183, 0.004571135997772217, 0.004570591926574707, 0.004547008037567139, 0.004573472023010254, 0.004544320106506347, 0.004554656028747559, 0.004527967929840088, 0.004566336154937744, 0.004533215999603272, 0.00457260799407959, 0.004565536022186279, 0.004534304141998291, 0.004581344127655029, 0.004624576091766358, 0.004674975872039795, 0.004759967803955078, 0.0048355841636657714, 0.004765088081359863, 0.004735199928283691, 0.0047079682350158695, 0.004731264114379883, 0.004346047878265381, 0.004556159973144531, 0.004554495811462402, 0.004553919792175293, 0.004539072036743164, 0.004508992195129394, 0.004544064044952393, 0.004507232189178467, 0.004518432140350342, 0.004522336006164551, 0.005264832019805908, 0.004890848159790039, 0.0045730237960815425, 0.0045361919403076175, 0.0045857601165771485, 0.004593664169311523, 0.004761151790618897, 0.00456112003326416, 0.004579552173614502, 0.004591616153717041, 0.004550655841827392, 0.004583263874053955, 0.004560128211975097, 0.004576159954071045, 0.004536320209503173, 0.004545983791351319, 0.004542399883270263, 0.00455452823638916, 0.004563807964324951, 0.004569087982177734, 0.004529376029968262, 0.00463318395614624, 0.004556992053985596, 0.004550848007202148, 0.0045353279113769535, 0.004573344230651855, 0.004553567886352539, 0.0045606718063354494, 0.0045305280685424805, 0.004525599956512451, 0.00452243185043335, 0.004550335884094238, 0.0045788798332214355, 0.004561344146728516, 0.004533440113067627, 0.0045574398040771485, 0.004542240142822266, 0.004551072120666504, 0.0045281281471252445, 0.004517183780670166, 0.004524096012115479, 0.004512383937835694, 0.004517888069152832, 0.004566783905029297, 0.004608255863189697, 0.004519328117370606, 0.004522592067718506, 0.004615808010101318, 0.004565375804901123, 0.004534272193908692, 0.004567359924316406, 0.004531904220581055, 0.004536543846130371, 0.0042871999740600585, 0.004513792037963867, 0.004525087833404541, 0.00452732801437378, 0.004529920101165772, 0.00451584005355835, 0.004546559810638427, 0.004539999961853027, 0.00452239990234375, 
0.004533919811248779, 0.0045138559341430665, 0.0058017921447753905, 0.0058475518226623535, 0.0048090238571167, 0.004603551864624023, 0.004546559810638427, 0.004534272193908692, 0.004534272193908692, 0.004544511795043945, 0.004560512065887451, 0.004570752143859863, 0.004526527881622314, 0.004546527862548828, 0.004548960208892823, 0.004542751789093018, 0.004519648075103759, 0.004536128044128418, 0.004581823825836182, 0.004574975967407227, 0.004539711952209473, 0.004573887825012207, 0.004530176162719727, 0.004569087982177734, 0.004534016132354737, 0.004628736019134522, 0.004628767967224121, 0.004533215999603272, 0.004552864074707032, 0.0045493760108947755, 0.004517280101776123, 0.0045101442337036135, 0.004530367851257325, 0.004536128044128418, 0.004533728122711181, 0.004550271987915039, 0.00452291202545166, 0.004532320022583008, 0.0045259838104248044, 0.0045240321159362796, 0.0045281281471252445, 0.00454207992553711, 0.00453056001663208, 0.00456057596206665, 0.004512063980102539, 0.004542111873626709, 0.004528831958770752, 0.004548128128051758, 0.004573311805725098, 0.005467840194702148, 0.004655392169952393, 0.004575263977050781, 0.004538176059722901, 0.00466921615600586, 0.004292704105377197, 0.004561823844909668, 0.004674560070037842, 0.00455679988861084, 0.004554751873016357, 0.0045424637794494625, 0.004545951843261719, 0.004541088104248047, 0.004569024085998535, 0.00455679988861084, 0.004554751873016357, 0.004541759967803955, 0.004526783943176269, 0.004530176162719727, 0.004534560203552246, 0.004582304000854492, 0.004533055782318115, 0.004546559810638427, 0.004537375926971435, 0.004524288177490235, 0.004532639980316162, 0.004552800178527832, 0.004567488193511963, 0.004521759986877441, 0.004575232028961182, 0.0045424637794494625, 0.004522143840789795, 0.0045586562156677245, 0.004517920017242431, 0.004519807815551758, 0.004557087898254395, 0.004543392181396484, 0.004542623996734619, 0.00453331184387207, 0.004534080028533936, 0.004528031826019287, 0.004517888069152832, 0.004558559894561768, 0.004548895835876465, 0.004563199996948242, 0.004555744171142578, 0.004538591861724854, 0.004547135829925537, 0.004518112182617187, 0.004569920063018798, 0.004541408061981201, 0.004571135997772217, 0.004567039966583252, 0.004544511795043945, 0.004558847904205322, 0.0045424637794494625, 0.004546559810638427, 0.004588575839996338, 0.004548736095428467, 0.004563615798950195, 0.004549920082092285, 0.004549536228179931, 0.004577280044555664, 0.004567039966583252, 0.004571135997772217, 0.004588960170745849, 0.0045447998046875, 0.004542784214019775, 0.004333631992340088, 0.004554368019104004, 0.0045344319343566895, 0.004541920185089111, 0.004579584121704102, 0.004540800094604492, 0.004561024188995361, 0.004538047790527344, 0.0045382399559021, 0.004538815975189209, 0.004547808170318603, 0.004552735805511475, 0.004543231964111328, 0.004571455955505371, 0.004549312114715576, 0.004533247947692871, 0.004552703857421875, 0.004527584075927734, 0.00456876802444458, 0.004618624210357666, 0.004559648036956787, 0.004581056118011475, 0.004577216148376465, 0.004571424007415771, 0.0045463361740112306, 0.004564671993255615, 0.004573503971099854, 0.004552703857421875, 0.004568384170532226, 0.004547264099121094, 0.004536320209503173, 0.004533919811248779, 0.004553055763244629, 0.004577280044555664, 0.004530176162719727, 0.004554751873016357, 0.004538271903991699, 0.004598144054412842, 0.004548384189605713, 0.0050236802101135255, 0.004588863849639893, 0.004729536056518555, 0.0045892162322998045, 0.0045994877815246586, 
0.004569503784179687, 0.004617887973785401, 0.004566751956939698, 0.0045710082054138184, 0.004580063819885254, 0.004573472023010254, 0.0045649919509887695, 0.004533792018890381, 0.004594143867492676, 0.004571135997772217, 0.004552703857421875, 0.004568607807159424, 0.004520415782928467, 0.0045240321159362796, 0.004552703857421875, 0.0045223040580749515, 0.0045864639282226565, 0.004512479782104492, 0.004530176162719727]",tokens/s,210.77565360666875,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,937.55392,12518.883328,0.0,12123.635712,12121.851904,s,1,7.04834765625,7.04834765625,0.0,7.04834765625,7.04834765625,7.04834765625,7.04834765625,[7.04834765625],,kWh,6.074994333350029e-06,6.6222967911361e-07,2.7161132840070046e-06,9.453337296470643e-06,,MB,1266.823168,12544.049152,0.0,12138.315776,10311.21664,s,10,3.5694169006347654,0.35694169006347654,0.008989654410543094,0.36016143798828126,0.36241114501953126,0.36265688171386723,0.36285347106933596,"[0.33080865478515625, 0.35555950927734375, 0.36139324951171875, 0.36035760498046876, 0.3623565368652344, 0.35996527099609377, 0.36061227416992186, 0.35897454833984377, 0.36290261840820315, 0.35648663330078123]",tokens/s,717.2039779227648,kWh,9.846605048194559e-06,1.0858904184954239e-06,6.535199672599394e-06,1.7467695139289376e-05,tokens/kWh,14655625.596773189,MB,1296.187392,12550.340608,0.0,12144.607232,10311.2192,s,10,27.902214599609376,2.7902214599609376,0.0016507280407052072,2.7901104736328124,2.792058764648438,2.7924627319335937,2.792785905761719,"[2.79008203125, 2.788056640625, 2.79286669921875, 2.7877890625, 2.791530517578125, 2.790138916015625, 2.791968994140625, 2.788630615234375, 2.789557373046875, 2.79159375]",tokens/s,22.578852934806825,kWh,8.205444702389034e-05,9.05076906624156e-06,5.4490515814601306e-05,0.0001455957319047332,tokens/kWh,432704.99193769234,,s,630,27.899994796752935,0.04428570602659195,0.0003147776694465258,0.04423612785339355,0.044469379425048826,0.044581004714965816,0.046426925430297854,"[0.04643603134155273, 0.04465087890625, 0.04418755340576172, 0.04413030242919922, 0.04411097717285156, 0.044069217681884765, 0.04399772644042969, 0.04406854248046875, 0.044176929473876955, 0.04406079864501953, 0.04406531143188477, 0.04401574325561523, 0.0442592658996582, 0.044219520568847655, 0.04408620834350586, 0.04409868621826172, 0.04410047912597656, 0.044009471893310545, 0.044184768676757816, 0.044159809112548826, 0.044199935913085936, 0.04441215896606445, 0.044477088928222656, 0.04446319961547852, 0.04437094497680664, 0.044279487609863284, 0.04431430435180664, 0.044240734100341794, 0.04422934341430664, 0.044272960662841795, 0.04418860626220703, 0.044356414794921875, 0.044186622619628906, 0.04428799819946289, 0.04431980895996094, 0.04422480010986328, 0.04416579055786133, 0.044154560089111325, 0.04408553695678711, 0.04418134307861328, 0.04436191940307617, 0.044246273040771486, 0.04424985504150391, 0.0443043212890625, 
0.04454364776611328, 0.04459766387939453, 0.04446928024291992, 0.04436067199707031, 0.044349441528320314, 0.04424649429321289, 0.04431721496582031, 0.04445180892944336, 0.044204063415527343, 0.044290046691894534, 0.04438227081298828, 0.044359615325927734, 0.04424892807006836, 0.044179615020751954, 0.04419321441650391, 0.044302783966064456, 0.044338623046875, 0.04415558242797851, 0.0442174072265625, 0.04638636779785156, 0.04460166549682617, 0.044208641052246096, 0.044077056884765625, 0.04406832122802734, 0.044019775390625, 0.04403804779052734, 0.04411040115356445, 0.0442081298828125, 0.04414467239379883, 0.04407068634033203, 0.04402604675292969, 0.044002464294433594, 0.044027774810791016, 0.04414972686767578, 0.04406486511230469, 0.0440544319152832, 0.04405619049072266, 0.04403033447265625, 0.04425471878051758, 0.04438175964355469, 0.044393566131591795, 0.04451926422119141, 0.04434124755859375, 0.04436572647094727, 0.044281024932861325, 0.044233631134033204, 0.04411910247802735, 0.044165470123291015, 0.04414022445678711, 0.04424182510375976, 0.04427734375, 0.04412403106689453, 0.04413254547119141, 0.04410771179199219, 0.04421791839599609, 0.04418595123291016, 0.04407868957519531, 0.04412508773803711, 0.04407068634033203, 0.04430665588378906, 0.04429619216918945, 0.04424499130249023, 0.044382209777832034, 0.04435279846191406, 0.04445404815673828, 0.04465926361083984, 0.04436095809936524, 0.0442289924621582, 0.04416915130615234, 0.044282241821289064, 0.04424505615234375, 0.044170719146728515, 0.04413040161132813, 0.04424467086791992, 0.04429436874389649, 0.04427804946899414, 0.044206336975097654, 0.04422041702270508, 0.04414838409423828, 0.04419401550292969, 0.04442678451538086, 0.04442892837524414, 0.04642937469482422, 0.04480694580078125, 0.044371967315673826, 0.044068862915039066, 0.04410121536254883, 0.0440733757019043, 0.04420403289794922, 0.04431257629394531, 0.04420608139038086, 0.04425027084350586, 0.04417225646972656, 0.044197662353515625, 0.044048095703125, 0.04409971237182617, 0.044445953369140624, 0.04457062530517578, 0.04419152069091797, 0.04420735931396484, 0.04416611099243164, 0.04433660888671875, 0.04467510223388672, 0.04458752059936524, 0.04446319961547852, 0.04436630249023438, 0.04446047973632813, 0.04459648132324219, 0.04430105590820312, 0.04416921615600586, 0.04414054489135742, 0.044470272064208984, 0.04430819320678711, 0.044179744720458984, 0.044197887420654294, 0.04410947036743164, 0.04428835296630859, 0.04439654541015625, 0.04426342391967773, 0.044126209259033204, 0.04407295989990234, 0.04423680114746094, 0.04442867279052734, 0.04433369445800781, 0.044269630432128906, 0.044385887145996096, 0.044582271575927736, 0.04453884887695313, 0.044335105895996096, 0.04421200180053711, 0.044191967010498046, 0.044158977508544923, 0.04428160095214844, 0.04419964981079102, 0.04428035354614258, 0.044205726623535155, 0.04411836624145508, 0.04425507354736328, 0.044368030548095704, 0.04440662384033203, 0.04429225540161133, 0.044186847686767575, 0.044294944763183595, 0.04434479904174805, 0.04430201721191406, 0.046306270599365235, 0.044703102111816405, 0.04419392013549805, 0.04403660964965821, 0.04406476974487305, 0.04402380752563476, 0.044001022338867185, 0.04390915298461914, 0.0441223373413086, 0.04409907150268555, 0.04402159881591797, 0.04402447891235352, 0.04394911956787109, 0.044217281341552735, 0.0440648307800293, 0.04407904052734375, 0.04402694320678711, 0.044062782287597656, 0.044022655487060545, 0.044251136779785157, 0.04428819274902344, 0.0444147834777832, 0.04444569778442383, 
0.04447641754150391, 0.04437606430053711, 0.04432896041870117, 0.044170848846435545, 0.04414710235595703, 0.04421814346313477, 0.044187137603759766, 0.044187934875488284, 0.0441506233215332, 0.044160865783691404, 0.04418841552734375, 0.044219615936279294, 0.04421712112426758, 0.04418560028076172, 0.044143646240234376, 0.04425356674194336, 0.044084991455078125, 0.04433980941772461, 0.04427814483642578, 0.04431232070922852, 0.044329086303710935, 0.04441619110107422, 0.044449951171875, 0.04437801742553711, 0.04429286575317383, 0.04432896041870117, 0.044228321075439454, 0.044281761169433595, 0.04427609634399414, 0.04424703979492187, 0.04420608139038086, 0.04409958267211914, 0.04422598266601562, 0.04431465530395508, 0.04426531219482422, 0.04426607894897461, 0.04423689651489258, 0.04429808044433594, 0.04428518295288086, 0.04419631958007812, 0.04642092895507813, 0.044677120208740234, 0.04419686508178711, 0.044398944854736326, 0.044170974731445316, 0.044092094421386716, 0.04401996612548828, 0.044085247039794925, 0.0441712646484375, 0.04412380981445312, 0.04415667343139648, 0.04401417541503906, 0.044128257751464846, 0.0441343994140625, 0.04420169448852539, 0.04410531234741211, 0.044076831817626956, 0.0440266227722168, 0.04424252700805664, 0.044353439331054685, 0.044407455444335935, 0.044470272064208984, 0.04444569778442383, 0.044401664733886716, 0.044380382537841795, 0.04434143829345703, 0.04421718215942383, 0.04427763366699219, 0.04411782455444336, 0.044381343841552734, 0.044209056854248044, 0.04413849639892578, 0.04406697463989258, 0.04407689666748047, 0.04434124755859375, 0.04421222305297851, 0.04419136047363281, 0.044321151733398435, 0.04417705535888672, 0.044154911041259765, 0.04436742401123047, 0.04436454391479492, 0.04430438232421875, 0.04431222534179687, 0.04429654312133789, 0.04451900863647461, 0.044501407623291016, 0.044414497375488284, 0.044404640197753906, 0.04437974548339844, 0.04449993515014648, 0.04424038314819336, 0.0442147216796875, 0.044205406188964846, 0.04413433456420898, 0.04431248092651367, 0.044589950561523435, 0.044355583190917966, 0.04422246551513672, 0.04480819320678711, 0.04435968017578125, 0.0442259521484375, 0.04414441680908203, 0.04650243377685547, 0.04470924758911133, 0.04436275100708008, 0.04415235137939453, 0.044068641662597656, 0.04405660629272461, 0.044011966705322265, 0.04415692901611328, 0.044179393768310544, 0.0440832633972168, 0.044043807983398436, 0.04398128128051758, 0.04397999954223633, 0.044122142791748045, 0.04416707229614258, 0.04403647994995117, 0.04408982467651367, 0.044060672760009766, 0.044146625518798825, 0.04425324630737305, 0.044290046691894534, 0.04452297592163086, 0.044491294860839845, 0.044453887939453124, 0.044542049407958986, 0.044437408447265625, 0.044290046691894534, 0.04434860610961914, 0.044249919891357424, 0.04417536163330078, 0.044142593383789064, 0.04409673690795898, 0.04412211227416992, 0.04412643051147461, 0.04427167892456055, 0.044176929473876955, 0.04415996932983399, 0.04412588882446289, 0.04411151885986328, 0.04420470428466797, 0.044404159545898436, 0.04428803253173828, 0.044372318267822265, 0.04442736053466797, 0.04442531204223633, 0.0445030403137207, 0.04448160171508789, 0.04440121459960938, 0.044321151733398435, 0.044297534942626955, 0.04423545455932617, 0.0442716178894043, 0.04461977767944336, 0.04421017456054688, 0.04420544052124024, 0.04429225540161133, 0.044335391998291014, 0.0441426887512207, 0.04418764877319336, 0.04412985610961914, 0.04428188705444336, 0.04433097457885742, 0.0442578239440918, 0.046436321258544924, 
0.044775806427001956, 0.04426710510253906, 0.04414505767822265, 0.044060672760009766, 0.044041759490966795, 0.044130847930908206, 0.04404012680053711, 0.04416921615600586, 0.04414668655395508, 0.044068862915039066, 0.044134273529052734, 0.044066272735595706, 0.04422892761230469, 0.04420595169067383, 0.044136287689208985, 0.044182144165039065, 0.04417536163330078, 0.044058624267578124, 0.04416694259643555, 0.044343521118164066, 0.04453324890136719, 0.04440934371948242, 0.04435696029663086, 0.044466846466064455, 0.04440019226074219, 0.044299808502197266, 0.044290046691894534, 0.04407814407348633, 0.044229503631591796, 0.04418860626220703, 0.04414656066894531, 0.044168800354003904, 0.044165409088134766, 0.04423299026489258, 0.04431372833251953, 0.04440550231933594, 0.044224639892578126, 0.04415692901611328, 0.04417232131958008, 0.044241470336914064, 0.044265857696533205, 0.04438224029541016, 0.04466483306884766, 0.04437811279296875, 0.04475904083251953, 0.044543998718261715, 0.04455833435058594, 0.04449801635742188, 0.044211105346679686, 0.04440659332275391, 0.04428787231445312, 0.04441120147705078, 0.044313663482666014, 0.044243167877197266, 0.04425939178466797, 0.04422268676757812, 0.04418195343017578, 0.044267520904541016, 0.044184959411621094, 0.0445794563293457, 0.044365825653076174, 0.04430368041992187, 0.046508384704589845, 0.04454550552368164, 0.04416780853271484, 0.044150047302246094, 0.044087135314941406, 0.043938591003417966, 0.04394598388671875, 0.043979808807373046, 0.04408419036865235, 0.044027904510498046, 0.04408028793334961, 0.04404291152954101, 0.044017921447753905, 0.043982784271240236, 0.04417523193359375, 0.04417958450317383, 0.044025856018066405, 0.044025856018066405, 0.044082847595214844, 0.04423920059204101, 0.0444026870727539, 0.04436345672607422, 0.044410400390625, 0.04440348815917969, 0.04442726516723633, 0.04432691192626953, 0.044203136444091795, 0.04412707138061524, 0.044137950897216796, 0.044224288940429686, 0.04412044906616211, 0.04407923126220703, 0.04401587295532226, 0.04409552001953125, 0.04404608154296875, 0.04421769714355469, 0.044252063751220705, 0.04417331314086914, 0.04417536163330078, 0.04413577651977539, 0.04426387023925781, 0.04433670425415039, 0.044352161407470704, 0.04448863983154297, 0.04445724868774414, 0.04454275131225586, 0.0445030403137207, 0.044437503814697264, 0.044339199066162106, 0.04425475311279297, 0.04425932693481445, 0.04437859344482422, 0.044273662567138675, 0.04423884963989258, 0.044214046478271485, 0.04420828628540039, 0.04441510391235352, 0.04425286483764648, 0.044448001861572266, 0.04422796630859375, 0.04424505615234375, 0.04429062271118164, 0.04434124755859375, 0.04647647857666016, 0.04498470306396484, 0.04424105453491211, 0.04410195159912109, 0.044063743591308595, 0.04395008087158203, 0.043953121185302736, 0.04398694229125977, 0.04405657577514648, 0.04424246215820313, 0.044042720794677734, 0.043993087768554685, 0.04402175903320313, 0.04411801528930664, 0.044230430603027344, 0.044041759490966795, 0.04411052703857422, 0.04405593490600586, 0.04408163070678711, 0.044140705108642576, 0.044206241607666015, 0.044448673248291014, 0.04467705535888672, 0.04444672012329102, 0.04441497421264649, 0.04430995178222656, 0.044269630432128906, 0.0441671028137207, 0.04412438583374023, 0.04412451171875, 0.04422860717773437, 0.04414611053466797, 0.04408787155151367, 0.0440945930480957, 0.04404723358154297, 0.04416668701171875, 0.04414614486694336, 0.04417843246459961, 0.04410966491699219, 0.044087390899658206, 0.044367393493652346, 
0.04447043228149414, 0.04443142318725586, 0.044373664855957035, 0.04434396743774414, 0.04441244888305664, 0.044521953582763674, 0.04438204956054687, 0.04428201675415039, 0.04424703979492187, 0.04432486343383789, 0.04430412673950195, 0.04426764678955078, 0.044366142272949216, 0.04426681518554688, 0.044300960540771483, 0.04451107025146484, 0.04432393646240235, 0.04434991836547852, 0.04417990493774414, 0.04423884963989258, 0.04434739303588867, 0.04432486343383789, 0.046456222534179685, 0.044665470123291015, 0.044198238372802734, 0.0441610221862793, 0.04430207824707031, 0.04428547286987305, 0.044217056274414065, 0.04419184112548828, 0.04404624176025391, 0.044184734344482425, 0.04430115127563477, 0.044214271545410154, 0.044148735046386715, 0.044093441009521485, 0.04424428939819336, 0.044313278198242184, 0.044439552307128906, 0.04416716766357422, 0.0443449592590332, 0.04426176071166992, 0.04443695831298828, 0.044712127685546874, 0.04454435348510742, 0.04436326217651367, 0.044275806427001956, 0.04428371047973633, 0.04425283050537109, 0.0441572494506836, 0.04424483108520508, 0.04424185562133789, 0.044171104431152346, 0.04422649765014648, 0.04423276901245117, 0.044085247039794925, 0.04414892959594727, 0.044334911346435545, 0.044298240661621094, 0.044222400665283206, 0.04415628814697266, 0.04415353775024414, 0.04425523376464844, 0.04444979095458984, 0.04450867080688477, 0.04444416046142578, 0.04451942443847656, 0.04437782287597656, 0.04444803237915039, 0.04439859390258789, 0.044333057403564455, 0.04431814575195313, 0.04421817779541016, 0.04429395294189453, 0.044165599822998045, 0.0441673583984375, 0.04411763381958008, 0.044171199798583985, 0.04424687957763672, 0.044166015625, 0.04423884963989258, 0.04417327880859375, 0.044136672973632815, 0.04413216018676758, 0.04431849670410156]",tokens/s,22.580649372498126,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 990, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 565, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 139, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 166.12 MiB is free. Process 138145 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 784, in __init__ self.norm_attn_norm = DbrxNormAttentionNorm( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 596, in __init__ self.attn = DBRX_ATTENTION_CLASSES[config._attn_implementation]( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 288, in __init__ self.Wqkv = nn.Linear( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 190.12 MiB is free. Process 109816 has 14.55 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 1.55 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1133, in __init__ self.model = StableLmModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in __init__ [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 689, in __init__ self.mlp = StableLmMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 273, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 270.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 189611 has 14.72 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 1.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 718, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 324.12 MiB is free. Process 41066 has 14.42 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 12.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,789.020672,1128.136704,0.0,725.614592,666.338304,s,1,7.51341455078125,7.51341455078125,0.0,7.51341455078125,7.51341455078125,7.51341455078125,7.51341455078125,[7.51341455078125],,kWh,2.6275512375150355e-06,2.768387340769674e-07,9.325007459815016e-07,3.8368907175735046e-06,,MB,1129.79968,1144.91392,0.0,731.906048,601.370624,s,22,0.3360850229263305,0.015276591951196844,0.000650896586892799,0.0151385760307312,0.01525221757888794,0.015256490802764892,0.01761568062782287,"[0.01824278450012207, 0.01525487995147705, 0.015215328216552734, 0.015078592300415038, 0.015087264060974121, 0.015060159683227539, 0.0152042236328125, 0.015149888038635253, 0.015256575584411621, 0.015127264022827148, 0.015089119911193848, 0.01520304012298584, 0.015175871849060059, 0.015153696060180664, 0.015005727767944337, 0.01515401554107666, 0.01507539176940918, 0.015069631576538086, 0.015100640296936035, 0.015228256225585938, 0.015101280212402344, 0.0150513916015625]",tokens/s,16757.664328393854,kWh,5.653520787073354e-07,6.231350564512376e-08,3.7284040143138e-07,1.0005059857838392e-06,tokens/kWh,255870533.14772388,MB,1141.694464,1191.051264,0.0,778.043392,601.373184,s,22,9.694244689941407,0.44064748590642755,0.007637729806977857,0.43981463623046874,0.4491977508544922,0.45169085845947266,0.45311893005371096,"[0.433464599609375, 0.4397647094726562, 0.4233686828613281, 0.4271434936523438, 0.44755120849609376, 0.447448974609375, 0.45346429443359376, 0.44924282836914065, 0.44392779541015626, 0.43553292846679686, 0.44879205322265625, 0.4518197021484375, 0.44551901245117187, 0.43081854248046875, 0.4389619750976563, 0.43936288452148436, 0.43685494995117186, 0.43967178344726565, 0.43986456298828125, 0.44578668212890626, 0.4399541320800781, 0.43592889404296875]",tokens/s,142.9714273086269,kWh,1.2280156728719529e-05,1.354314566511231e-06,5.977659848046708e-06,1.9612131143277473e-05,tokens/kWh,3212297.508095889,,s,1386,9.683716889381401,0.006986808722497408,0.0002393117986254164,0.006980671882629394,0.007198256015777588,0.00728272807598114,0.007873208141326917,"[0.0073134078979492185, 0.0073400321006774905, 0.007178016185760498, 0.007057631969451904, 0.00693830394744873, 0.006795584201812744, 0.006758399963378906, 0.0067358717918396, 0.00671068811416626, 0.006785535812377929, 0.006811488151550293, 0.006654208183288574, 0.00666812801361084, 0.006688928127288819, 0.006735455989837646, 0.006920608043670654, 0.007083712100982666, 0.006822112083435058, 0.006778175830841064, 0.0068206400871276855, 0.006785024166107178, 0.006711296081542969, 0.006746111869812011, 0.006814015865325928, 0.007032544136047363, 0.006778848171234131, 0.006737919807434082, 0.006897664070129395, 
0.006885536193847656, 0.0068050241470336914, 0.006818111896514893, 0.006839744091033936, 0.00695900821685791, 0.006906367778778076, 0.00684662389755249, 0.006778880119323731, 0.00687059211730957, 0.006912000179290771, 0.007030208110809327, 0.0068618240356445315, 0.006938623905181885, 0.0068853759765625, 0.006998015880584717, 0.007190400123596191, 0.006968544006347656, 0.0070381760597229, 0.007136960029602051, 0.007234879970550537, 0.006897952079772949, 0.006877312183380127, 0.006833439826965332, 0.006802559852600098, 0.006811808109283448, 0.006800608158111572, 0.006727424144744873, 0.006904575824737549, 0.007001823902130127, 0.006779168128967285, 0.006803455829620361, 0.006743616104125977, 0.006689216136932373, 0.006636672019958496, 0.006738815784454346, 0.006851935863494873, 0.006897535800933838, 0.006855455875396729, 0.006913663864135743, 0.006860320091247559, 0.006883423805236816, 0.006877952098846436, 0.006850560188293457, 0.006724991798400879, 0.0067502717971801756, 0.006758975982666016, 0.006737504005432129, 0.006697375774383545, 0.0067686400413513184, 0.006800384044647217, 0.006781888008117676, 0.0067638401985168455, 0.0066909117698669435, 0.006695231914520264, 0.006668320178985596, 0.006972864151000977, 0.006918432235717774, 0.006854944229125977, 0.006912320137023926, 0.006819839954376221, 0.006881279945373535, 0.007088128089904785, 0.007085087776184082, 0.00705020809173584, 0.0071823358535766605, 0.007120895862579346, 0.0071617279052734375, 0.0070841598510742185, 0.007065599918365479, 0.006969344139099121, 0.007854080200195313, 0.007030784130096435, 0.00699951982498169, 0.0069779520034790035, 0.006969471931457519, 0.007223199844360351, 0.007063456058502197, 0.007022111892700195, 0.007045695781707764, 0.007069471836090088, 0.007088255882263183, 0.006957536220550537, 0.006978623867034912, 0.00697001600265503, 0.007034527778625488, 0.0069903039932250974, 0.006977695941925049, 0.0070095682144165035, 0.007029183864593506, 0.006942783832550049, 0.006960159778594971, 0.006988704204559326, 0.0069060797691345216, 0.007720704078674316, 0.007331264019012451, 0.0070863041877746585, 0.006976992130279541, 0.007092127799987793, 0.006807487964630127, 0.006846528053283691, 0.00684438419342041, 0.006718560218811035, 0.0067094721794128415, 0.006694784164428711, 0.006554463863372803, 0.006567679882049561, 0.006600063800811767, 0.006568831920623779, 0.006533120155334473, 0.006612351894378662, 0.0066343040466308595, 0.006696767807006836, 0.0066943039894104, 0.006581024169921875, 0.006535136222839356, 0.006800960063934327, 0.006522367954254151, 0.006624032020568848, 0.006663904190063476, 0.006641600131988525, 0.006698527812957764, 0.006689599990844727, 0.0065781760215759275, 0.006671711921691894, 0.0066341438293457036, 0.0066273918151855465, 0.006682559967041015, 0.0070266880989074704, 0.006891679763793945, 0.007030015945434571, 0.007721375942230224, 0.0069695358276367185, 0.006960192203521729, 0.006650879859924317, 0.006625152111053466, 0.006687903881072998, 0.0066200637817382816, 0.006746079921722412, 0.006610976219177246, 0.006655424118041992, 0.006656352043151855, 0.0066500802040100095, 0.006791168212890625, 0.006823935985565186, 0.006746111869812011, 0.006706431865692138, 0.006697728157043457, 0.0067259521484375, 0.006835264205932617, 0.006635327816009521, 0.0066722240447998045, 0.006626272201538086, 0.006639391899108887, 0.006744287967681885, 0.0066744318008422855, 0.0068436479568481446, 0.00674073600769043, 0.006674496173858643, 0.006649792194366455, 0.006670335769653321, 0.006576128005981445, 
0.006604800224304199, 0.006612991809844971, 0.006672383785247803, 0.006805376052856445, 0.006776768207550049, 0.0068076481819152835, 0.006854591846466064, 0.006738080024719238, 0.006741407871246338, 0.006721407890319824, 0.006705887794494629, 0.006686719894409179, 0.006635488033294678, 0.006819871902465821, 0.006657375812530518, 0.006699679851531982, 0.006672383785247803, 0.006666240215301514, 0.006645760059356689, 0.006717440128326416, 0.006675456047058105, 0.006667232036590576, 0.0067190399169921875, 0.00677891206741333, 0.0067916159629821775, 0.006799359798431396, 0.006887423992156983, 0.0070342721939086915, 0.006807168006896972, 0.00690067195892334, 0.006944799900054931, 0.006965248107910156, 0.006780096054077148, 0.006716224193572998, 0.006736127853393554, 0.006815487861633301, 0.006762495994567871, 0.0067355198860168455, 0.006684063911437988, 0.006912576198577881, 0.006683008193969726, 0.006723008155822754, 0.006769216060638428, 0.006795263767242431, 0.006768671989440918, 0.006776735782623291, 0.006815328121185303, 0.006735551834106446, 0.006729568004608154, 0.006851136207580566, 0.006799136161804199, 0.006961056232452393, 0.0069905281066894535, 0.006815743923187256, 0.00683622407913208, 0.0067926721572875975, 0.00684496021270752, 0.006811200141906738, 0.006775296211242675, 0.0067788162231445315, 0.006685023784637451, 0.006740799903869629, 0.00685756778717041, 0.006750207901000976, 0.006803455829620361, 0.006746111869812011, 0.006682559967041015, 0.00682809591293335, 0.0067480640411376955, 0.006750207901000976, 0.006753952026367188, 0.006778975963592529, 0.006707424163818359, 0.007496831893920898, 0.006999040126800537, 0.007252031803131103, 0.007116735935211182, 0.007104512214660645, 0.007315455913543701, 0.007274496078491211, 0.007129087924957276, 0.007208960056304932, 0.007149280071258545, 0.00714576005935669, 0.007489535808563232, 0.0071855678558349605, 0.007100992202758789, 0.0071170878410339354, 0.00707366418838501, 0.007082111835479736, 0.0071015038490295414, 0.007162367820739746, 0.007068416118621826, 0.007169727802276611, 0.007065184116363525, 0.007111072063446045, 0.007120895862579346, 0.0071066880226135255, 0.007141248226165771, 0.007101856231689453, 0.0071943359375, 0.007176127910614014, 0.007084928035736084, 0.006993855953216553, 0.007280511856079102, 0.007148831844329834, 0.007070591926574707, 0.007312607765197754, 0.00712505578994751, 0.0071682238578796385, 0.007268671989440918, 0.0071662402153015135, 0.007112415790557861, 0.00712556791305542, 0.007198560237884522, 0.007200640201568603, 0.00713318395614624, 0.007120639801025391, 0.007067903995513916, 0.007177311897277832, 0.007104671955108642, 0.007230207920074463, 0.007114751815795899, 0.007208415985107422, 0.0071890239715576175, 0.007079936027526855, 0.006904223918914795, 0.007180575847625732, 0.007057407855987549, 0.006980959892272949, 0.00701907205581665, 0.006994016170501709, 0.007098368167877197, 0.00725324821472168, 0.006924320220947266, 0.006926303863525391, 0.006881440162658691, 0.006868607997894287, 0.006982624053955078, 0.006883327960968018, 0.006883327960968018, 0.006957056045532227, 0.006952960014343262, 0.007108736038208008, 0.0073194241523742674, 0.007102464199066162, 0.0070266880989074704, 0.007020544052124023, 0.007085919857025147, 0.006989984035491943, 0.007176191806793213, 0.007012351989746094, 0.00692796802520752, 0.006879648208618164, 0.006917151927947998, 0.006910943984985351, 0.006856704235076904, 0.006791168212890625, 0.00689961576461792, 0.007134335994720459, 0.007019487857818604, 
0.007108607769012451, 0.007230976104736328, 0.007547391891479492, 0.007032383918762207, 0.007459263801574707, 0.007647232055664062, 0.007096320152282715, 0.00697049617767334, 0.006953855991363525, 0.009803903579711915, 0.007520287990570068, 0.006975327968597412, 0.006978720188140869, 0.006937407970428467, 0.006997183799743652, 0.006976416110992431, 0.0072906241416931155, 0.007073855876922607, 0.006905983924865722, 0.007045119762420654, 0.0070061440467834475, 0.006991936206817627, 0.007050879955291748, 0.007045216083526611, 0.0070143680572509765, 0.006938943862915039, 0.006989120006561279, 0.007383999824523926, 0.008210944175720216, 0.00720851182937622, 0.0071123518943786625, 0.007035679817199707, 0.007061503887176514, 0.007047167778015137, 0.007048736095428467, 0.007053088188171386, 0.007017248153686524, 0.006996992111206054, 0.0069939842224121095, 0.007013216018676758, 0.00698367977142334, 0.0070553598403930665, 0.007225344181060791, 0.007106560230255127, 0.007016448020935059, 0.007104512214660645, 0.0070381760597229, 0.006968095779418946, 0.007608320236206055, 0.007083263874053955, 0.007178495883941651, 0.007079967975616455, 0.006999936103820801, 0.007192704200744629, 0.007090943813323975, 0.007792352199554443, 0.00800767993927002, 0.008175135612487793, 0.00813913631439209, 0.007196767807006836, 0.007176191806793213, 0.007090176105499267, 0.007096384048461914, 0.007167935848236084, 0.00708403205871582, 0.0070917439460754395, 0.006996448040008545, 0.007116256237030029, 0.007162144184112549, 0.007117055892944336, 0.007344128131866455, 0.007131040096282959, 0.007209055900573731, 0.0071617598533630375, 0.0071389441490173336, 0.007196127891540527, 0.007086207866668701, 0.007218143939971924, 0.007087071895599365, 0.007206912040710449, 0.007162816047668457, 0.007027711868286133, 0.0071112961769104, 0.007309152126312256, 0.007221792221069336, 0.00708134412765503, 0.0071051521301269535, 0.007173183917999267, 0.00703542423248291, 0.007020959854125977, 0.00698473596572876, 0.006989312171936035, 0.007180352210998536, 0.007166719913482666, 0.0069996161460876465, 0.007090559959411621, 0.007083775997161865, 0.007164159774780274, 0.007006207942962647, 0.007042111873626709, 0.007027647972106934, 0.006987775802612305, 0.007054975986480713, 0.007034560203552246, 0.007099071979522705, 0.007172095775604248, 0.007128191947937012, 0.007149888038635254, 0.007024703979492187, 0.007219711780548096, 0.007190688133239746, 0.007121856212615967, 0.007207935810089112, 0.0074065918922424315, 0.007125760078430176, 0.007126719951629639, 0.007067520141601563, 0.007508768081665039, 0.008170975685119628, 0.0070618557929992675, 0.007102464199066162, 0.007137279987335205, 0.0070973758697509765, 0.007131552219390869, 0.007088255882263183, 0.007070144176483154, 0.007118591785430908, 0.007134560108184814, 0.007154592037200928, 0.00714137601852417, 0.0071344318389892575, 0.007119647979736328, 0.007522175788879395, 0.007128672122955322, 0.007372767925262451, 0.007155295848846436, 0.007125984191894532, 0.007282656192779541, 0.007217184066772461, 0.007167520046234131, 0.007027167797088623, 0.007122943878173828, 0.006846464157104492, 0.006762495994567871, 0.006884352207183838, 0.006792191982269287, 0.006985727787017822, 0.006879231929779053, 0.006987679958343506, 0.006903903961181641, 0.007047167778015137, 0.007120063781738281, 0.007232319831848144, 0.007077087879180908, 0.007110655784606934, 0.007145472049713135, 0.007188447952270508, 0.007034463882446289, 0.007131584167480469, 0.0071188478469848635, 0.007129087924957276, 
0.007151616096496582, 0.007127039909362793, 0.007251520156860352, 0.007063615798950196, 0.007031263828277588, 0.00698576021194458, 0.00695692777633667, 0.0069896640777587895, 0.0070124478340148925, 0.0069448318481445314, 0.007020415782928467, 0.007237535953521728, 0.007196896076202393, 0.0070529599189758305, 0.007041376113891601, 0.006998144149780274, 0.007040351867675781, 0.007078080177307129, 0.007096288204193116, 0.007180511951446533, 0.006987872123718261, 0.007268415927886963, 0.00698905611038208, 0.007731103897094726, 0.007095136165618896, 0.006926496028900147, 0.007011680126190185, 0.006900191783905029, 0.007227424144744873, 0.0072780799865722655, 0.007100448131561279, 0.007029215812683106, 0.007006207942962647, 0.006983359813690185, 0.006921984195709228, 0.007004735946655273, 0.006936575889587402, 0.0069959678649902345, 0.007049215793609619, 0.0069506878852844236, 0.006811423778533936, 0.00683078384399414, 0.006972671985626221, 0.006867136001586914, 0.006818111896514893, 0.00687494421005249, 0.0072288641929626465, 0.006997824192047119, 0.00705017614364624, 0.006888576030731201, 0.006845312118530273, 0.00695091199874878, 0.006987775802612305, 0.006991551876068115, 0.006836544036865235, 0.00678876781463623, 0.006746816158294678, 0.006871039867401123, 0.006873087882995605, 0.006832159996032715, 0.006842336177825928, 0.0067645440101623535, 0.00689356803894043, 0.0069560642242431645, 0.00689247989654541, 0.00678710412979126, 0.006825984001159668, 0.00687718391418457, 0.006835360050201416, 0.0068599038124084475, 0.00680022382736206, 0.0070107197761535645, 0.0067341761589050295, 0.006718815803527832, 0.006776991844177246, 0.0068282241821289065, 0.006818463802337646, 0.006782271862030029, 0.006794015884399414, 0.006821248054504394, 0.006858687877655029, 0.006943456172943115, 0.006894432067871094, 0.006894432067871094, 0.0070797119140625, 0.007204959869384766, 0.007092319965362549, 0.0070780482292175295, 0.007096159934997559, 0.007052320003509522, 0.006968287944793701, 0.0069959678649902345, 0.007096159934997559, 0.007145631790161133, 0.007036575794219971, 0.00701200008392334, 0.006916800022125244, 0.006876895904541016, 0.006794847965240478, 0.007047872066497803, 0.006973440170288086, 0.006819839954376221, 0.006846303939819336, 0.0068875842094421385, 0.006842368125915528, 0.006881279945373535, 0.006987711906433105, 0.006960959911346436, 0.006868800163269043, 0.006807807922363281, 0.00690342378616333, 0.006937056064605713, 0.007055456161499023, 0.006886688232421875, 0.006885312080383301, 0.006966047763824463, 0.006948863983154297, 0.006843584060668945, 0.006807744026184082, 0.006924287796020508, 0.006936575889587402, 0.0068631677627563475, 0.006895264148712158, 0.006795519828796386, 0.006809375762939453, 0.006812704086303711, 0.006843296051025391, 0.006863967895507812, 0.006893631935119629, 0.006978079795837402, 0.007125376224517822, 0.00704307222366333, 0.006870207786560059, 0.006845248222351074, 0.006858751773834228, 0.007038976192474365, 0.006961152076721191, 0.0069324798583984375, 0.006860383987426758, 0.007086495876312256, 0.006888576030731201, 0.00695795202255249, 0.00713318395614624, 0.00710041618347168, 0.007022336006164551, 0.006944191932678223, 0.006998847961425781, 0.007002111911773682, 0.007000063896179199, 0.007000063896179199, 0.007040832042694092, 0.007050591945648194, 0.006998879909515381, 0.007251359939575195, 0.007115039825439453, 0.007021024227142334, 0.008658783912658691, 0.00821183967590332, 0.007214752197265625, 0.008317919731140136, 0.007337408065795898, 
0.007155456066131592, 0.007117536067962647, 0.007247968196868897, 0.007107935905456543, 0.007047840118408203, 0.007161856174468994, 0.007127039909362793, 0.007395328044891358, 0.007188127994537353, 0.007217504024505615, 0.007196671962738037, 0.007136735916137695, 0.00733625602722168, 0.0071907200813293454, 0.007094399929046631, 0.007112607955932617, 0.007061408042907715, 0.007487071990966797, 0.0070845441818237305, 0.007143104076385498, 0.007139584064483642, 0.007033952236175537, 0.007143136024475098, 0.007142752170562744, 0.007137919902801514, 0.007067552089691162, 0.0069978880882263185, 0.00703718376159668, 0.007118048191070557, 0.007162655830383301, 0.007191904067993164, 0.0071502399444580075, 0.007131135940551757, 0.007168000221252442, 0.007135231971740722, 0.007153664112091064, 0.007217152118682861, 0.007278592109680176, 0.007140384197235107, 0.007078879833221435, 0.007417791843414306, 0.00718006420135498, 0.007092512130737304, 0.007184383869171143, 0.00719868803024292, 0.00723356819152832, 0.007221248149871826, 0.007161856174468994, 0.007149087905883789, 0.007197152137756348, 0.0072005119323730465, 0.007178495883941651, 0.007143136024475098, 0.007135615825653076, 0.0070347838401794436, 0.007065599918365479, 0.007136256217956543, 0.007196864128112793, 0.007353151798248291, 0.0072622718811035155, 0.007312960147857666, 0.007177919864654541, 0.007123456001281738, 0.007254303932189942, 0.0071263680458068844, 0.007105023860931397, 0.007082047939300537, 0.007137279987335205, 0.007221248149871826, 0.007184383869171143, 0.007204864025115967, 0.007127039909362793, 0.007179999828338623, 0.0072297282218933105, 0.007247776031494141, 0.007161952018737793, 0.00737824010848999, 0.0072485761642456055, 0.0071940159797668456, 0.007139935970306397, 0.007073887825012207, 0.00705292797088623, 0.007036799907684326, 0.007072159767150879, 0.007051263809204102, 0.0071129918098449705, 0.0071329278945922855, 0.007056447982788086, 0.007152095794677734, 0.007159264087677002, 0.007115647792816162, 0.007165247917175293, 0.007048992156982422, 0.007070720195770264, 0.007159264087677002, 0.007046688079833985, 0.007123968124389648, 0.006858751773834228, 0.00707583999633789, 0.007217152118682861, 0.007318975925445556, 0.0071296639442443846, 0.007006207942962647, 0.007037024021148682, 0.007097536087036132, 0.007055391788482666, 0.007041728019714356, 0.007085343837738037, 0.007113696098327637, 0.007025568008422851, 0.00695136022567749, 0.0071049280166625975, 0.007018496036529541, 0.007005407810211182, 0.006984416007995605, 0.00728275203704834, 0.0071157760620117185, 0.007074816226959229, 0.006940671920776367, 0.007032959938049316, 0.006983551979064941, 0.0069918718338012695, 0.0069816322326660156, 0.006995359897613526, 0.006994527816772461, 0.006987775802612305, 0.006973055839538575, 0.0069753918647766115, 0.007053055763244629, 0.006957024097442627, 0.007138304233551026, 0.007075232028961181, 0.0070267200469970705, 0.00692409610748291, 0.006860479831695556, 0.00677507209777832, 0.0071907200813293454, 0.007139840126037598, 0.006994976043701172, 0.006992127895355225, 0.007039552211761475, 0.0070553598403930665, 0.007360511779785156, 0.007249120235443115, 0.007267104148864746, 0.007128064155578613, 0.006988800048828125, 0.006866943836212158, 0.007020544052124023, 0.00690502405166626, 0.006818624019622803, 0.0067645440101623535, 0.0067990078926086425, 0.0067935681343078615, 0.0066826238632202144, 0.006694623947143555, 0.006740255832672119, 0.0066375679969787596, 0.006810848236083984, 0.00698038387298584, 0.006905856132507324, 
0.00690176010131836, 0.006856480121612549, 0.006871263980865478, 0.0067636480331420895, 0.006761343955993653, 0.006758399963378906, 0.007319551944732666, 0.006770688056945801, 0.0068091521263122555, 0.00680947208404541, 0.006687295913696289, 0.006989151954650879, 0.006703775882720947, 0.006673855781555176, 0.0067552318572998045, 0.006815328121185303, 0.006801472187042236, 0.006938623905181885, 0.0068915200233459475, 0.00687718391418457, 0.006772736072540283, 0.006766304016113282, 0.006738207817077637, 0.007005216121673584, 0.006884223937988281, 0.006830175876617431, 0.00705244779586792, 0.00696611213684082, 0.0068544321060180664, 0.006729504108428955, 0.006734015941619873, 0.0067333121299743654, 0.006707456111907959, 0.006762303829193115, 0.006746943950653076, 0.006805376052856445, 0.006799327850341797, 0.007008287906646728, 0.006846496105194092, 0.006851744174957275, 0.006826752185821533, 0.0069279041290283205, 0.00681603193283081, 0.006807807922363281, 0.0067338237762451176, 0.006782432079315185, 0.006799744129180908, 0.006788608074188233, 0.0071502399444580075, 0.0068050241470336914, 0.007035359859466552, 0.006960127830505371, 0.007480319976806641, 0.006907487869262695, 0.0070414400100708005, 0.0070348801612854, 0.0070797119140625, 0.00710262393951416, 0.0071979517936706545, 0.007155935764312744, 0.006998623847961426, 0.007077888011932373, 0.006971392154693603, 0.0069550080299377445, 0.006934336185455322, 0.007059264183044434, 0.006971776008605957, 0.007038976192474365, 0.007110655784606934, 0.006948991775512696, 0.006758272171020508, 0.00683622407913208, 0.0069632000923156735, 0.006815328121185303, 0.006826176166534424, 0.006744287967681885, 0.006765888214111328, 0.006768608093261718, 0.006674304008483887, 0.006878047943115234, 0.006930335998535157, 0.006922207832336426, 0.007009791851043701, 0.0068204798698425296, 0.00683350419998169, 0.006814367771148681, 0.006837376117706299, 0.0070644478797912596, 0.007008255958557129, 0.006875135898590088, 0.006913343906402588, 0.006865632057189941, 0.0073697280883789065, 0.006933472156524658, 0.007129055976867676, 0.006916128158569336, 0.006928063869476318, 0.006872735977172852, 0.006883039951324463, 0.006888544082641602, 0.006903327941894531, 0.006944543838500976, 0.006917952060699463, 0.006932191848754883, 0.006896639823913574, 0.007067391872406006, 0.007010335922241211, 0.00687283182144165, 0.006996575832366943, 0.007098207950592041, 0.007073823928833008, 0.007073791980743408, 0.0069868798255920414, 0.007072639942169189, 0.007401472091674805, 0.007352096080780029, 0.007098336219787598, 0.007049471855163574, 0.007112703800201416, 0.007359807968139649, 0.007168032169342041, 0.007215231895446777, 0.007040639877319336, 0.007081120014190674, 0.007031551837921143, 0.007067872047424316, 0.006959648132324219, 0.006912255764007568, 0.007130847930908203, 0.006964896202087402, 0.006769279956817627, 0.0068280320167541505, 0.006827616214752198, 0.006795680046081543, 0.006801407814025879, 0.006770688056945801, 0.006803455829620361, 0.00686191987991333, 0.006942880153656006, 0.006943359851837159, 0.0068568320274353025, 0.006786367893218994, 0.006789728164672852, 0.00708019208908081, 0.007085919857025147, 0.007006207942962647, 0.006924287796020508, 0.0069402561187744145, 0.00685097599029541, 0.006862847805023193, 0.006846176147460937, 0.006735328197479248, 0.006744895935058594, 0.006725632190704346, 0.006773056030273438, 0.006718783855438232, 0.0067482562065124515, 0.006784575939178467, 0.007062111854553223, 0.006987904071807861, 0.007024703979492187, 
0.00701638412475586, 0.0069714560508728025, 0.0070471038818359375, 0.006977536201477051, 0.006954880237579346, 0.006981760025024414, 0.007051040172576904, 0.007012576103210449, 0.006977536201477051, 0.00689961576461792, 0.006957151889801025, 0.007282623767852783, 0.007134399890899658, 0.006892416000366211, 0.0068692159652709965, 0.006940415859222412, 0.0070000958442687986, 0.006882368087768554, 0.006881919860839844, 0.00687340784072876, 0.0068364801406860356, 0.006763999938964844, 0.006750495910644532, 0.006690559864044189, 0.006697216033935547, 0.006807551860809326, 0.007255424022674561, 0.006977375984191895, 0.007033631801605224, 0.0070793919563293456, 0.0071214399337768556, 0.006977536201477051, 0.0069283838272094726, 0.006917984008789063, 0.006906015872955322, 0.00687718391418457, 0.006915264129638672, 0.00697049617767334, 0.007063231945037842, 0.007196479797363281, 0.0072193918228149415, 0.0070863041877746585, 0.007032639980316162, 0.0068689918518066405, 0.006858335971832275, 0.006893951892852784, 0.006726719856262207, 0.006759136199951172, 0.0067069120407104495, 0.006803328037261963, 0.006709856033325195, 0.008132639884948731, 0.006893792152404785, 0.006844255924224853, 0.006727615833282471, 0.006686816215515137, 0.006960127830505371, 0.00686787223815918, 0.006840320110321045, 0.006885087966918945, 0.006948416233062744, 0.0069616961479187014, 0.0070243201255798336, 0.006938560009002686, 0.00688921594619751, 0.006867775917053223, 0.007206399917602539, 0.007026463985443115, 0.006986464023590088, 0.006815743923187256, 0.0067870721817016606, 0.006807551860809326, 0.006761504173278809, 0.006958047866821289, 0.006895808219909668, 0.006788928031921387, 0.0069816322326660156, 0.006762847900390625, 0.006968607902526855, 0.006846560001373291, 0.006797728061676026, 0.006712800025939942, 0.006670207977294922, 0.006607583999633789, 0.006684480190277099, 0.006807680130004883, 0.006757472038269043, 0.006871903896331787, 0.006922463893890381, 0.006959199905395508, 0.006895008087158203, 0.006852960109710694, 0.006852384090423584, 0.006786880016326905, 0.006799776077270508, 0.0072121920585632325, 0.007154528141021728, 0.007172095775604248, 0.007317247867584228, 0.007063039779663086, 0.007020319938659668, 0.006980031967163086, 0.007023136138916016, 0.006954239845275879, 0.007011072158813476, 0.006930431842803955, 0.007012288093566895, 0.006987103939056397, 0.007035647869110108, 0.006936895847320557, 0.006987423896789551, 0.007010111808776856, 0.0073331518173217775, 0.0071766400337219234, 0.006962687969207764, 0.006970335960388183, 0.007164031982421875, 0.007204448223114014, 0.007075136184692383, 0.007086912155151367, 0.007055424213409424, 0.007041024208068848, 0.006922336101531983, 0.0070348801612854, 0.007114751815795899, 0.007028800010681152, 0.00701638412475586, 0.007049215793609619, 0.007097472190856933, 0.006996863842010498, 0.006950592041015625, 0.007079679965972901, 0.006909567832946778, 0.007021503925323486, 0.006930335998535157, 0.007112800121307373, 0.006888480186462403, 0.006808544158935547, 0.00686406421661377, 0.006963391780853271, 0.006795584201812744, 0.006912223815917969, 0.006852543830871582, 0.006858751773834228, 0.006903359889984131, 0.007174752235412598, 0.007020383834838867, 0.007116799831390381, 0.007049215793609619, 0.007242879867553711, 0.008941663742065429, 0.00852233600616455, 0.007428383827209473, 0.0069585280418396, 0.007713247776031494, 0.007063551902770996, 0.006971392154693603, 0.007271935939788818, 0.0070538239479064945, 0.006881279945373535, 0.006881279945373535, 
0.006858304023742676, 0.006792831897735596, 0.0068199682235717774, 0.006852479934692382, 0.0067981438636779785, 0.006765984058380127, 0.0066893758773803715, 0.006848512172698974, 0.006778079986572266, 0.006744416236877442, 0.00674451208114624, 0.006744063854217529, 0.006979584217071533, 0.006783135890960693, 0.006774271965026855, 0.006736224174499512, 0.006815711975097657, 0.0067420477867126466, 0.006766592025756836, 0.006759520053863525, 0.0067935361862182615, 0.00682585620880127, 0.006820799827575684, 0.00687116813659668, 0.00693942403793335, 0.006951551914215088, 0.006916351795196533, 0.006831903934478759, 0.0069838080406188965, 0.006914144039154053, 0.007007423877716064, 0.006816576004028321, 0.006812992095947265, 0.0067775359153747555, 0.006855936050415039, 0.006880159854888916, 0.006760064125061035, 0.0068479361534118655, 0.00733673620223999, 0.006991072177886963, 0.00702726411819458, 0.0070002880096435545, 0.007026879787445068, 0.007034656047821045, 0.007034912109375, 0.007100448131561279, 0.00772214412689209, 0.007076352119445801, 0.0070572800636291505, 0.0069777917861938476, 0.007016640186309814, 0.007009503841400147, 0.0069270081520080564, 0.006992000102996826, 0.007120480060577392, 0.0072278079986572265, 0.007085951805114746, 0.007104127883911133, 0.007131648063659668, 0.007108191967010498, 0.007092639923095703, 0.007009696006774902, 0.006975327968597412, 0.007004928112030029, 0.007077695846557618, 0.006946208000183106, 0.006886303901672363, 0.006853568077087402, 0.007054272174835205, 0.007135231971740722, 0.0069704318046569826, 0.006932703971862793, 0.006768767833709717, 0.007033440113067627, 0.007096320152282715, 0.007178368091583252, 0.007028607845306396, 0.006989855766296387, 0.006902751922607422, 0.006914368152618408, 0.006900063991546631, 0.0071560640335083005, 0.007090176105499267, 0.007112927913665771, 0.007085824012756347, 0.007016736030578613, 0.007005440235137939, 0.006975168228149414, 0.006941504001617431, 0.0069632000923156735, 0.006930431842803955, 0.006949024200439453, 0.006997856140136718, 0.008392288208007812, 0.007319200038909912, 0.007117568016052246, 0.007094016075134277, 0.007204991817474365, 0.007140639781951904, 0.007070559978485108, 0.0070225920677185055, 0.007024543762207031, 0.006989920139312744, 0.006977536201477051, 0.007114751815795899, 0.0070594558715820314, 0.007220799922943115, 0.0069259839057922365, 0.006904607772827149, 0.007039199829101563, 0.007065375804901123, 0.007025983810424805, 0.007024735927581787, 0.007981599807739258, 0.007313727855682373, 0.0070694398880004885, 0.007206912040710449, 0.006991231918334961, 0.007061471939086914, 0.007139232158660889, 0.0070899839401245114, 0.007031744003295899, 0.007097504138946533, 0.007109471797943115, 0.00706112003326416, 0.007045536041259765, 0.007327712059020996, 0.007176191806793213, 0.007096320152282715, 0.007402656078338623, 0.006951007843017578, 0.006970111846923828, 0.007045407772064209, 0.006954720020294189, 0.006936575889587402, 0.0069959678649902345, 0.00692409610748291, 0.006875328063964844, 0.006862336158752441, 0.006898047924041748, 0.006922368049621582, 0.00686681604385376, 0.006874815940856933, 0.006755072116851807, 0.006733503818511963, 0.006670335769653321, 0.006916096210479736, 0.006780992031097412, 0.006678143978118897, 0.006713344097137451, 0.006729568004608154, 0.006662591934204102, 0.006817376136779785, 0.006834623813629151, 0.006967296123504638, 0.0068191041946411134, 0.006806240081787109, 0.006846367835998535, 0.006750304222106934, 0.006744063854217529, 0.006852608203887939, 
0.006821887969970703, 0.006866943836212158, 0.006870016098022461, 0.007142399787902832, 0.007222527980804443, 0.007106624126434326, 0.006867424011230468, 0.006974880218505859, 0.007081984043121338, 0.007032671928405762, 0.006988671779632569, 0.006989696025848389, 0.006899712085723877, 0.006911456108093262, 0.006922111988067627, 0.006738624095916748, 0.0068650879859924316, 0.006764319896697998, 0.0067621440887451175, 0.006780640125274658, 0.006765120029449463, 0.006929728031158447, 0.006796031951904297, 0.0068321280479431154, 0.007173791885375976, 0.006952928066253662, 0.006787168025970459, 0.0067092800140380856, 0.006723519802093506, 0.006626880168914795, 0.006636288166046143, 0.0067329277992248535, 0.006663040161132812, 0.006602752208709717, 0.006649856090545654, 0.0066007041931152345, 0.006580543994903564, 0.006569664001464844, 0.00659660816192627, 0.006669568061828613, 0.006750239849090576, 0.00701308822631836, 0.006860191822052002, 0.006884223937988281, 0.006813407897949219, 0.006867167949676514, 0.006809375762939453, 0.00684441614151001, 0.006840320110321045, 0.006868832111358643, 0.00693452787399292, 0.006955167770385742, 0.006954400062561035, 0.007115392208099365, 0.007188447952270508, 0.007443647861480713, 0.007109055995941162, 0.007227776050567627, 0.007355743885040283, 0.007142047882080078, 0.007264256000518799, 0.007030303955078125, 0.007094624042510986, 0.00706982421875, 0.007135072231292725, 0.007208896160125732, 0.006924320220947266, 0.006858943939208984, 0.007278592109680176, 0.007349887847900391]",tokens/s,143.1268608771293,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.235648,2127.495168,0.0,1732.247552,1728.316416,s,1,7.494126953125,7.494126953125,0.0,7.494126953125,7.494126953125,7.494126953125,7.494126953125,[7.494126953125],,kWh,9.68461827082289e-06,1.0611642310189389e-06,4.302503442007999e-06,1.5048285943849828e-05,,MB,1107.70176,2328.82176,0.0,1923.088384,1891.2,s,11,0.4685140800476074,0.042592189095237036,0.003926996687913523,0.04137561416625977,0.04316851043701172,0.04874103927612305,0.05319906234741212,"[0.05431356811523438, 0.03801161575317383, 0.040621376037597655, 0.04316851043701172, 0.04131808090209961, 0.04131238555908203, 0.04130863952636719, 0.04286995315551758, 0.041873439788818356, 0.042340896606445313, 0.04137561416625977]",tokens/s,6010.491722498193,kWh,1.7661555776105405e-06,1.940468376263582e-07,1.16911405443373e-06,3.1293164696706284e-06,tokens/kWh,81807002.41767012,MB,1117.642752,2328.82176,0.0,1923.088384,1895.80032,s,11,10.273246093749998,0.9339314630681816,0.004601488466255629,0.93250439453125,0.939381103515625,0.9407118835449219,0.9417765075683594,"[0.9289093627929688, 0.93250439453125, 0.939381103515625, 0.9280496826171875, 0.9309269409179688, 0.9381678466796874, 0.9389963989257812, 0.9330540771484375, 0.9307843627929687, 0.9304292602539063, 
0.9420426635742187]",tokens/s,67.45677010712369,kWh,2.6926773923902357e-05,2.9703234128768476e-06,1.599403098920306e-05,4.589112832598225e-05,tokens/kWh,1372814.3608169074,,s,693,10.270677665710455,0.014820602692222868,0.00037578824727001534,0.014731040000915527,0.015048230171203613,0.015191097450256347,0.016418733444213875,"[0.015498559951782226, 0.015063743591308593, 0.014892288208007812, 0.01481833553314209, 0.01478012752532959, 0.014735360145568848, 0.014867487907409669, 0.014663968086242676, 0.014787263870239258, 0.014648703575134278, 0.014684639930725097, 0.014583135604858398, 0.01581753635406494, 0.015015935897827149, 0.014782464027404785, 0.01469644832611084, 0.01491763210296631, 0.014620672225952149, 0.014581952095031738, 0.014794495582580567, 0.014727231979370117, 0.014692352294921876, 0.014645376205444337, 0.014677248001098632, 0.014613375663757325, 0.014599295616149902, 0.014619263648986816, 0.014577664375305176, 0.014755328178405762, 0.014710432052612304, 0.014722111701965333, 0.014655263900756835, 0.01466534423828125, 0.014629247665405274, 0.014617664337158203, 0.01479526424407959, 0.01461683177947998, 0.014622912406921387, 0.014985216140747071, 0.014940159797668457, 0.014796799659729003, 0.014663680076599121, 0.014673439979553223, 0.01463548755645752, 0.014587840080261231, 0.014778431892395019, 0.014650752067565918, 0.014575231552124023, 0.01461350440979004, 0.014645248413085938, 0.014620672225952149, 0.01459404754638672, 0.014607839584350586, 0.01471951961517334, 0.014577504158020019, 0.01457577610015869, 0.014611935615539551, 0.014742048263549804, 0.01476198387145996, 0.014876319885253906, 0.014836064338684082, 0.014735360145568848, 0.014689599990844727, 0.014716352462768555, 0.014578240394592285, 0.014592000007629394, 0.014694399833679199, 0.014569472312927247, 0.014691328048706055, 0.014739520072937011, 0.01469644832611084, 0.01472003173828125, 0.014605600357055664, 0.01851798439025879, 0.014973600387573243, 0.014905344009399414, 0.014911487579345703, 0.01552128028869629, 0.014874464035034179, 0.014854016304016114, 0.014725919723510741, 0.014745183944702148, 0.014657952308654786, 0.014601728439331055, 0.014766592025756836, 0.014609472274780273, 0.014563679695129395, 0.014537311553955079, 0.014645248413085938, 0.014650495529174804, 0.014691200256347656, 0.014681568145751953, 0.014647839546203613, 0.014704416275024414, 0.014622943878173828, 0.014581567764282226, 0.014588095664978028, 0.014667967796325683, 0.014759743690490722, 0.01464748764038086, 0.014624575614929199, 0.014622079849243163, 0.014627455711364746, 0.014720128059387208, 0.014791551589965821, 0.014905344009399414, 0.014962688446044922, 0.014825247764587402, 0.014799072265625, 0.014699935913085938, 0.01470524787902832, 0.014561280250549317, 0.014733311653137206, 0.014657535552978516, 0.01470464038848877, 0.01463100814819336, 0.014720864295959472, 0.014832799911499023, 0.014930848121643067, 0.014774463653564452, 0.014648960113525391, 0.01515443229675293, 0.015434687614440917, 0.01466163158416748, 0.014608384132385254, 0.014684160232543946, 0.014941887855529785, 0.01491811180114746, 0.014723072052001953, 0.014741344451904297, 0.014659487724304199, 0.014606592178344726, 0.014649344444274901, 0.014655488014221191, 0.014751744270324708, 0.01489305591583252, 0.014804991722106933, 0.01467801570892334, 0.014683520317077637, 0.014592639923095703, 0.01461017608642578, 0.014699775695800782, 0.014664287567138673, 0.01495081615447998, 0.01474732780456543, 0.014873920440673828, 0.014982144355773925, 0.01477222442626953, 
0.015073280334472656, 0.014868415832519532, 0.015034432411193848, 0.014731264114379883, 0.01475984001159668, 0.014876607894897462, 0.014890368461608888, 0.014884703636169434, 0.014758848190307617, 0.014720416069030762, 0.014807135581970214, 0.014834176063537598, 0.014928895950317383, 0.01477734375, 0.015099040031433105, 0.014801440238952636, 0.014649632453918458, 0.01473459243774414, 0.014773247718811035, 0.014685055732727052, 0.014818207740783691, 0.014790656089782715, 0.014638976097106933, 0.01463923168182373, 0.014618623733520507, 0.014606240272521973, 0.014977375984191894, 0.017809152603149414, 0.01989017677307129, 0.015123647689819336, 0.014771007537841796, 0.014790656089782715, 0.014600192070007324, 0.014711039543151855, 0.014597887992858887, 0.014559231758117675, 0.014872575759887695, 0.014771360397338867, 0.014676992416381835, 0.014686047554016113, 0.014897151947021485, 0.015046336174011231, 0.014784000396728515, 0.014740480422973632, 0.014780223846435547, 0.01460223960876465, 0.015560704231262207, 0.01657823944091797, 0.014774592399597167, 0.014700511932373046, 0.014555040359497071, 0.014641280174255372, 0.014798272132873535, 0.014623104095458984, 0.014600383758544921, 0.014612480163574219, 0.0145830078125, 0.014584128379821776, 0.014715007781982422, 0.014640959739685058, 0.014842399597167969, 0.01477129554748535, 0.01472383975982666, 0.014670207977294921, 0.01468393611907959, 0.014548992156982422, 0.014618111610412597, 0.01464575958251953, 0.014619903564453125, 0.014664447784423829, 0.014673919677734374, 0.014796640396118164, 0.01468227195739746, 0.014929920196533204, 0.014632960319519044, 0.014648991584777832, 0.01456982421875, 0.014573568344116212, 0.014663680076599121, 0.014632896423339843, 0.014594112396240234, 0.014582880020141601, 0.014637984275817872, 0.014655488014221191, 0.01465664005279541, 0.015078399658203125, 0.014714431762695313, 0.014611935615539551, 0.014667648315429688, 0.01458790397644043, 0.014695327758789062, 0.014727456092834473, 0.014665504455566407, 0.014702591896057129, 0.014730879783630371, 0.014680447578430176, 0.014589952468872071, 0.014643487930297851, 0.014649056434631347, 0.014727168083190918, 0.01469644832611084, 0.014682111740112304, 0.014706687927246094, 0.014624544143676758, 0.014831616401672363, 0.014730208396911621, 0.014741503715515136, 0.014724767684936524, 0.014689888000488281, 0.014727295875549317, 0.014957183837890626, 0.01470809555053711, 0.014700672149658204, 0.01476863956451416, 0.01479916763305664, 0.01523475170135498, 0.014870176315307617, 0.014926176071166992, 0.014736800193786622, 0.014673983573913574, 0.014704895973205566, 0.014723360061645508, 0.014720128059387208, 0.015069664001464844, 0.014917759895324708, 0.015108384132385255, 0.014791999816894531, 0.014798912048339843, 0.014836352348327636, 0.014687487602233887, 0.014852864265441895, 0.01480294418334961, 0.014663680076599121, 0.014628864288330079, 0.014628288269042968, 0.014692416191101074, 0.014713376045227052, 0.01487664031982422, 0.014839936256408691, 0.014665599822998046, 0.014694399833679199, 0.014665727615356445, 0.014691840171813965, 0.014782976150512696, 0.01481935977935791, 0.014659487724304199, 0.014710335731506348, 0.014707200050354004, 0.01459404754638672, 0.01465334415435791, 0.01511843204498291, 0.01482652759552002, 0.014750656127929687, 0.01485580825805664, 0.014780832290649413, 0.014679231643676759, 0.0147542724609375, 0.014711135864257813, 0.014652576446533203, 0.014715423583984375, 0.014698816299438476, 0.014684160232543946, 0.014696576118469239, 
0.014636672019958497, 0.014678272247314453, 0.014796192169189454, 0.015120991706848145, 0.01546656036376953, 0.015187359809875489, 0.0149302396774292, 0.015048447608947755, 0.01488700771331787, 0.014885343551635743, 0.014739456176757813, 0.014681568145751953, 0.01460860824584961, 0.014584128379821776, 0.014897151947021485, 0.014731072425842285, 0.01477552032470703, 0.014667872428894044, 0.0147010555267334, 0.01490777587890625, 0.014693440437316895, 0.014705120086669922, 0.014797280311584473, 0.014809087753295898, 0.014917183876037597, 0.015276288032531738, 0.01499289608001709, 0.014787263870239258, 0.015539551734924316, 0.01778755187988281, 0.015519743919372558, 0.014776320457458495, 0.014895071983337402, 0.014782496452331543, 0.014692352294921876, 0.014671872138977051, 0.014749695777893066, 0.01472697639465332, 0.014683903694152832, 0.014746047973632813, 0.01519820785522461, 0.01469983959197998, 0.014809632301330566, 0.014779840469360352, 0.01479139232635498, 0.014702591896057129, 0.014673919677734374, 0.014656671524047852, 0.01471564769744873, 0.01472707176208496, 0.014672063827514648, 0.015119744300842284, 0.015082112312316895, 0.014788607597351074, 0.014712575912475587, 0.014852352142333984, 0.014710559844970703, 0.014702176094055176, 0.015079104423522949, 0.015038463592529297, 0.014687359809875488, 0.014681920051574707, 0.014794015884399414, 0.01468489646911621, 0.014658687591552734, 0.014814080238342285, 0.014856191635131836, 0.014937984466552734, 0.014757375717163086, 0.014722751617431641, 0.015066047668457032, 0.014750847816467285, 0.01461952018737793, 0.01459404754638672, 0.014735360145568848, 0.014730527877807617, 0.014848608016967774, 0.015125696182250976, 0.014913951873779297, 0.014783007621765137, 0.01498259162902832, 0.014770751953125, 0.015138815879821778, 0.015023551940917968, 0.016580863952636717, 0.016404863357543945, 0.015110079765319823, 0.014892095565795899, 0.01483193588256836, 0.015051103591918946, 0.015292703628540039, 0.014703840255737304, 0.014749567985534668, 0.014644000053405761, 0.014759455680847168, 0.014762592315673829, 0.014968832015991211, 0.015050751686096191, 0.01470620822906494, 0.014612544059753417, 0.014690367698669434, 0.014712608337402344, 0.014612192153930665, 0.014715744018554687, 0.014743552207946778, 0.014728287696838378, 0.014719903945922852, 0.01491977596282959, 0.01506704044342041, 0.014935839653015136, 0.015369536399841309, 0.014918496131896972, 0.014718463897705078, 0.015339776039123535, 0.014767999649047852, 0.014821696281433105, 0.015124608039855957, 0.015025152206420898, 0.01480191993713379, 0.01478649616241455, 0.014687616348266602, 0.014731040000915527, 0.014723999977111817, 0.014757696151733399, 0.01494035243988037, 0.014726719856262206, 0.014757951736450196, 0.014858176231384277, 0.014704768180847167, 0.014727487564086914, 0.014962656021118164, 0.01484553623199463, 0.014786975860595703, 0.01475699234008789, 0.014899488449096679, 0.014897279739379883, 0.014789088249206543, 0.014858240127563477, 0.014880767822265625, 0.014774271965026856, 0.014749695777893066, 0.014723072052001953, 0.014702079772949218, 0.014688480377197265, 0.014767904281616211, 0.014966336250305176, 0.014889920234680176, 0.015042112350463868, 0.01531481647491455, 0.014983327865600585, 0.01514742374420166, 0.014841856002807617, 0.014904895782470702, 0.014888863563537597, 0.015196703910827636, 0.014999551773071289, 0.01470464038848877, 0.014673727989196778, 0.01463929557800293, 0.014675840377807618, 0.014787903785705566, 0.014777152061462403, 0.014993535995483398, 
0.014759807586669922, 0.014551039695739745, 0.01469587230682373, 0.014703167915344238, 0.01485209560394287, 0.015103039741516113, 0.014960672378540039, 0.015010592460632324, 0.0147640323638916, 0.014640607833862306, 0.014639776229858398, 0.014710783958435059, 0.014773856163024903, 0.014629280090332031, 0.014735360145568848, 0.014667776107788086, 0.014784511566162109, 0.01493507194519043, 0.01462166404724121, 0.014671872138977051, 0.014796799659729003, 0.014661408424377442, 0.014702560424804688, 0.014624128341674805, 0.0148090238571167, 0.014683072090148925, 0.014669280052185058, 0.014708959579467773, 0.014761311531066895, 0.014689472198486328, 0.014790111541748047, 0.014873120307922363, 0.014700544357299805, 0.0147640323638916, 0.01468825626373291, 0.014669568061828613, 0.014659839630126953, 0.014712832450866698, 0.014643199920654297, 0.014722623825073243, 0.014659551620483399, 0.014628735542297364, 0.014715167999267578, 0.014686528205871583, 0.014632543563842774, 0.014666144371032714, 0.014654687881469727, 0.014635807991027832, 0.014630911827087402, 0.014721023559570312, 0.015084575653076172, 0.014773216247558594, 0.01469155216217041, 0.01477507209777832, 0.01466982364654541, 0.014735360145568848, 0.014698495864868164, 0.01527990436553955, 0.014732576370239258, 0.014726079940795899, 0.014654687881469727, 0.014756159782409668, 0.01479043197631836, 0.014762687683105468, 0.014680064201354981, 0.0148602876663208, 0.014804991722106933, 0.014725119590759277, 0.014796480178833008, 0.014729087829589843, 0.014690752029418946, 0.014825471878051758, 0.014854080200195312, 0.014673536300659179, 0.01470736026763916, 0.014780032157897948, 0.014764287948608398, 0.014671775817871094, 0.014731264114379883, 0.014790656089782715, 0.01472111988067627, 0.014710304260253906, 0.014829952239990235, 0.014716320037841797, 0.014742112159729004, 0.014753791809082031, 0.014892831802368164, 0.015126272201538086, 0.015077823638916016, 0.015091744422912598, 0.01495257568359375, 0.014948224067687988, 0.014907391548156738, 0.014872544288635254, 0.014878848075866698, 0.014788736343383789, 0.014872447967529297, 0.014742752075195313, 0.014779168128967285, 0.01460204792022705, 0.01468230438232422, 0.014610431671142577, 0.014695712089538574, 0.014678688049316406, 0.014669343948364258, 0.014643551826477051, 0.014600383758544921, 0.014827520370483398, 0.01459609603881836, 0.014698016166687011, 0.014633440017700195, 0.014583807945251465, 0.01464089584350586, 0.014704895973205566, 0.015078623771667481, 0.01504736042022705, 0.014864480018615723, 0.014800127983093261, 0.01464361572265625, 0.014682463645935059, 0.014636863708496093, 0.014825823783874512, 0.014767264366149902, 0.014744256019592285, 0.014841407775878905, 0.014785120010375977, 0.014831456184387207, 0.014732640266418457, 0.01470736026763916, 0.014714879989624024, 0.014730239868164062, 0.014707615852355957, 0.014675264358520507, 0.014635807991027832, 0.014733311653137206, 0.014774271965026856, 0.014855487823486328, 0.014703295707702636, 0.014728192329406739, 0.014655872344970703, 0.014623231887817383, 0.014604415893554688, 0.01467369556427002, 0.014705056190490723, 0.014767935752868652, 0.01473737621307373, 0.014743583679199219, 0.015027392387390136, 0.014891712188720704, 0.015210304260253906, 0.01563811206817627, 0.014886943817138672, 0.014805439949035645, 0.014782719612121582, 0.014687295913696288, 0.014713983535766601, 0.015320480346679688, 0.015183775901794434, 0.015125184059143067, 0.01496678352355957, 0.01495587158203125, 0.014879391670227051, 0.014899200439453125, 
0.015009792327880859, 0.017104352951049805, 0.015395232200622559, 0.014792192459106445, 0.014868800163269042, 0.014770496368408203, 0.015015935897827149, 0.015674816131591798, 0.016312351226806642, 0.014946368217468262, 0.015055328369140626, 0.014984736442565918, 0.015106528282165528, 0.014891008377075195, 0.014927871704101562, 0.01556275177001953, 0.015281279563903809, 0.015017120361328126, 0.014738176345825195, 0.01463593578338623, 0.014647359848022462, 0.015914912223815917, 0.014717023849487304, 0.01547379207611084, 0.014780608177185059, 0.01478112030029297, 0.014693408012390137, 0.014664640426635742, 0.014716959953308106, 0.014716927528381347, 0.014733311653137206, 0.014640159606933593, 0.01467619228363037, 0.014680031776428223, 0.014713184356689453, 0.014993856430053711, 0.014791680335998534, 0.01481328010559082, 0.0146844482421875, 0.014867391586303711, 0.014716480255126953, 0.014591808319091798, 0.01465775966644287, 0.014661120414733888, 0.014701151847839355, 0.01466982364654541, 0.014695615768432617, 0.014711935997009278, 0.014687007904052734, 0.015067968368530273, 0.014706080436706542, 0.01479695987701416, 0.014688799858093262, 0.014696703910827637, 0.01465727996826172, 0.014972800254821778]",tokens/s,67.47363928221026,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 358, in __init__ self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 478446 has 14.74 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 90.39 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.502976,13880.918016,0.0,13478.395904,13476.849152,s,1,7.4507392578125,7.4507392578125,0.0,7.4507392578125,7.4507392578125,7.4507392578125,7.4507392578125,[7.4507392578125],,kWh,8.675561329164339e-06,9.495999270887102e-07,3.1994470040264122e-06,1.2824608260279461e-05,,MB,1199.276032,14113.701888,0.0,13700.694016,13671.637504,s,10,12.471690307617187,1.2471690307617187,0.003799162042470298,1.2475398559570312,1.2511787353515624,1.2520418212890625,1.2527322900390625,"[1.238984619140625, 1.2431243896484374, 1.24744677734375, 1.24539111328125, 1.246914794921875, 1.2501849365234374, 1.2509869384765624, 1.2476329345703125, 1.248118896484375, 1.2529049072265626]",tokens/s,205.26487884617043,kWh,3.638427394292345e-05,4.012711230125099e-06,2.415799154860121e-05,6.455497672164976e-05,tokens/kWh,3965612.1495299903,MB,1247.41632,14115.79904,0.0,13702.791168,13671.640064,s,10,37.742427734375,3.7742427734375,0.0019473062255054348,3.77512548828125,3.7762343505859373,3.7763550903320313,3.776451682128906,"[3.77006103515625, 3.7753271484375, 3.7734580078125, 3.771849365234375, 3.77529248046875, 3.776475830078125, 3.77495849609375, 3.77620751953125, 3.773302734375, 3.7754951171875]",tokens/s,16.692089985144474,kWh,0.00011031098059624582,1.2167773649950273e-05,7.335591979580059e-05,0.00019583467404199668,tokens/kWh,321699.9252465867,,s,630,37.73923788452148,0.05990355219765315,0.00028402794498264347,0.05992238426208496,0.060189503479003904,0.060276842880249024,0.0610971081161499,"[0.06109894561767578, 0.059594753265380856, 0.059493438720703125, 0.05972787094116211, 0.059446208953857424, 0.05974806213378906, 0.05949673461914062, 0.059498462677001956, 0.05948828887939453, 0.059600990295410154, 0.05976166534423828, 0.0597262077331543, 0.05958710479736328, 0.05983590316772461, 0.05962351989746094, 0.05943926239013672, 0.0595786247253418, 0.05962694549560547, 0.05972188949584961, 0.05979971313476563, 0.05960319900512695, 0.05954531097412109, 0.05953113555908203, 0.059507102966308595, 0.05952511978149414, 0.059686622619628905, 0.059555198669433596, 0.05969807815551758, 0.059676673889160155, 0.05976620864868164, 0.0596231689453125, 0.059835201263427736, 0.060082176208496096, 0.060061153411865235, 0.05976694488525391, 0.05990848159790039, 0.059731201171875, 0.05997820663452148, 0.05980176162719727, 0.05984080123901367, 0.05968880081176758, 0.0597567367553711, 0.05995500946044922, 0.06007318496704102, 0.06001462554931641, 0.05997177505493164, 0.060055999755859374, 0.05998092651367187, 0.06025932693481445, 0.06019036865234375, 0.06008252716064453, 0.060038814544677736, 0.060139263153076175, 0.06003977584838867, 0.059926239013671875, 0.06007017517089844, 
0.05994432067871094, 0.060050048828125, 0.059996158599853515, 0.060068992614746096, 0.06017932891845703, 0.06032793426513672, 0.0603422737121582, 0.06103548812866211, 0.05951846313476562, 0.059358848571777346, 0.05945798492431641, 0.059488094329833985, 0.05955958557128906, 0.059529216766357425, 0.05967993545532226, 0.059709182739257814, 0.059789310455322264, 0.05973526382446289, 0.05995727920532227, 0.05978310394287109, 0.05995503997802734, 0.05981513595581055, 0.05989503860473633, 0.060137248992919924, 0.06011116790771484, 0.059846206665039064, 0.05983523178100586, 0.059979774475097655, 0.059850753784179686, 0.059749824523925785, 0.05966291046142578, 0.05964534378051758, 0.05982166290283203, 0.05992940902709961, 0.059967681884765624, 0.06011084747314453, 0.060090110778808596, 0.0599738883972168, 0.05980979156494141, 0.05979340744018555, 0.05991600036621094, 0.05979923248291016, 0.0598902702331543, 0.05972991943359375, 0.05992169570922851, 0.05992659378051758, 0.059894046783447265, 0.05981561660766602, 0.05995135879516601, 0.05975247955322266, 0.060076446533203126, 0.060203006744384766, 0.060004127502441405, 0.05988345718383789, 0.059912479400634766, 0.059952735900878906, 0.06031727981567383, 0.05998675155639648, 0.060098560333251956, 0.059977886199951175, 0.06023891067504883, 0.059996543884277345, 0.060080257415771485, 0.06003740692138672, 0.06020710372924805, 0.0600489616394043, 0.060357246398925785, 0.06002259063720703, 0.06016409683227539, 0.06016339111328125, 0.060915328979492187, 0.05965020751953125, 0.05939420700073242, 0.059300350189208983, 0.05932838439941406, 0.05946963119506836, 0.05949187088012695, 0.059644607543945315, 0.05972137451171875, 0.05955644989013672, 0.05964147186279297, 0.05957392120361328, 0.059544288635253906, 0.059643905639648435, 0.05959203338623047, 0.059959232330322264, 0.05979209518432617, 0.059891841888427735, 0.05994873428344726, 0.05996563339233398, 0.059870433807373044, 0.05997647857666016, 0.06003254318237305, 0.059974113464355466, 0.059957248687744144, 0.05997318267822266, 0.05994921493530273, 0.06000668716430664, 0.05968387222290039, 0.05968361663818359, 0.05981203079223633, 0.05983404922485352, 0.0600291519165039, 0.05994230270385742, 0.05978134536743164, 0.059967071533203124, 0.05986352157592773, 0.0597672004699707, 0.05966998291015625, 0.05972431945800781, 0.06000751876831055, 0.06014838409423828, 0.05999027252197266, 0.05997910308837891, 0.05990198516845703, 0.060166782379150394, 0.05999196624755859, 0.05995276641845703, 0.06001081466674805, 0.060291103363037106, 0.06030316925048828, 0.06010070419311524, 0.060049633026123046, 0.06027779388427734, 0.060275680541992185, 0.06011651229858399, 0.06002719879150391, 0.060029216766357425, 0.060036991119384764, 0.05999411010742187, 0.05996054458618164, 0.060022655487060546, 0.06005203247070313, 0.06111276626586914, 0.059845664978027346, 0.05966864013671875, 0.05946361541748047, 0.05950886535644531, 0.05951974487304688, 0.059428863525390625, 0.059643905639648435, 0.05948361587524414, 0.059530975341796875, 0.05959916687011719, 0.05969884872436523, 0.05976684951782227, 0.05966080093383789, 0.059674751281738284, 0.05980995178222656, 0.059815902709960934, 0.059837535858154295, 0.05954764938354492, 0.05954246520996094, 0.05938995361328125, 0.05952864074707031, 0.059531841278076175, 0.05949849700927735, 0.059557697296142575, 0.05961651229858399, 0.05965100860595703, 0.059834369659423826, 0.05986873626708984, 0.05973180770874023, 0.05974486541748047, 0.059865215301513675, 0.05989161682128906, 0.05993878555297852, 
0.05983571243286133, 0.059927070617675784, 0.05992464065551758, 0.06002483367919922, 0.059891841888427735, 0.05995302581787109, 0.0599818229675293, 0.059998207092285157, 0.05978521728515625, 0.060080127716064455, 0.059925758361816406, 0.06006617736816406, 0.06007251358032226, 0.060174144744873044, 0.0600857925415039, 0.060252254486083984, 0.06010249710083008, 0.06022588729858398, 0.06011103820800781, 0.06016409683227539, 0.060131072998046875, 0.060006752014160156, 0.06022339248657226, 0.06010863876342774, 0.0601800651550293, 0.0601196174621582, 0.06001663970947266, 0.060171585083007816, 0.060226047515869144, 0.06134982299804687, 0.0599222412109375, 0.059566944122314454, 0.05955379104614258, 0.059660255432128904, 0.05996550369262695, 0.05982015991210937, 0.05962736129760742, 0.05959065628051758, 0.0599890251159668, 0.05981078338623047, 0.059666431427001954, 0.05954313659667969, 0.05977945709228515, 0.05980729675292969, 0.05980748748779297, 0.05985353469848633, 0.06013132858276367, 0.05991424179077148, 0.05998591995239258, 0.05969286346435547, 0.05973215866088867, 0.05960812759399414, 0.059607200622558594, 0.059786014556884766, 0.05972732925415039, 0.059700801849365236, 0.05978403091430664, 0.059883392333984375, 0.059971839904785156, 0.06010879898071289, 0.05979353713989258, 0.0600021743774414, 0.059954975128173826, 0.06004553604125976, 0.05997516632080078, 0.05989836883544922, 0.059891422271728514, 0.059719680786132816, 0.059828510284423826, 0.05963081741333008, 0.059773311614990235, 0.059695518493652344, 0.05982015991210937, 0.05998579025268555, 0.06000230407714844, 0.05991027069091797, 0.05997907257080078, 0.05988521575927734, 0.06030140686035156, 0.060197696685791016, 0.06005929565429687, 0.05993711853027344, 0.060375038146972655, 0.06012723159790039, 0.06000620651245117, 0.06018886566162109, 0.06011078262329102, 0.060024894714355466, 0.06016582489013672, 0.060264766693115236, 0.06019596862792969, 0.060232574462890626, 0.06141689682006836, 0.05968905639648438, 0.05938665771484375, 0.05960294342041016, 0.05948825454711914, 0.059545600891113284, 0.059510784149169924, 0.05944313430786133, 0.059455551147460935, 0.059722881317138675, 0.05962364959716797, 0.0595577278137207, 0.05962630462646484, 0.05977702331542969, 0.059650047302246094, 0.059868415832519534, 0.060322559356689454, 0.05986713409423828, 0.05997772979736328, 0.05999411010742187, 0.05978319931030274, 0.05992547225952148, 0.05988224029541016, 0.0599738883972168, 0.0599736328125, 0.05995110321044922, 0.05980508804321289, 0.06009724807739258, 0.05989468765258789, 0.05993539047241211, 0.0601170883178711, 0.06006099319458008, 0.06018940734863281, 0.06032608032226563, 0.059989280700683593, 0.05994160079956055, 0.05973606491088867, 0.06004713439941406, 0.0598694076538086, 0.059968769073486326, 0.05995596694946289, 0.060098560333251956, 0.06009148788452148, 0.06012540817260742, 0.0599354248046875, 0.05993203353881836, 0.06014371109008789, 0.06022780990600586, 0.06004108810424805, 0.060246463775634765, 0.06027225494384766, 0.060327678680419924, 0.059957248687744144, 0.05992252731323242, 0.05988406372070312, 0.059930622100830076, 0.05999526214599609, 0.06001103973388672, 0.05994847869873047, 0.060048095703125, 0.05996326446533203, 0.06005120086669922, 0.06003564834594727, 0.06113750457763672, 0.05970534515380859, 0.05958860778808594, 0.05949235153198242, 0.05940224075317383, 0.05970684814453125, 0.05947401428222656, 0.05951887893676758, 0.0596894416809082, 0.05957043075561524, 0.05951398468017578, 0.059562686920166016, 0.05954681777954102, 
0.05967340850830078, 0.05963776016235352, 0.06003839874267578, 0.06004198455810547, 0.060254207611083986, 0.05994598388671875, 0.05973030471801758, 0.05952166366577148, 0.059539615631103514, 0.059498336791992186, 0.05960908889770508, 0.05959270477294922, 0.05969027328491211, 0.0598422737121582, 0.05968998336791992, 0.05959884643554687, 0.059848705291748044, 0.059850753784179686, 0.06004844665527344, 0.06021011352539062, 0.06006579208374024, 0.05995929718017578, 0.05999599838256836, 0.05985910415649414, 0.06011888122558594, 0.059964702606201174, 0.05998579025268555, 0.05998470306396484, 0.060166336059570315, 0.06006579208374024, 0.060214561462402345, 0.06004780960083008, 0.06007222366333008, 0.060088321685791014, 0.059957248687744144, 0.060114944458007816, 0.060278785705566405, 0.06020048141479492, 0.060472896575927734, 0.06058860778808594, 0.06013167953491211, 0.059998207092285157, 0.060006401062011716, 0.05997158432006836, 0.060247169494628904, 0.06002694320678711, 0.060193374633789064, 0.06001187133789063, 0.06016630554199219, 0.05994979095458984, 0.06126406478881836, 0.0599054069519043, 0.05942537689208984, 0.05946108627319336, 0.059496990203857424, 0.05966233444213867, 0.05968812942504883, 0.059476222991943356, 0.059709087371826175, 0.05975875091552734, 0.05967539215087891, 0.059772926330566405, 0.059774974822998046, 0.059676639556884764, 0.05974428939819336, 0.05975584030151367, 0.0597982063293457, 0.06006492614746094, 0.059855712890625, 0.05970524978637695, 0.059633758544921874, 0.06005350494384765, 0.05961536026000976, 0.059716991424560543, 0.059673088073730465, 0.05976063919067383, 0.05971913528442383, 0.05982227325439453, 0.05992892837524414, 0.059979774475097655, 0.060212287902832035, 0.05997580718994141, 0.060240287780761716, 0.06006140899658203, 0.059826881408691406, 0.05980108642578125, 0.059755008697509764, 0.05983001708984375, 0.059735553741455075, 0.060033790588378905, 0.05998591995239258, 0.06013951873779297, 0.060010337829589845, 0.05987548828125, 0.05984179306030273, 0.06019094467163086, 0.060196609497070314, 0.060064319610595704, 0.059934337615966796, 0.06007366561889648, 0.05996227264404297, 0.06000230407714844, 0.05990195083618164, 0.06016614532470703, 0.06019071960449219, 0.05999539184570313, 0.06012390518188476, 0.060346561431884764, 0.060167999267578126, 0.060056735992431644, 0.06024585723876953, 0.060431198120117186, 0.06088719940185547, 0.0613969612121582, 0.05963708877563476, 0.05941075134277344, 0.0597158088684082, 0.05952524948120117, 0.05977702331542969, 0.059428863525390625, 0.05945663833618164, 0.059488704681396484, 0.059574432373046875, 0.05960086441040039, 0.05959222412109375, 0.0595300178527832, 0.05961286544799805, 0.05960326385498047, 0.059796703338623046, 0.05993552017211914, 0.05997568130493164, 0.05976662445068359, 0.05980316925048828, 0.05976678466796875, 0.059891937255859375, 0.059728321075439454, 0.059671680450439454, 0.059738975524902344, 0.06020463943481445, 0.05989007949829102, 0.059834369659423826, 0.06000435256958008, 0.06010617446899414, 0.06005817413330078, 0.06005964660644531, 0.059842399597167965, 0.05999631881713867, 0.06008550262451172, 0.06011331176757812, 0.05995759963989258, 0.06004326248168945, 0.059967071533203124, 0.05988140869140625, 0.059816097259521486, 0.05993094253540039, 0.0600002555847168, 0.05996467208862305, 0.05999033737182617, 0.06006419372558594, 0.06002467346191406, 0.06016630554199219, 0.05996271896362305, 0.05998041534423828, 0.05993475341796875, 0.06009036636352539, 0.05992652893066406, 0.0599101448059082, 
0.059775104522705076, 0.05978713607788086, 0.05980883026123047, 0.06000547027587891, 0.05997676849365234, 0.05998995208740234, 0.06000252914428711, 0.060093055725097655, 0.060316864013671874, 0.06109260940551758, 0.05989897537231445, 0.05969398498535156, 0.059582015991210935, 0.05961363220214844, 0.05954732894897461, 0.059482433319091796, 0.059581535339355465, 0.0595640640258789, 0.059543903350830076, 0.05975913619995117, 0.05977500915527344, 0.05960905456542969, 0.059842559814453126, 0.05977088165283203, 0.05987526321411133, 0.06000044631958008, 0.06001855850219726, 0.06000844955444336, 0.059756542205810545, 0.05963961410522461, 0.059928768157958986, 0.059555774688720704, 0.05967603302001953, 0.05967350387573242, 0.05972150421142578, 0.05968387222290039, 0.059687454223632815, 0.05986732864379883, 0.059786495208740235, 0.05983334350585937, 0.0599054069519043, 0.059853439331054685, 0.05992854309082031, 0.060017822265625, 0.05984947204589844, 0.05977920150756836, 0.0600002555847168, 0.05979545593261719, 0.060044830322265624, 0.059850719451904295, 0.06018304061889648, 0.06005670547485351, 0.06009945678710937, 0.0598928337097168, 0.06013430404663086, 0.059911392211914063, 0.06021206283569336, 0.06012268829345703, 0.06012873458862305, 0.059972545623779294, 0.06017798233032227, 0.06006816101074219, 0.06035670471191406, 0.06010060882568359, 0.06013951873779297, 0.06052067184448242, 0.060171646118164064, 0.060115486145019534, 0.060188545227050784, 0.05996134567260742, 0.06055526351928711, 0.060034271240234374]",tokens/s,16.693500857853586,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 481571 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 1.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 480798 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 1.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, 
layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 480420 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 1.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 270.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 98.12 MiB is free. Process 473886 has 14.64 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 1.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 357, in __init__ self.k_proj = 
nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 475787 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.629952,13880.918016,0.0,13478.395904,13476.849152,s,1,7.44209521484375,7.44209521484375,0.0,7.44209521484375,7.44209521484375,7.44209521484375,7.44209521484375,[7.44209521484375],,kWh,8.533166512552271e-06,9.292595227919346e-07,4.431947989991247e-06,1.3894374025335453e-05,,MB,1301.487616,14115.79904,0.0,13700.694016,13671.637504,s,10,2.014640563964844,0.20146405639648438,0.0029224045882831192,0.20157713317871095,0.20514818115234376,0.2053638702392578,0.20553642150878906,"[0.19455564880371093, 0.2011693115234375, 0.20166236877441407, 0.20557955932617186, 0.2014918975830078, 0.20251881408691405, 0.20047529602050781, 0.19947071838378908, 0.20510025024414064, 0.20261669921875]",tokens/s,1270.6981313638798,kWh,5.922008150915644e-06,6.530836137094912e-07,3.9255031403995225e-06,1.0500594905024657e-05,tokens/kWh,24379571.09244363,MB,1344.43008,14117.896192,0.0,13702.791168,13671.640064,s,10,37.266224609374994,3.7266224609375,0.0034248930075563787,3.7261666259765622,3.7312097412109373,3.7315943725585936,3.7319020776367187,"[3.721352783203125, 3.721993408203125, 3.724694580078125, 3.72783544921875, 3.725023193359375, 3.72577099609375, 3.731124267578125, 3.726562255859375, 3.729888671875, 3.73197900390625]",tokens/s,16.905388367178787,kWh,0.0001089647369836657,1.2019017154273896e-05,7.247764131540325e-05,0.00019346139545334287,tokens/kWh,325646.3639806306,,s,630,37.26310935211176,0.05914779262239971,0.0005291504965589355,0.05905755233764648,0.05940442314147949,0.059623657989501956,0.06256688827514649,"[0.062236351013183595, 0.05933628845214844, 0.058813121795654295, 0.058745887756347655, 0.058747550964355466, 0.05874700927734375, 0.058901790618896485, 0.05887263870239258, 0.05884112167358398, 0.058950817108154294, 0.058764190673828126, 0.05880976104736328, 0.05898080062866211, 0.059053569793701174, 
0.05890115356445313, 0.05901107025146484, 0.05920934295654297, 0.059629184722900394, 0.05928160095214844, 0.05898092651367187, 0.058834945678710934, 0.05877145767211914, 0.05893856048583984, 0.05884105682373047, 0.05872438430786133, 0.05883168029785156, 0.05873833465576172, 0.05884143829345703, 0.059146495819091795, 0.059049503326416015, 0.05894371032714844, 0.059053054809570314, 0.059145088195800784, 0.059213985443115236, 0.05913091278076172, 0.059261886596679685, 0.05924761581420898, 0.05881753540039063, 0.05894553756713867, 0.05889011383056641, 0.05911779022216797, 0.05903964614868164, 0.058966014862060545, 0.05905363082885742, 0.059199840545654296, 0.0590623664855957, 0.05915238571166992, 0.05910742568969726, 0.059147232055664065, 0.0592158088684082, 0.05909401702880859, 0.05913737487792969, 0.059275936126708985, 0.05932463836669922, 0.05903747177124023, 0.059230209350585934, 0.059053886413574216, 0.05895775985717774, 0.05889049530029297, 0.058955806732177735, 0.058912734985351566, 0.05885337448120117, 0.05900896072387695, 0.06227558517456055, 0.05934921646118164, 0.059057952880859375, 0.05886886215209961, 0.05882281494140625, 0.05885779190063477, 0.058939807891845705, 0.05883055877685547, 0.05907894515991211, 0.0590458869934082, 0.05909299087524414, 0.058851329803466794, 0.058761215209960936, 0.058877952575683595, 0.05878169631958008, 0.059121662139892575, 0.059289600372314455, 0.05937676620483399, 0.05914662551879883, 0.05917753601074219, 0.05897004699707031, 0.05925068664550781, 0.05894313430786133, 0.058986335754394534, 0.05902182388305664, 0.059071807861328124, 0.058974910736083984, 0.05889225769042969, 0.05899849700927735, 0.05881497573852539, 0.058781505584716794, 0.05884928131103516, 0.05900006484985352, 0.059061119079589844, 0.059229278564453126, 0.05929158401489258, 0.05916339111328125, 0.059084320068359376, 0.05902598571777344, 0.05890361785888672, 0.05912198257446289, 0.05908924865722656, 0.05884118270874023, 0.05895507049560547, 0.05886819076538086, 0.05883100891113281, 0.058943008422851564, 0.05887599945068359, 0.05895145416259766, 0.05908675384521484, 0.05944416046142578, 0.05944518280029297, 0.05937113571166992, 0.059178592681884766, 0.05910947036743164, 0.05910134506225586, 0.05895193481445313, 0.0588163833618164, 0.058802654266357425, 0.05883699035644531, 0.0591192626953125, 0.05911177444458008, 0.0589252815246582, 0.06318867111206054, 0.05991251373291016, 0.05905408096313477, 0.05914575958251953, 0.05892051315307617, 0.05896636962890625, 0.059191871643066406, 0.05912508773803711, 0.059126430511474606, 0.05909228897094727, 0.058980159759521485, 0.05886592102050781, 0.058771553039550783, 0.05883132934570313, 0.05894089508056641, 0.059007583618164064, 0.05938294219970703, 0.05941660690307617, 0.05929248046875, 0.05913190460205078, 0.05902684783935547, 0.05881507110595703, 0.05873644638061523, 0.05870390319824219, 0.05871142578125, 0.05867731094360352, 0.058802913665771485, 0.05874393463134766, 0.05877030563354492, 0.05892419052124023, 0.05885558319091797, 0.05883475112915039, 0.05939804840087891, 0.0593678092956543, 0.059316673278808595, 0.0590843505859375, 0.05892156982421875, 0.05882255935668945, 0.058990497589111325, 0.058904769897460935, 0.05875436782836914, 0.05899852752685547, 0.058829761505126955, 0.05877660751342773, 0.05922208023071289, 0.05933334350585937, 0.05913212966918945, 0.059053665161132814, 0.05917724609375, 0.059168094635009764, 0.05924121475219726, 0.05916579055786133, 0.05930486297607422, 0.05916262435913086, 0.05901311874389648, 
0.05908684921264649, 0.05943296051025391, 0.05920153427124023, 0.059066368103027345, 0.05913600158691406, 0.05893500900268555, 0.059259166717529295, 0.05913708877563477, 0.06314944076538086, 0.06012169647216797, 0.05912799835205078, 0.058939552307128905, 0.05897552108764648, 0.05873443222045899, 0.05879241561889648, 0.05897052764892578, 0.059012096405029295, 0.05890969467163086, 0.058961185455322265, 0.0588520622253418, 0.05887500762939453, 0.058847774505615236, 0.05875132751464844, 0.05903769683837891, 0.05931753540039063, 0.05941263961791992, 0.05916048049926758, 0.05901583862304687, 0.05896384048461914, 0.05892108917236328, 0.05871820831298828, 0.05892403030395508, 0.05909158325195313, 0.059130016326904296, 0.05922623825073242, 0.05888380813598633, 0.05906470489501953, 0.059006431579589846, 0.058831390380859376, 0.058763454437255856, 0.059254592895507815, 0.059404193878173826, 0.059418750762939454, 0.05944316864013672, 0.05914419174194336, 0.05908639907836914, 0.059203136444091795, 0.059096031188964844, 0.05923836898803711, 0.05906016159057617, 0.05905609512329101, 0.05902953720092773, 0.05916175842285156, 0.059074527740478514, 0.058915969848632815, 0.05909171295166016, 0.059189823150634764, 0.05921334457397461, 0.059347774505615236, 0.059571937561035154, 0.05979340744018555, 0.05926505661010742, 0.05928188705444336, 0.05907798385620117, 0.05916726303100586, 0.059326400756835936, 0.059193408966064454, 0.0590184326171875, 0.059173694610595705, 0.05897942352294922, 0.05881296157836914, 0.06259478378295899, 0.05961548614501953, 0.059053569793701174, 0.0588163833618164, 0.058839710235595706, 0.05883699035644531, 0.05897180938720703, 0.05908720016479492, 0.058874881744384766, 0.05881923294067383, 0.05901724624633789, 0.05873491287231445, 0.05899673461914062, 0.05886771011352539, 0.05886886215209961, 0.05929049682617187, 0.059731742858886716, 0.05977695846557617, 0.05938556671142578, 0.05925126266479492, 0.05895577621459961, 0.059195327758789065, 0.05877561569213867, 0.059098751068115234, 0.05898892974853515, 0.058966014862060545, 0.0589147834777832, 0.05882064056396484, 0.05894937515258789, 0.05876099014282227, 0.05873712158203125, 0.05889238357543945, 0.059221920013427735, 0.05962720108032227, 0.05946716690063476, 0.059160991668701174, 0.05931673431396484, 0.059209312438964844, 0.05891727828979492, 0.05882265472412109, 0.05887334442138672, 0.0590832633972168, 0.05900406265258789, 0.059020030975341795, 0.05912960052490234, 0.05905561447143555, 0.058948448181152344, 0.058948833465576174, 0.059171104431152345, 0.059275104522705076, 0.05944591903686523, 0.05956198501586914, 0.059294910430908204, 0.05917161560058594, 0.05911308670043945, 0.05905596923828125, 0.058919486999511717, 0.05882265472412109, 0.05883903884887695, 0.058775230407714846, 0.05892323303222656, 0.058929248809814455, 0.059085857391357424, 0.06313340759277344, 0.0600497932434082, 0.05915369415283203, 0.05897894287109375, 0.05892300796508789, 0.058826751708984375, 0.0588939208984375, 0.059077022552490234, 0.059009025573730466, 0.05924249649047852, 0.05897420883178711, 0.05893734359741211, 0.05878988647460937, 0.05897625732421875, 0.058842784881591795, 0.059100574493408206, 0.059653057098388675, 0.059737438201904296, 0.05940291213989258, 0.0591847038269043, 0.05918489456176758, 0.05894009780883789, 0.05884511947631836, 0.058800193786621095, 0.058714111328125, 0.0588163833618164, 0.05891494369506836, 0.058810367584228515, 0.05883484649658203, 0.059027553558349606, 0.058921119689941404, 0.058936256408691406, 0.05921814346313477, 
0.05923641586303711, 0.059343486785888674, 0.05931827163696289, 0.05923350524902344, 0.059069217681884764, 0.058832672119140624, 0.058753246307373046, 0.05881430435180664, 0.05881052780151367, 0.058828800201416016, 0.05886518478393555, 0.0588251838684082, 0.05878707122802734, 0.059019584655761716, 0.05914054489135742, 0.05923638534545898, 0.05920560073852539, 0.05955379104614258, 0.05950070571899414, 0.05924985504150391, 0.05931280136108399, 0.059099136352539064, 0.05921529769897461, 0.05895840072631836, 0.05899017715454102, 0.05911145782470703, 0.059003166198730465, 0.05893657684326172, 0.059016033172607424, 0.05934188842773438, 0.06328556823730469, 0.06006579208374024, 0.05921712112426758, 0.05888079833984375, 0.05898448181152344, 0.05914182281494141, 0.059064128875732425, 0.05904431915283203, 0.05901311874389648, 0.05879510498046875, 0.059056640625, 0.05884560012817383, 0.058845184326171876, 0.0589087028503418, 0.05878271865844727, 0.059482433319091796, 0.05971420669555664, 0.05990607833862305, 0.05953532791137695, 0.05917491149902344, 0.05893865585327149, 0.058843521118164065, 0.05893155288696289, 0.058912769317626956, 0.05888191986083984, 0.058951423645019534, 0.05882822418212891, 0.059014110565185546, 0.059027423858642576, 0.05894863891601562, 0.059133056640625, 0.05921571350097656, 0.0590398063659668, 0.05938988876342773, 0.05946499252319336, 0.059404769897460935, 0.05929177474975586, 0.059234622955322266, 0.0591861457824707, 0.0590379524230957, 0.05914643096923828, 0.059170528411865236, 0.05906911849975586, 0.05908473587036133, 0.05917279815673828, 0.05918527984619141, 0.05908620834350586, 0.05899318313598633, 0.05920512008666992, 0.05921791839599609, 0.059406784057617186, 0.05936111831665039, 0.059203041076660155, 0.05932118225097656, 0.05941862487792969, 0.05933583831787109, 0.059068862915039065, 0.059087265014648435, 0.05900812911987305, 0.059248897552490236, 0.059173568725585934, 0.059227294921875, 0.0591794548034668, 0.06249859237670898, 0.05957804870605469, 0.058963775634765625, 0.05897750473022461, 0.05902924728393555, 0.05918310546875, 0.05905715179443359, 0.05910015869140625, 0.05893020629882813, 0.05915235137939453, 0.059169792175292966, 0.059154430389404294, 0.05892505645751953, 0.05885491180419922, 0.058947391510009765, 0.0591366081237793, 0.05954703903198242, 0.059681407928466795, 0.05920979309082031, 0.05914809417724609, 0.05893164825439453, 0.05909795379638672, 0.059009952545166014, 0.05891446304321289, 0.058955936431884765, 0.05932870483398438, 0.05904793548583984, 0.05889228820800781, 0.059154464721679685, 0.058985759735107425, 0.059035743713378906, 0.05901987075805664, 0.05919539260864258, 0.05950054550170898, 0.05935257720947266, 0.05928102493286133, 0.05921843338012695, 0.05914393615722656, 0.05900672149658203, 0.05889318466186524, 0.058875423431396484, 0.05910297775268555, 0.058983135223388675, 0.059009025573730466, 0.058894336700439455, 0.05895766448974609, 0.059041793823242185, 0.059111583709716795, 0.05909097671508789, 0.059230175018310544, 0.05928550338745117, 0.05976678466796875, 0.05935078430175781, 0.05915468978881836, 0.058996318817138675, 0.058915233612060545, 0.05889948654174805, 0.05873491287231445, 0.059187393188476566, 0.0589150390625, 0.05886387252807617, 0.05884710311889649, 0.058874015808105466, 0.06305587387084961, 0.05977088165283203, 0.05918239974975586, 0.059160415649414065, 0.05908307266235351, 0.05921775817871094, 0.05903839874267578, 0.05890166473388672, 0.05900694274902344, 0.05898329544067383, 0.0587407341003418, 0.058879966735839846, 
0.059023231506347654, 0.058848865509033205, 0.0590032958984375, 0.0592911376953125, 0.05979996871948242, 0.05963801574707031, 0.05940003204345703, 0.059184417724609376, 0.0591512336730957, 0.05915615844726563, 0.05919996643066406, 0.05888911819458008, 0.05881951904296875, 0.0588076171875, 0.05878239822387695, 0.05883676910400391, 0.058783775329589845, 0.058869632720947265, 0.05894713592529297, 0.059046783447265626, 0.0595145263671875, 0.05954377746582031, 0.05941641616821289, 0.059348480224609375, 0.05932099151611328, 0.05916579055786133, 0.05890550231933594, 0.05888528060913086, 0.05885164642333984, 0.0588825912475586, 0.05901311874389648, 0.05892505645751953, 0.05892019271850586, 0.058902881622314454, 0.05920195388793945, 0.059087871551513675, 0.05932502365112305, 0.05947155380249024, 0.05975868988037109, 0.059394176483154294, 0.05939865493774414, 0.05938995361328125, 0.0590970573425293, 0.05927743911743164, 0.059287456512451174, 0.0592710075378418, 0.05947158432006836, 0.05897027206420898, 0.05897859191894531, 0.058924129486083984, 0.05916332626342773, 0.06317670440673828, 0.06004470443725586, 0.0593370246887207, 0.05896547317504883, 0.05898432159423828, 0.05906227111816406, 0.059001407623291015, 0.05918726348876953, 0.05912790298461914, 0.05913417434692383, 0.05899788665771484, 0.05901171112060547, 0.05903945541381836, 0.059265247344970705, 0.05892313766479492, 0.059216064453125, 0.059619327545166016, 0.059770782470703124, 0.059695201873779295, 0.05915340805053711, 0.05890268707275391, 0.058757408142089844, 0.05880889511108398, 0.05890572738647461, 0.05891775894165039, 0.058965217590332034, 0.05893164825439453, 0.05888035202026367, 0.058838623046875, 0.05891097640991211, 0.05897190475463867, 0.05910902404785156, 0.05904374313354492, 0.05930054473876953, 0.05937372970581055, 0.05929574584960937, 0.059364479064941404, 0.05924563217163086, 0.059063201904296876, 0.05913692855834961, 0.059268447875976564, 0.05921859359741211, 0.05917695999145508, 0.05924863815307617, 0.05922313690185547, 0.05921590423583984, 0.059183937072753906, 0.05894662475585938, 0.05921046447753906, 0.05936675262451172, 0.059261695861816406, 0.05940438461303711, 0.059474113464355466, 0.059500446319580076, 0.05928732681274414, 0.059248863220214845, 0.059361278533935545, 0.05910678482055664, 0.05934150314331055, 0.059141281127929685, 0.05911344146728516, 0.05899529647827149, 0.058982528686523435]",tokens/s,16.906801685466323,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 357, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 474649 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 358, in __init__ self.v_proj = 
nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 478833 has 14.74 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 90.39 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 357, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 474286 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 466, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 358, in __init__ self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 479249 has 14.74 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 90.39 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.302272,13880.918016,0.0,13478.395904,13476.849152,s,1,7.47723583984375,7.47723583984375,0.0,7.47723583984375,7.47723583984375,7.47723583984375,7.47723583984375,[7.47723583984375],,kWh,8.356690474935627e-06,9.140224576802741e-07,4.39417018199828e-06,1.3664883114614182e-05,,MB,1313.103872,14117.896192,0.0,13702.791168,13671.637504,s,10,2.070696838378906,0.20706968383789062,0.002858703480905049,0.2068339309692383,0.2103961486816406,0.21114521026611327,0.2117444595336914,"[0.20096687316894532, 0.2057232666015625, 0.20559382629394532, 0.21189427185058593, 0.20810124206542968, 0.2102296905517578, 0.2055417938232422, 0.20664309692382812, 0.20897801208496095, 0.20702476501464845]",tokens/s,1236.2987920550245,kWh,6.0499221190465976e-06,6.671960034602054e-07,4.0029623860417225e-06,1.0720080508548527e-05,tokens/kWh,23880417.669984624,MB,1368.6784,14119.993344,0.0,13702.791168,13671.640064,s,10,37.2264951171875,3.7226495117187497,0.0032393016081026156,3.7232304687499997,3.7265671386718746,3.7266802978515625,3.7267708251953127,"[3.719655517578125, 3.720308837890625, 3.715724365234375, 3.723212646484375, 3.721630126953125, 3.724402587890625, 3.724977294921875, 3.72679345703125, 3.7265419921875, 3.723248291015625]",tokens/s,16.92343042278854,kWh,0.00010888612153637406,1.200907676539946e-05,7.24178209727548e-05,0.00019331301927452831,tokens/kWh,325896.3117767678,,s,630,37.22319746398923,0.05908444041903057,0.0004866215369891233,0.05903854560852051,0.05939424514770508,0.05953241024017334,0.06205093574523926,"[0.06186201477050781, 0.05940374374389648, 0.05869107055664063, 0.058590015411376956, 0.05859542465209961, 0.058797534942626954, 0.05857497787475586, 0.058672958374023435, 0.058525856018066404, 0.05876995086669922, 0.058584991455078124, 0.058697601318359376, 0.05894710540771484, 0.05884364700317383, 0.05865420913696289, 0.058744960784912106, 0.059270816802978514, 0.05907459259033203, 0.058818912506103514, 0.059084640502929685, 0.05866092681884766, 0.05908124923706055, 0.058601150512695314, 0.05854550552368164, 0.0590241584777832, 0.05897030258178711, 0.059117568969726565, 0.058896190643310545, 0.05905337524414062, 0.05923724746704102, 0.058789695739746094, 0.058767425537109376, 0.05912160110473633, 0.05944128036499023, 0.05946582412719727, 0.05911257553100586, 0.059086910247802736, 0.05893199920654297, 0.059148193359375, 0.058997856140136716, 0.059167648315429686, 0.05922022247314453, 0.05888723373413086, 0.058794849395751955, 0.0588656005859375, 0.058864673614501956, 0.05873968124389648, 0.05915824127197265, 0.0591956787109375, 0.05936323165893555, 0.0593875846862793, 0.059343265533447265, 0.059444286346435546, 0.05928643035888672, 
0.059426849365234374, 0.05932559967041016, 0.05898665618896484, 0.058929729461669925, 0.05909312057495117, 0.05905545425415039, 0.059259552001953125, 0.059211105346679685, 0.05907932662963867, 0.062047550201416016, 0.05919606399536133, 0.05869772720336914, 0.05877145767211914, 0.05875228881835937, 0.058718238830566406, 0.05882694244384765, 0.058863712310791017, 0.05857254409790039, 0.05870185470581055, 0.05881919860839844, 0.05863772964477539, 0.058874462127685545, 0.058791454315185544, 0.058792129516601566, 0.0587262077331543, 0.05912396621704102, 0.05907462310791016, 0.058996894836425784, 0.05902905654907226, 0.059189697265625, 0.05875203323364258, 0.058899070739746096, 0.058687744140625, 0.05876950454711914, 0.05885337448120117, 0.05898854446411133, 0.05882463836669922, 0.05873465728759766, 0.05909635162353516, 0.058885921478271486, 0.058795169830322264, 0.05893312072753906, 0.05891635131835937, 0.058990814208984374, 0.05910358428955078, 0.058947425842285156, 0.05899059295654297, 0.059125152587890625, 0.05917468643188477, 0.05912022399902344, 0.05906249618530274, 0.05888719940185547, 0.05881472015380859, 0.05893791961669922, 0.05920988845825195, 0.0593713264465332, 0.05899196624755859, 0.05921376037597656, 0.05908572769165039, 0.05908070373535156, 0.059170814514160154, 0.05931372833251953, 0.05940444946289063, 0.05925091171264649, 0.059334720611572266, 0.05951232147216797, 0.05939235305786133, 0.05939206314086914, 0.059308128356933595, 0.059170814514160154, 0.05915644836425781, 0.059144222259521484, 0.06264425659179687, 0.05970899200439453, 0.05877414321899414, 0.05844150543212891, 0.05841715240478516, 0.058506271362304685, 0.05865280151367187, 0.05858390426635742, 0.05856175994873047, 0.05884787368774414, 0.05855401611328125, 0.058956287384033204, 0.05916780853271485, 0.05875807952880859, 0.058640350341796876, 0.0588636474609375, 0.058988574981689454, 0.05907043075561524, 0.0591295051574707, 0.05892559814453125, 0.059008670806884767, 0.058920318603515626, 0.05896828842163086, 0.058844928741455076, 0.05873132705688477, 0.05880176162719727, 0.058740222930908206, 0.05876339340209961, 0.05865897750854492, 0.05885177612304687, 0.05879827117919922, 0.05878374481201172, 0.059305889129638675, 0.059119712829589846, 0.05927635192871094, 0.059175968170166016, 0.05915615844726563, 0.059281631469726564, 0.05902336120605469, 0.0587243537902832, 0.05873155212402344, 0.058855392456054687, 0.05874585723876953, 0.0588042221069336, 0.05886975860595703, 0.05882169723510742, 0.058813087463378905, 0.05888959884643555, 0.05890895843505859, 0.059097728729248046, 0.05904572677612305, 0.05899484634399414, 0.05898553466796875, 0.05888710403442383, 0.05897011184692383, 0.05901657485961914, 0.05890111923217774, 0.05924454498291016, 0.05903891372680664, 0.059030143737792966, 0.059291007995605466, 0.05919417572021484, 0.0590643196105957, 0.062204383850097654, 0.05919574356079101, 0.058554367065429686, 0.05849699020385742, 0.0588370246887207, 0.058590240478515625, 0.0587880973815918, 0.058759902954101564, 0.05859328079223633, 0.058531841278076174, 0.05880124664306641, 0.05861676788330078, 0.0587501106262207, 0.05876422500610352, 0.058673023223876956, 0.058927104949951174, 0.059553409576416014, 0.0600682258605957, 0.05922147369384766, 0.058972705841064454, 0.05897420883178711, 0.05879587173461914, 0.05875523376464844, 0.05884272003173828, 0.05867686462402344, 0.058823455810546876, 0.05883612823486328, 0.05916963195800781, 0.05905168151855469, 0.05911996841430664, 0.059263103485107424, 0.05918912124633789, 
0.059361312866210936, 0.059049598693847655, 0.059060577392578126, 0.05897536087036133, 0.05914483261108398, 0.05921203231811523, 0.059124801635742186, 0.05926982498168945, 0.0588713264465332, 0.05879676818847656, 0.05890460968017578, 0.05892502212524414, 0.059000129699707034, 0.05899539184570313, 0.05898796844482422, 0.05917660903930664, 0.05910006332397461, 0.05918265533447266, 0.059419231414794924, 0.059184158325195316, 0.05926694488525391, 0.05924959945678711, 0.05948416137695312, 0.05937696075439453, 0.05934531021118164, 0.05960291290283203, 0.059579742431640624, 0.05935945510864258, 0.05908351898193359, 0.05931417465209961, 0.059113502502441406, 0.06282854461669922, 0.05969100952148437, 0.05871974563598633, 0.05856665420532227, 0.05855487823486328, 0.05855548858642578, 0.058671329498291014, 0.058587841033935543, 0.05851289749145508, 0.058577407836914064, 0.05896540832519531, 0.0586671028137207, 0.058612224578857425, 0.058705921173095706, 0.05865881729125977, 0.05917507171630859, 0.059209087371826175, 0.05928803253173828, 0.05914214324951172, 0.05920153427124023, 0.05892832183837891, 0.05880867385864258, 0.058812896728515626, 0.058710014343261716, 0.05891884613037109, 0.05900310516357422, 0.058812255859375, 0.05876019287109375, 0.058848255157470705, 0.058729633331298825, 0.05870409774780273, 0.05877619171142578, 0.05916681671142578, 0.0591890869140625, 0.059367488861083985, 0.05919744110107422, 0.05931827163696289, 0.0592213134765625, 0.05898694229125977, 0.05896422576904297, 0.05924454498291016, 0.05932032012939453, 0.05903900909423828, 0.05891574478149414, 0.05892076873779297, 0.05903564834594727, 0.05892121505737305, 0.05905775833129883, 0.05896940612792969, 0.0591921615600586, 0.05946268844604492, 0.05945443344116211, 0.05926287841796875, 0.05931782531738281, 0.05924099349975586, 0.05925203323364258, 0.05947635269165039, 0.05940256118774414, 0.05908617782592773, 0.059061023712158205, 0.05911743927001953, 0.05911929702758789, 0.05932681655883789, 0.06220563125610352, 0.05945439910888672, 0.05893529510498047, 0.05868544006347656, 0.05861580657958984, 0.0587960319519043, 0.058738689422607425, 0.05873452758789063, 0.058896446228027345, 0.05889129638671875, 0.05911856079101562, 0.05904761505126953, 0.05877382278442383, 0.05869363021850586, 0.058693313598632814, 0.058937664031982424, 0.05905987167358399, 0.05926681518554688, 0.05915299224853516, 0.05919948959350586, 0.05882662582397461, 0.05877977752685547, 0.059084800720214846, 0.058949630737304685, 0.05890383911132813, 0.05880086517333984, 0.058728225708007814, 0.059256351470947266, 0.058910400390625, 0.058968673706054686, 0.05900918579101563, 0.05927552032470703, 0.059309089660644534, 0.05923635101318359, 0.05931647872924805, 0.059583198547363284, 0.05955583953857422, 0.05930601501464844, 0.0592561264038086, 0.05932857513427734, 0.05894409561157227, 0.05892256164550781, 0.05886816024780273, 0.05900697708129883, 0.05917004776000977, 0.05901180648803711, 0.0589455680847168, 0.05892300796508789, 0.05905193710327149, 0.05921503829956055, 0.059359424591064455, 0.05941443252563477, 0.05967504119873047, 0.059523487091064455, 0.05905372619628906, 0.05903190231323242, 0.059090686798095704, 0.05904803085327148, 0.05908291244506836, 0.05925436782836914, 0.05901359939575195, 0.05906835174560547, 0.05913190460205078, 0.06205231857299805, 0.05920771026611328, 0.058916862487792966, 0.05855353546142578, 0.05846492767333984, 0.05865820693969727, 0.05873337554931641, 0.05873459243774414, 0.05893513488769531, 0.058777694702148435, 0.058916862487792966, 
0.05866700744628906, 0.058565696716308596, 0.05877859115600586, 0.058714080810546875, 0.05907251358032226, 0.059280670166015625, 0.05944771194458008, 0.05927967834472656, 0.059099136352539064, 0.058947040557861326, 0.05874537658691406, 0.05863222503662109, 0.05868265533447266, 0.058882526397705075, 0.05898672103881836, 0.059052127838134766, 0.058862495422363284, 0.05877814483642578, 0.05903817749023438, 0.0595432014465332, 0.05908515167236328, 0.05928755187988281, 0.05909411239624023, 0.059140705108642576, 0.05934947204589844, 0.05939388656616211, 0.05938790512084961, 0.05921712112426758, 0.059199455261230466, 0.059200321197509766, 0.05903529739379883, 0.059027809143066406, 0.05898649597167969, 0.05895577621459961, 0.059100448608398436, 0.058894081115722655, 0.058880126953125, 0.058916862487792966, 0.05950345611572266, 0.0591905288696289, 0.05934975814819336, 0.05947596740722656, 0.059522174835205076, 0.05946403121948242, 0.05931478500366211, 0.059367359161376955, 0.0594595832824707, 0.059377662658691405, 0.05936947250366211, 0.05950214385986328, 0.05935878372192383, 0.0592147216796875, 0.06248518371582031, 0.05970902252197265, 0.058859905242919924, 0.058609695434570314, 0.05865852737426758, 0.058605823516845706, 0.05859942245483398, 0.05862556838989258, 0.05889260864257812, 0.05892041778564453, 0.05887039947509766, 0.058864768981933595, 0.0589334716796875, 0.05881103897094726, 0.05880841445922851, 0.058869281768798826, 0.05946227264404297, 0.05940326309204102, 0.05923644638061523, 0.059144351959228514, 0.05899731063842773, 0.05890252685546875, 0.05888819122314453, 0.05880732727050781, 0.058786720275878904, 0.05903760147094726, 0.05885558319091797, 0.05915011215209961, 0.058955360412597656, 0.05880031967163086, 0.05868726348876953, 0.05896451187133789, 0.059324607849121094, 0.05916035079956055, 0.059206977844238284, 0.05916700744628906, 0.0591319694519043, 0.05943888092041016, 0.05896073532104492, 0.058953601837158205, 0.0590909423828125, 0.05940838241577148, 0.0592619857788086, 0.05920582580566406, 0.05920438385009766, 0.059262622833251954, 0.05921007919311523, 0.058993953704833985, 0.05930876922607422, 0.05938902282714844, 0.059288417816162106, 0.0593039665222168, 0.05963983917236328, 0.05957222366333008, 0.05954355239868164, 0.05933465576171875, 0.05931151962280273, 0.05927577590942383, 0.059260478973388674, 0.059369342803955075, 0.05924521636962891, 0.059284702301025394, 0.05910403060913086, 0.06238022232055664, 0.059593441009521485, 0.059100799560546875, 0.05860764694213867, 0.058516990661621096, 0.058843967437744144, 0.058619808197021485, 0.05872243118286133, 0.05861580657958984, 0.05891177749633789, 0.058717151641845704, 0.05871756744384766, 0.058675838470458985, 0.0590048942565918, 0.058877857208251956, 0.05877766418457031, 0.059031070709228514, 0.059328990936279295, 0.059297088623046876, 0.05913471984863281, 0.059028606414794925, 0.058856094360351566, 0.05877987289428711, 0.058788894653320316, 0.05905430221557617, 0.059224830627441404, 0.05931423950195312, 0.05916233444213867, 0.059052352905273435, 0.058982303619384766, 0.05928140640258789, 0.0589496955871582, 0.05904172897338867, 0.0592281608581543, 0.059657886505126954, 0.05967001724243164, 0.059411296844482424, 0.05936332702636719, 0.059205631256103515, 0.058914142608642577, 0.05893392181396485, 0.059039424896240235, 0.059020671844482425, 0.05904684829711914, 0.0591566390991211, 0.05912588882446289, 0.05890633773803711, 0.05902131271362305, 0.05905203247070313, 0.059254783630371094, 0.05933859252929687, 0.05959491348266602, 
0.0595230712890625, 0.0595968017578125, 0.059612449645996095, 0.05919120025634766, 0.05909177780151367, 0.05928140640258789, 0.05921779251098633, 0.05909654235839844, 0.05924931335449219, 0.05917283248901367, 0.05927119827270508, 0.061978145599365236, 0.059275745391845706, 0.05868556976318359, 0.05856447982788086, 0.058628223419189454, 0.05858601760864258, 0.058495521545410156, 0.05867961502075195, 0.0586033935546875, 0.05915856170654297, 0.058767711639404294, 0.05879385757446289, 0.05877084732055664, 0.05902726364135742, 0.058742942810058596, 0.0590682258605957, 0.05920236968994141, 0.05936844635009766, 0.059300865173339844, 0.05912118530273437, 0.058933727264404295, 0.059031551361083984, 0.05876649475097656, 0.059162975311279294, 0.05903545761108398, 0.0588741455078125, 0.05875759887695312, 0.058781280517578124, 0.058804576873779296, 0.05893529510498047, 0.05907660675048828, 0.05914214324951172, 0.05929948806762695, 0.05953971099853515, 0.05931017684936524, 0.059426815032958984, 0.05974435043334961, 0.0593974723815918, 0.058894912719726564, 0.059015167236328124, 0.05890172958374024, 0.05890332794189453, 0.05885747146606445, 0.0588636474609375, 0.05878297424316406, 0.05892348861694336, 0.0591833610534668, 0.05924787139892578, 0.05911939239501953, 0.059259071350097656, 0.059372318267822265, 0.05926726531982422, 0.05926591873168945, 0.059345855712890624, 0.05926502227783203, 0.059215873718261716, 0.05915238571166992, 0.059308032989501956, 0.05906227111816406, 0.05938175964355469, 0.059066368103027345, 0.05917491149902344, 0.05925247955322266]",tokens/s,16.924929692283403,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 358, in __init__ self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 477683 has 14.74 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 90.39 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 481928 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 1.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 270.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 98.12 MiB is free. Process 475038 has 14.64 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 1.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 688.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 598.12 MiB is free. Process 480006 has 14.15 GiB memory in use. Of the allocated memory 14.04 GiB is allocated by PyTorch, and 1.75 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 287, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 150.12 MiB is free. Process 461649 has 14.59 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.43 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 482306 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 1.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 466, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 358, in __init__ self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 479604 has 14.74 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 90.39 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 688.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 598.12 MiB is free. Process 481180 has 14.15 GiB memory in use. Of the allocated memory 14.04 GiB is allocated by PyTorch, and 1.75 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 456.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 280.12 MiB is free. Process 476905 has 14.46 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 3.19 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.838848,13880.918016,0.0,13478.395904,13476.849152,s,1,7.4984443359375,7.4984443359375,0.0,7.4984443359375,7.4984443359375,7.4984443359375,7.4984443359375,[7.4984443359375],,kWh,8.617640091658055e-06,9.432145645993857e-07,4.225836714000231e-06,1.3786691370257671e-05,,MB,1200.222208,14115.79904,0.0,13702.791168,13671.637504,s,10,12.5107314453125,1.25107314453125,0.004904984911468656,1.2531962280273437,1.2565938842773439,1.2568773864746094,1.2571041882324219,"[1.2407294921875, 1.2472108154296875, 1.2475433349609375, 1.24732958984375, 1.253762451171875, 1.2529501953125, 1.254071533203125, 1.2565308837890625, 1.2534422607421876, 1.257160888671875]",tokens/s,204.6243268181715,kWh,3.651900859250266e-05,4.027566965069745e-06,2.4182713790601996e-05,6.472928934817439e-05,tokens/kWh,3954932.9612286277,MB,1248.239616,14115.79904,0.0,13702.791168,13671.640064,s,10,37.68763256835937,3.7687632568359377,0.0032208254817856553,3.7678978271484374,3.7724695556640624,3.773516882324219,3.774354743652344,"[3.76624169921875, 3.765580322265625, 3.76377880859375, 3.767536376953125, 3.76825927734375, 3.774564208984375, 3.7670654296875, 3.77223681640625, 3.770786865234375, 
3.771582763671875]",tokens/s,16.71635910951106,kWh,0.00011008414685333417,1.2143142294788706e-05,7.314328073679822e-05,0.00019537056988492104,tokens/kWh,322464.125672095,,s,630,37.68463336563111,0.0598168783581446,0.00029218105063642287,0.05979940795898438,0.06009622993469238,0.06018867111206055,0.06116480209350586,"[0.06113276672363281, 0.059995231628417967, 0.05977999877929688, 0.05948825454711914, 0.059283424377441406, 0.0595088005065918, 0.059512798309326174, 0.059327583312988284, 0.05961164855957031, 0.05954396820068359, 0.05952854537963867, 0.05948688125610352, 0.059468990325927736, 0.05962579345703125, 0.05940620803833008, 0.05974284744262695, 0.0597072639465332, 0.05986316680908203, 0.059676673889160155, 0.05960230255126953, 0.059498207092285156, 0.05974662399291992, 0.05942537689208984, 0.05965353775024414, 0.05945609664916992, 0.059846111297607425, 0.05985036849975586, 0.05993318557739258, 0.059771297454833984, 0.05977702331542969, 0.05997488021850586, 0.05986511993408203, 0.06017004776000977, 0.05995001602172852, 0.05987942504882812, 0.05998387145996094, 0.059829952239990235, 0.0597507209777832, 0.05979318237304688, 0.0598623046875, 0.05998483276367188, 0.059815937042236325, 0.059719520568847655, 0.05967273712158203, 0.05991167831420898, 0.05993856048583984, 0.059964160919189456, 0.05974630355834961, 0.05978636932373047, 0.059851646423339844, 0.060063743591308595, 0.05980947113037109, 0.060133697509765625, 0.05975449752807617, 0.05966012954711914, 0.05967068862915039, 0.059684768676757816, 0.06008342361450195, 0.059888256072998046, 0.05987968063354492, 0.05993436813354492, 0.05989206314086914, 0.05984460830688477, 0.061074878692626955, 0.0596627197265625, 0.05942416000366211, 0.059552352905273435, 0.05930995178222656, 0.059308353424072265, 0.05934067153930664, 0.05930163192749023, 0.0594312973022461, 0.05944319915771484, 0.059684864044189455, 0.059682720184326174, 0.059735328674316406, 0.05963654327392578, 0.059829662322998044, 0.05964009475708008, 0.05971785736083984, 0.06001878356933594, 0.05974563217163086, 0.059671199798583985, 0.05976492691040039, 0.0596085433959961, 0.05952700805664062, 0.05943756866455078, 0.059462814331054686, 0.05947683334350586, 0.0594222412109375, 0.05945990371704102, 0.05948140716552734, 0.0595456657409668, 0.06005833435058594, 0.059701152801513675, 0.05973324966430664, 0.05970537567138672, 0.059673473358154296, 0.05968668746948242, 0.059652320861816405, 0.059719680786132816, 0.05972329711914062, 0.05987785720825195, 0.06018867111206055, 0.05985007858276367, 0.05992668914794922, 0.05993932723999024, 0.05975040054321289, 0.06004492950439453, 0.06005523300170899, 0.060066497802734375, 0.06007603073120117, 0.06017433547973633, 0.05993471908569336, 0.059998207092285157, 0.0599183349609375, 0.05969305419921875, 0.059946304321289064, 0.059972286224365234, 0.0600186882019043, 0.059918304443359376, 0.05990403366088867, 0.05976028823852539, 0.059851104736328126, 0.06017433547973633, 0.06018867111206055, 0.06136217498779297, 0.059676673889160155, 0.05932032012939453, 0.05925379180908203, 0.059245536804199216, 0.05936537551879883, 0.059428863525390625, 0.05963897705078125, 0.059784000396728515, 0.059625118255615235, 0.05965820693969726, 0.05953574371337891, 0.05935308837890625, 0.05968281555175781, 0.0596049919128418, 0.05958412933349609, 0.059656574249267576, 0.05983203125, 0.05975619125366211, 0.05959539031982422, 0.05941775894165039, 0.059525985717773434, 0.05975996780395508, 0.05962819290161133, 0.0596110725402832, 0.06000032043457031, 0.05974835205078125, 
0.05969676971435547, 0.05992076873779297, 0.060037120819091794, 0.05977487945556641, 0.05999625778198242, 0.060006401062011716, 0.0599060173034668, 0.059762271881103515, 0.05969071960449219, 0.059560672760009765, 0.0595230712890625, 0.059428863525390625, 0.05944319915771484, 0.05953126525878906, 0.05969062423706055, 0.05957875061035156, 0.05957632064819336, 0.059549633026123046, 0.059609153747558596, 0.05964169692993164, 0.05966207885742188, 0.059728286743164063, 0.059848094940185545, 0.05982268905639648, 0.06003302383422852, 0.059923614501953125, 0.05999087905883789, 0.0601426887512207, 0.05978553771972656, 0.0597551040649414, 0.06011283111572266, 0.05996268844604492, 0.06017305755615234, 0.05997158432006836, 0.06011699295043945, 0.05991424179077148, 0.06098342514038086, 0.05954991912841797, 0.05937619018554687, 0.05930188751220703, 0.05940544128417969, 0.059636192321777345, 0.059539871215820314, 0.05941244888305664, 0.05941414260864258, 0.05964751815795898, 0.05951129531860352, 0.05983679962158203, 0.05952102279663086, 0.05995849609375, 0.05974854278564453, 0.05987593460083008, 0.059940864562988284, 0.05973196792602539, 0.05960281753540039, 0.059617408752441405, 0.05955142211914063, 0.05964614486694336, 0.05954281616210937, 0.059517791748046875, 0.059674175262451175, 0.059744705200195314, 0.05976038360595703, 0.05965030288696289, 0.059856895446777345, 0.05991196823120117, 0.059727745056152345, 0.059748703002929685, 0.05974630355834961, 0.05998387145996094, 0.059906047821044923, 0.05997772979736328, 0.059837825775146486, 0.05984899139404297, 0.059781471252441404, 0.059667518615722656, 0.05966070556640625, 0.059785663604736326, 0.05977097702026367, 0.060176063537597656, 0.06009478378295898, 0.06004940795898438, 0.0599818229675293, 0.06004121780395508, 0.05991424179077148, 0.059958656311035155, 0.05987360000610351, 0.05986102294921875, 0.05988380813598633, 0.059953086853027346, 0.05984467315673828, 0.05974835205078125, 0.05971772766113281, 0.05987481689453125, 0.05998175811767578, 0.05995312118530274, 0.0602158088684082, 0.05999411010742187, 0.060183647155761716, 0.06117788696289062, 0.05977702331542969, 0.059595806121826175, 0.05952377700805664, 0.0595274543762207, 0.059477279663085934, 0.05937635040283203, 0.05935103988647461, 0.05940019226074219, 0.05960639953613281, 0.05943974304199219, 0.05967180633544922, 0.0596835823059082, 0.05969305419921875, 0.059772926330566405, 0.05969641494750977, 0.05973382568359375, 0.059638687133789066, 0.05955126571655273, 0.059806175231933593, 0.05962518310546875, 0.059656478881835937, 0.059616737365722657, 0.05964035034179688, 0.059663455963134764, 0.05968783950805664, 0.059734016418457034, 0.05972172927856445, 0.0600203857421875, 0.05985859298706055, 0.059955902099609375, 0.06013542556762695, 0.06006572723388672, 0.06009657669067383, 0.05985279846191406, 0.059893726348876956, 0.0599244155883789, 0.05984880065917969, 0.05967871856689453, 0.05960704040527344, 0.05985452651977539, 0.0598326416015625, 0.059815937042236325, 0.05966233444213867, 0.0599114875793457, 0.05994889450073242, 0.0599826889038086, 0.06007603073120117, 0.059906047821044923, 0.06003036880493164, 0.06005615997314453, 0.059998207092285157, 0.059860481262207034, 0.05986947250366211, 0.05994927978515625, 0.05980160140991211, 0.05976473617553711, 0.059804672241210936, 0.059859073638916016, 0.0600805778503418, 0.059941310882568356, 0.060184574127197264, 0.05994604873657226, 0.061412353515625, 0.05992243194580078, 0.059452735900878906, 0.059646656036376956, 0.05951897430419922, 
0.05950252914428711, 0.05960300827026367, 0.05940224075317383, 0.05965619277954102, 0.05962457656860352, 0.059679615020751954, 0.059733280181884764, 0.05985968017578125, 0.059850753784179686, 0.05987737655639649, 0.05980950546264648, 0.060079776763916015, 0.059902591705322264, 0.05994496154785156, 0.059815937042236325, 0.05968694305419922, 0.05975241470336914, 0.05991628646850586, 0.05976678466796875, 0.0596879997253418, 0.05984703826904297, 0.059654720306396486, 0.05974835205078125, 0.059980831146240234, 0.060048030853271483, 0.059952606201171876, 0.059829086303710935, 0.0600021743774414, 0.06006182479858398, 0.06012451171875, 0.05969168090820313, 0.05977702331542969, 0.05978726577758789, 0.05991766357421875, 0.05997609710693359, 0.05967897415161133, 0.05997340774536133, 0.05997385787963867, 0.059667648315429686, 0.05970783996582031, 0.06015203094482422, 0.060242080688476564, 0.05995487976074219, 0.0601459846496582, 0.06033715057373047, 0.060148735046386716, 0.05993881607055664, 0.05995929718017578, 0.05988544082641602, 0.060153984069824216, 0.0598548469543457, 0.05993881607055664, 0.06012518310546875, 0.05996511840820312, 0.05999238586425781, 0.0602534065246582, 0.060289825439453125, 0.06037481689453125, 0.061440574645996095, 0.059670368194580076, 0.05947942352294922, 0.059505439758300784, 0.05934035110473633, 0.05931484985351562, 0.059368896484375, 0.05942512130737305, 0.05936742401123047, 0.05938915252685547, 0.05933955383300781, 0.05933615875244141, 0.05954598236083984, 0.059601150512695315, 0.05947177505493164, 0.05966233444213867, 0.059809185028076174, 0.05968547058105469, 0.059957248687744144, 0.05962460708618164, 0.05966460800170898, 0.05969164657592774, 0.059649856567382815, 0.05979900741577148, 0.05955609512329101, 0.06009619140625, 0.059664894104003906, 0.05987971115112305, 0.05990118408203125, 0.059910686492919925, 0.060006622314453126, 0.05992367935180664, 0.05987990570068359, 0.06001628875732422, 0.059799808502197266, 0.05977446365356445, 0.05983939361572266, 0.059800769805908205, 0.05971231842041016, 0.059643905639648435, 0.05956403350830078, 0.05985004806518555, 0.0596220817565918, 0.059889663696289064, 0.060020511627197265, 0.059998432159423826, 0.06006579208374024, 0.06016159820556641, 0.05990006256103515, 0.060051742553710936, 0.06034431838989258, 0.06003299331665039, 0.06008015823364258, 0.06003225708007812, 0.05977094268798828, 0.059759296417236325, 0.05980057525634765, 0.05979238510131836, 0.059757759094238284, 0.059775806427001955, 0.060007583618164065, 0.059962207794189454, 0.06001663970947266, 0.06128675079345703, 0.059762176513671876, 0.05949276733398438, 0.05975033569335937, 0.05967734527587891, 0.05962473678588867, 0.05953523254394531, 0.059385726928710934, 0.059617313385009765, 0.05941123199462891, 0.059366912841796876, 0.05934928131103516, 0.059449726104736325, 0.059749664306640624, 0.05985756683349609, 0.059762752532958985, 0.05985625457763672, 0.059738304138183596, 0.05982457733154297, 0.059635711669921876, 0.05958041763305664, 0.05958041763305664, 0.05953126525878906, 0.059996158599853515, 0.06000230407714844, 0.059921951293945314, 0.05985238265991211, 0.05973696136474609, 0.06013542556762695, 0.059776416778564455, 0.05990256118774414, 0.05971481704711914, 0.06014438247680664, 0.05991628646850586, 0.059776256561279294, 0.05971635055541992, 0.05966563034057617, 0.05990675354003906, 0.05988463973999023, 0.05978214263916016, 0.059940864562988284, 0.05986304092407226, 0.05968668746948242, 0.05980089569091797, 0.05988185501098633, 0.06037721633911133, 
0.06004947280883789, 0.06022108840942383, 0.06017087936401367, 0.06015596771240234, 0.06010406494140625, 0.0601319694519043, 0.06002588653564453, 0.05998691177368164, 0.059979774475097655, 0.05997772979736328, 0.06006579208374024, 0.05997971343994141, 0.06028908920288086, 0.06020697784423828, 0.0601662712097168, 0.060155040740966795, 0.0600830078125, 0.06138265609741211, 0.05958419036865235, 0.05937388610839844, 0.05958041763305664, 0.05974832153320313, 0.05973974227905274, 0.05963206481933594, 0.05933260726928711, 0.05938499069213867, 0.059417438507080075, 0.05941657638549805, 0.059623008728027345, 0.05956620788574219, 0.05986278533935547, 0.05981164932250976, 0.05984739303588867, 0.05990323257446289, 0.05986790466308594, 0.05999375915527344, 0.059693408966064454, 0.05976678466796875, 0.059670272827148436, 0.05965030288696289, 0.05979340744018555, 0.05978464126586914, 0.059883487701416015, 0.05964656066894531, 0.059676673889160155, 0.059774974822998046, 0.05995478439331055, 0.059730335235595705, 0.059827552795410154, 0.0598675537109375, 0.059838718414306644, 0.059834369659423826, 0.05975244903564453, 0.059917438507080076, 0.05965289688110351, 0.05998601531982422, 0.05970943832397461, 0.06016169738769531, 0.05977715301513672, 0.059865310668945314, 0.05977088165283203, 0.05971923065185547, 0.059886016845703126, 0.059891712188720705, 0.05978070449829102, 0.059846206665039064, 0.05991084671020508, 0.0599471664428711, 0.05996073532104492, 0.06000601577758789, 0.06002582550048828, 0.06005132675170898, 0.0603895034790039, 0.06007094573974609, 0.060095455169677736, 0.06023302459716797, 0.060246719360351565, 0.059976959228515626, 0.060291839599609376, 0.0600596809387207, 0.0613482551574707, 0.06028096008300781, 0.059437057495117185, 0.05970739364624023, 0.05940224075317383, 0.05958860778808594, 0.059719680786132816, 0.05954969787597656, 0.05943910217285156, 0.05946323013305664, 0.05945561599731446, 0.0594158706665039, 0.05951177597045899, 0.0595656967163086, 0.05956975936889648, 0.05955462265014649, 0.059772926330566405, 0.05970240020751953, 0.05953011322021484, 0.05951011276245117, 0.05946230316162109, 0.05977907180786133, 0.05978019332885742, 0.05965507125854492, 0.05987123107910156, 0.05996524810791016, 0.059803840637207034, 0.06005107116699219, 0.0600948486328125, 0.060028926849365234, 0.05989766311645508, 0.05996358489990234, 0.06022710418701172, 0.059932415008544924, 0.05989244842529297, 0.059936576843261716, 0.05980588912963867, 0.05994905471801758, 0.05986304092407226, 0.059671775817871094, 0.05976348876953125, 0.059802879333496095, 0.059771648406982424, 0.05970534515380859, 0.06037913513183594, 0.060055328369140626, 0.06015407943725586, 0.060028926849365234, 0.05993865585327148, 0.059936927795410155, 0.059831966400146486, 0.06019424057006836, 0.05995158386230469, 0.059918785095214845, 0.06009417724609375, 0.05987971115112305, 0.059916126251220704, 0.06003638458251953, 0.05995516967773438, 0.06007900619506836, 0.060306560516357424, 0.06034521484375, 0.06006784057617188]",tokens/s,16.717689512526043,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 358, in __init__ self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 477296 has 14.74 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 90.39 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) 
File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 466, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 357, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 476175 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 466, in __init__ super().__init__(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 357, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 476534 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = 
cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 287, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 150.12 MiB is free. Process 467100 has 14.59 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.43 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 456.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 280.12 MiB is free. Process 478042 has 14.46 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 3.19 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, 
bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 482722 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 1.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 357, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 475396 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,787.738624,1113.45664,0.0,710.934528,657.419264,s,1,7.5675126953125,7.5675126953125,0.0,7.5675126953125,7.5675126953125,7.5675126953125,7.5675126953125,[7.5675126953125],,kWh,2.288202816665337e-06,2.4507725272011925e-07,9.01667387975813e-07,3.4349474573612693e-06,,MB,1149.210624,1157.496832,0.0,744.48896,582.372352,s,20,0.3287394256591797,0.016436971282958988,0.00028613696791011413,0.01638203239440918,0.016473599433898926,0.016591565322875975,0.017430312576293943,"[0.01763999938964844, 0.016368288040161133, 0.016336223602294923, 0.016301408767700195, 0.016279104232788087, 0.01623891258239746, 0.016466623306274415, 0.01640227127075195, 0.016414464950561522, 0.016341791152954102, 0.016257856369018556, 0.01635958480834961, 0.016458816528320312, 0.016455488204956056, 0.016395776748657227, 0.016281887054443358, 0.01639958381652832, 0.016364608764648438, 0.01653638458251953, 
0.016440351486206053]",tokens/s,15574.645449761643,kWh,5.907834365215435e-07,6.515256907547164e-08,3.9252561766402816e-07,1.0484616232610433e-06,tokens/kWh,244167258.31486326,MB,1160.773632,1178.468352,0.0,765.46048,582.374912,s,20,10.028625427246093,0.5014312713623046,0.003850425998967264,0.5022394256591797,0.5048314025878906,0.5067566131591797,0.508034428100586,"[0.5011539611816406, 0.502528564453125, 0.503845703125, 0.5066725463867188, 0.4957587890625, 0.49737393188476564, 0.5033777770996094, 0.5022026062011719, 0.49934762573242186, 0.4946473693847656, 0.4921540222167969, 0.5046268310546875, 0.5083538818359375, 0.5029393005371093, 0.5007671508789062, 0.5013140563964844, 0.5034700927734375, 0.5018146057128906, 0.5040003662109375, 0.5022762451171875]",tokens/s,125.64034913267291,kWh,1.4329080358269282e-05,1.5802511212465596e-06,6.6402577863358475e-06,2.2549589265851687e-05,tokens/kWh,2793842.4623726965,,s,1260,10.01843649196625,0.007951140072989086,0.00019335499251095522,0.007932912111282348,0.008077561855316162,0.008144215774536133,0.008634093284606934,"[0.00774179220199585, 0.007991456031799316, 0.008101984024047852, 0.00786636781692505, 0.007847936153411865, 0.008058879852294922, 0.007886367797851563, 0.007879136085510254, 0.007895040035247802, 0.007955455780029297, 0.008022879600524902, 0.008066240310668946, 0.008086496353149415, 0.008023455619812011, 0.007909984111785888, 0.007890944004058837, 0.007847392082214355, 0.007895232200622559, 0.007907680034637451, 0.008128512382507324, 0.008065024375915527, 0.008162816047668458, 0.00798361587524414, 0.008019359588623046, 0.007959136009216309, 0.007938047885894776, 0.00795795202255249, 0.007930111885070801, 0.007966527938842774, 0.007948800086975098, 0.007859263896942139, 0.007840703964233399, 0.007876480102539062, 0.00798528003692627, 0.007898752212524413, 0.00792729616165161, 0.008016160011291504, 0.008006239891052246, 0.008005056381225586, 0.007822239875793457, 0.008009056091308593, 0.007868735790252686, 0.007972608089447022, 0.00789734411239624, 0.007939648151397706, 0.007921279907226563, 0.008120960235595703, 0.00789728021621704, 0.007937503814697265, 0.007914271831512451, 0.007845632076263428, 0.007825407981872558, 0.007868288040161132, 0.007979135990142822, 0.007878655910491944, 0.007831552028656007, 0.007895040035247802, 0.007960800170898438, 0.007914591789245605, 0.008116928100585938, 0.007965983867645264, 0.007926112174987792, 0.007950719833374023, 0.007783936023712158, 0.007987711906433105, 0.008048640251159669, 0.008179903984069824, 0.007975776195526123, 0.00793289613723755, 0.007894015789031983, 0.007914463996887207, 0.008114208221435546, 0.008320992469787597, 0.007900991916656493, 0.007871935844421387, 0.007934751987457276, 0.008097344398498536, 0.00805452823638916, 0.007994336128234863, 0.0081112642288208, 0.007980991840362548, 0.008040608406066895, 0.008003680229187012, 0.00797324800491333, 0.007916768074035645, 0.007990047931671143, 0.008011775970458984, 0.007872159957885743, 0.007942495822906494, 0.007802879810333252, 0.007847936153411865, 0.0079235520362854, 0.007948448181152343, 0.008074943542480468, 0.007854400157928466, 0.007866591930389404, 0.007886752128601075, 0.007956352233886719, 0.008037887573242187, 0.007901408195495606, 0.007969056129455566, 0.007831552028656007, 0.007921599864959716, 0.008056896209716797, 0.008036352157592774, 0.008013824462890624, 0.008011775970458984, 0.008048640251159669, 0.008013824462890624, 0.007966047763824462, 0.00802064037322998, 0.008008895874023437, 0.007919904232025146, 
0.008036895751953125, 0.007985151767730713, 0.008038399696350097, 0.007893216133117676, 0.007878431797027588, 0.007866047859191894, 0.00795475196838379, 0.008052736282348634, 0.007903232097625732, 0.007935616016387939, 0.007856512069702148, 0.007862271785736084, 0.007988704204559325, 0.0077441282272338865, 0.008118080139160157, 0.0080447998046875, 0.007938047885894776, 0.007923359870910645, 0.00790563201904297, 0.008048416137695313, 0.008020319938659669, 0.00795359992980957, 0.00797318410873413, 0.007934368133544922, 0.007917535781860352, 0.007933695793151856, 0.008048095703125, 0.00795900821685791, 0.008081727981567383, 0.007935999870300293, 0.00803766441345215, 0.008053471565246582, 0.008003520011901856, 0.008017696380615235, 0.008005472183227539, 0.007980735778808593, 0.008086272239685059, 0.00791484785079956, 0.007901472091674805, 0.007870848178863526, 0.008171327590942383, 0.007946335792541503, 0.007907360076904298, 0.00790121603012085, 0.00793609619140625, 0.008095680236816406, 0.007974239826202392, 0.007866559982299804, 0.008028927803039551, 0.0079617600440979, 0.008116224288940429, 0.00805945587158203, 0.007897088050842285, 0.007872511863708496, 0.008109696388244629, 0.007887135982513428, 0.00791868782043457, 0.007922592163085938, 0.007925856113433837, 0.007958687782287598, 0.008003487586975097, 0.008062911987304687, 0.008052736282348634, 0.008123680114746094, 0.008047327995300294, 0.007934239864349366, 0.008015583992004395, 0.008325119972229005, 0.007921664237976075, 0.007925759792327881, 0.00800393581390381, 0.008058848381042481, 0.008089280128479004, 0.007985151767730713, 0.007984543800354004, 0.008006239891052246, 0.007709792137145996, 0.007981056213378907, 0.007936927795410156, 0.008028160095214844, 0.008068256378173828, 0.008012639999389648, 0.007968768119812012, 0.00802400016784668, 0.00797702407836914, 0.00800704002380371, 0.007954495906829834, 0.007897664070129394, 0.007935776233673095, 0.007966944217681885, 0.007927807807922363, 0.007862271785736084, 0.007985151767730713, 0.008025568008422851, 0.007908031940460206, 0.007944032192230225, 0.007908895969390869, 0.009652992248535157, 0.010857248306274414, 0.008168383598327637, 0.008083200454711914, 0.008012031555175781, 0.007911424160003662, 0.007976960182189942, 0.007925759792327881, 0.00796447992324829, 0.007998847961425781, 0.007863103866577148, 0.007853312015533447, 0.007881728172302246, 0.008043680191040039, 0.008056575775146485, 0.007994207859039306, 0.008005951881408692, 0.008244895935058593, 0.007948319911956787, 0.008013152122497558, 0.007935711860656738, 0.008065312385559083, 0.007929535865783691, 0.007832287788391113, 0.007874847888946533, 0.008019488334655761, 0.007949024200439454, 0.007916384220123291, 0.007895071983337403, 0.00789798402786255, 0.008021984100341797, 0.007896160125732422, 0.007885727882385254, 0.007904607772827149, 0.00793996810913086, 0.007852479934692383, 0.008052127838134766, 0.008055935859680176, 0.00803116798400879, 0.007876736164093017, 0.007951104164123535, 0.007901023864746094, 0.00773795223236084, 0.007976960182189942, 0.007941120147705078, 0.007909855842590331, 0.00786076784133911, 0.00787660789489746, 0.007898464202880859, 0.007844511985778808, 0.007749023914337158, 0.00773795223236084, 0.007849599838256836, 0.007809408187866211, 0.007803936004638672, 0.007855072021484374, 0.007823359966278077, 0.007903232097625732, 0.007815296173095704, 0.007782144069671631, 0.0077816638946533205, 0.007737567901611328, 0.007842688083648682, 0.00782534408569336, 0.007784160137176513, 
0.007810912132263184, 0.007762080192565918, 0.0077968001365661625, 0.007800320148468018, 0.00787007999420166, 0.00790825605392456, 0.007772160053253174, 0.007724480152130127, 0.0077783999443054195, 0.0077974720001220705, 0.0078087677955627445, 0.007898560047149658, 0.007840320110321045, 0.007821311950683594, 0.007968992233276366, 0.00788972806930542, 0.007896031856536865, 0.007890944004058837, 0.00789408016204834, 0.00782431983947754, 0.007845888137817383, 0.007772160053253174, 0.007819039821624756, 0.007826655864715577, 0.007824384212493896, 0.007942143917083741, 0.007939712047576904, 0.007833695888519288, 0.007769728183746338, 0.007787456035614013, 0.00793552017211914, 0.007981247901916504, 0.008113984107971192, 0.00805907154083252, 0.00809337615966797, 0.008015263557434081, 0.007985727787017822, 0.007921919822692872, 0.00791100788116455, 0.007955232143402099, 0.007733727931976318, 0.0079234881401062, 0.00793558406829834, 0.007917984008789063, 0.007896959781646729, 0.007902880191802979, 0.007938528060913086, 0.007942143917083741, 0.00791756820678711, 0.007925759792327881, 0.007788224220275879, 0.007907648086547852, 0.00800153636932373, 0.008013376235961914, 0.007745312213897705, 0.007895711898803712, 0.00786137580871582, 0.007840640068054199, 0.007921664237976075, 0.007815167903900147, 0.007872447967529297, 0.007874623775482177, 0.007945504188537598, 0.007973120212554932, 0.007880191802978515, 0.007930463790893554, 0.007879039764404298, 0.0078031039237976075, 0.008555744171142578, 0.007924223899841308, 0.007874559879302979, 0.007914624214172362, 0.007944479942321777, 0.007873184204101562, 0.007876160144805909, 0.007846496105194091, 0.007776095867156983, 0.007809023857116699, 0.007751296043395996, 0.00773363208770752, 0.007716063976287842, 0.007795487880706787, 0.007849535942077636, 0.00779311990737915, 0.007708640098571778, 0.007718912124633789, 0.007811071872711181, 0.008013343811035157, 0.00803273582458496, 0.007953824043273926, 0.007875135898590088, 0.007872159957885743, 0.00781283187866211, 0.008014495849609376, 0.008028160095214844, 0.007970816135406494, 0.007913472175598145, 0.007933792114257812, 0.007815328121185303, 0.007931903839111328, 0.007840928077697753, 0.00782857608795166, 0.007829408168792724, 0.007742688179016113, 0.00801030445098877, 0.007895328044891358, 0.007870336055755616, 0.007864448070526123, 0.00788479995727539, 0.007898272037506104, 0.007868800163269043, 0.007893472194671632, 0.007881887912750244, 0.00790780782699585, 0.007891583919525146, 0.007886047840118409, 0.007844384193420411, 0.007845888137817383, 0.007884448051452636, 0.00784115219116211, 0.0078076162338256835, 0.007870816230773925, 0.008283295631408692, 0.008028096199035644, 0.007991775989532471, 0.00803276824951172, 0.008087039947509766, 0.008136672019958496, 0.008087360382080078, 0.007889408111572266, 0.007848383903503418, 0.00782102394104004, 0.007929120063781738, 0.007842527866363526, 0.007793920040130615, 0.007860991954803467, 0.007988351821899414, 0.008024959564208985, 0.007858176231384278, 0.007966559886932373, 0.008238656044006348, 0.008122271537780761, 0.00796947193145752, 0.007982687950134277, 0.007936416149139404, 0.008351743698120117, 0.008082719802856445, 0.008032992362976074, 0.008630144119262695, 0.008202367782592774, 0.00808569622039795, 0.008077152252197265, 0.008219679832458497, 0.008199104309082031, 0.008068544387817383, 0.00797753620147705, 0.008028160095214844, 0.007991104125976563, 0.007977151870727539, 0.008128512382507324, 0.007937471866607666, 0.007874911785125732, 
0.007871007919311524, 0.007906112194061279, 0.007969664096832276, 0.007980991840362548, 0.007747488021850586, 0.007972288131713866, 0.008005536079406739, 0.007901855945587158, 0.007940095901489258, 0.007913343906402588, 0.00786624002456665, 0.00792307186126709, 0.008188511848449707, 0.008140640258789062, 0.007827904224395752, 0.007882751941680909, 0.008075263977050781, 0.00819007968902588, 0.007917439937591553, 0.00783126401901245, 0.007823647975921631, 0.008065024375915527, 0.007910560131072998, 0.007959392070770264, 0.00790841579437256, 0.00788908815383911, 0.007983871936798096, 0.007974912166595459, 0.007927616119384766, 0.007936255931854248, 0.007895264148712158, 0.007829311847686768, 0.007851935863494874, 0.007886847972869874, 0.00788479995727539, 0.00791270399093628, 0.008018688201904297, 0.00789299201965332, 0.007819263935089112, 0.00801318359375, 0.00793833589553833, 0.007989888191223144, 0.00933244800567627, 0.008109760284423829, 0.008026432037353516, 0.008029824256896972, 0.007859807968139648, 0.008033056259155274, 0.007919616222381591, 0.007878655910491944, 0.00799887990951538, 0.007944799900054932, 0.007943391799926758, 0.007962751865386962, 0.00795305585861206, 0.007935008049011231, 0.008043135643005371, 0.00796729612350464, 0.007972640037536621, 0.007853119850158692, 0.00783580780029297, 0.00788156795501709, 0.007847871780395508, 0.00787056016921997, 0.007963808059692382, 0.007973536014556885, 0.007948512077331544, 0.007706624031066894, 0.009202783584594726, 0.008993696212768555, 0.007878655910491944, 0.007880576133728027, 0.007845568180084228, 0.007940544128417968, 0.008045727729797364, 0.007911776065826416, 0.007916031837463379, 0.007952383995056152, 0.007938111782073974, 0.00783353614807129, 0.007810080051422119, 0.0077424321174621585, 0.007781760215759277, 0.007858335971832275, 0.007844319820404053, 0.0077712640762329106, 0.007893951892852783, 0.007997151851654052, 0.007851391792297364, 0.007897408008575439, 0.007815807819366456, 0.007730783939361573, 0.007763391971588135, 0.007751840114593506, 0.007702943801879883, 0.007747903823852539, 0.00800767993927002, 0.007858176231384278, 0.007861951828002929, 0.0077325439453125, 0.007734464168548584, 0.007763775825500488, 0.007753983974456787, 0.007841472148895263, 0.00791164779663086, 0.007818719863891602, 0.00776639986038208, 0.007857855796813964, 0.00794655990600586, 0.007991615772247315, 0.008013440132141112, 0.007959712028503417, 0.007830431938171387, 0.007897088050842285, 0.007992479801177978, 0.007943007946014404, 0.007880256175994874, 0.008031871795654297, 0.007958879947662353, 0.00793452787399292, 0.008136608123779298, 0.0079617600440979, 0.007918432235717773, 0.007899104118347168, 0.007985472202301025, 0.007933599948883057, 0.007907264232635498, 0.007891071796417235, 0.00806486415863037, 0.007876512050628661, 0.007712224006652832, 0.00799014377593994, 0.007929855823516846, 0.007884736061096192, 0.007841856002807616, 0.008043583869934081, 0.007959551811218261, 0.007899263858795165, 0.007946047782897949, 0.008070560455322265, 0.007823008060455322, 0.007772831916809082, 0.007756063938140869, 0.007730271816253662, 0.007745600223541259, 0.007827744007110596, 0.007811647891998291, 0.007770112037658691, 0.007696383953094482, 0.007711808204650879, 0.007781311988830566, 0.00784115219116211, 0.007735487937927246, 0.007733823776245118, 0.007734208106994629, 0.007864319801330566, 0.008017087936401367, 0.007980000019073487, 0.007868607997894288, 0.007803487777709961, 0.007737184047698974, 0.007776415824890136, 0.0077636480331420895, 
0.007913792133331298, 0.007881824016571046, 0.007696864128112793, 0.00766815996170044, 0.0077578239440917966, 0.007837215900421142, 0.007975103855133057, 0.008005599975585937, 0.008040767669677734, 0.007976960182189942, 0.007819263935089112, 0.007815167903900147, 0.007817440032958985, 0.007788320064544677, 0.007766016006469726, 0.007995391845703125, 0.008033823966979981, 0.007857728004455566, 0.007879295825958251, 0.007835936069488526, 0.007802591800689697, 0.00774777603149414, 0.007789792060852051, 0.007709568023681641, 0.007817215919494629, 0.007816287994384765, 0.007889472007751464, 0.007919167995452881, 0.007844639778137206, 0.007868703842163085, 0.007591807842254638, 0.007754623889923096, 0.007744639873504639, 0.007771008014678955, 0.00779414415359497, 0.007739264011383057, 0.007760064125061035, 0.007767712116241455, 0.00776035213470459, 0.00771673583984375, 0.007672607898712158, 0.007984320163726806, 0.008067392349243164, 0.008171104431152343, 0.00782099199295044, 0.007724192142486572, 0.007671135902404785, 0.00765331220626831, 0.007641568183898926, 0.007841856002807616, 0.007742656230926514, 0.007726175785064697, 0.00770198392868042, 0.0077224960327148436, 0.007676959991455078, 0.007798783779144287, 0.007876255989074706, 0.007725056171417236, 0.00765331220626831, 0.007670048236846924, 0.007685919761657715, 0.00771017599105835, 0.007690271854400635, 0.007635456085205078, 0.00770630407333374, 0.007819231986999511, 0.0076909117698669436, 0.007694015979766846, 0.008689663887023925, 0.008013152122497558, 0.007864992141723632, 0.007779967784881592, 0.0077268161773681645, 0.00773583984375, 0.007928063869476318, 0.007837567806243896, 0.007982624053955079, 0.007864799976348877, 0.007788703918457031, 0.007877888202667237, 0.007858784198760986, 0.007772160053253174, 0.007730207920074463, 0.007782879829406738, 0.007703167915344238, 0.007739264011383057, 0.007822591781616211, 0.007932672023773193, 0.00791964817047119, 0.00800326442718506, 0.007878943920135498, 0.007895296096801757, 0.007915264129638671, 0.007848608016967773, 0.008013664245605468, 0.007974143981933593, 0.007877535820007324, 0.007884479999542237, 0.007915840148925782, 0.007833600044250488, 0.008085568428039551, 0.007822656154632568, 0.007907616138458253, 0.008038751602172852, 0.008062975883483887, 0.008154879570007325, 0.00806716823577881, 0.008083616256713867, 0.008050687789916992, 0.008095104217529297, 0.008034175872802734, 0.007932640075683594, 0.007866015911102295, 0.007878431797027588, 0.007922111988067627, 0.007938208103179932, 0.007925343990325929, 0.00785964822769165, 0.007891583919525146, 0.00798960018157959, 0.007888895988464355, 0.007937535762786866, 0.007874752044677734, 0.007988959789276122, 0.00806272029876709, 0.007934815883636474, 0.00784825611114502, 0.00787772798538208, 0.007848383903503418, 0.007925151824951172, 0.007959296226501465, 0.007907328128814697, 0.007955743789672851, 0.007919936180114747, 0.00796675205230713, 0.007924032211303712, 0.007915584087371827, 0.007986432075500488, 0.007925759792327881, 0.008188672065734863, 0.008109888076782227, 0.008083647727966308, 0.007980447769165039, 0.007937664031982422, 0.008081824302673339, 0.007914048194885254, 0.00790835189819336, 0.007969791889190675, 0.007921664237976075, 0.00794326400756836, 0.008409888267517089, 0.008710271835327148, 0.00859337615966797, 0.008828831672668456, 0.008021120071411134, 0.007926144123077393, 0.007835487842559814, 0.00800972843170166, 0.007903232097625732, 0.00788646411895752, 0.0081692476272583, 0.008122400283813476, 
0.008134688377380371, 0.008145088195800782, 0.008152928352355957, 0.008029791831970215, 0.008058879852294922, 0.008074111938476562, 0.008052767753601075, 0.008069120407104492, 0.007924736022949219, 0.007963295936584472, 0.007956064224243165, 0.00799564790725708, 0.007979519844055176, 0.007976960182189942, 0.007905055999755859, 0.007929215908050537, 0.007925695896148681, 0.007956863880157471, 0.007926208019256592, 0.007845344066619873, 0.007945184230804442, 0.008038271903991699, 0.008023839950561523, 0.007921664237976075, 0.007845888137817383, 0.007895040035247802, 0.008216768264770509, 0.00862611198425293, 0.010333727836608887, 0.008736448287963867, 0.008273599624633789, 0.008182463645935058, 0.007995071887969971, 0.007874207973480225, 0.007912384033203126, 0.007950335979461669, 0.008029696464538574, 0.00789350414276123, 0.0078089599609375, 0.007882815837860107, 0.007921664237976075, 0.008048543930053711, 0.008019712448120117, 0.007928160190582276, 0.007935999870300293, 0.00818995189666748, 0.007987199783325195, 0.008120320320129394, 0.008255423545837403, 0.007968671798706055, 0.008118304252624511, 0.00807744026184082, 0.008038399696350097, 0.007972991943359375, 0.007968639850616455, 0.008123552322387696, 0.007885663986206055, 0.007694752216339112, 0.007983232021331787, 0.007969056129455566, 0.007892320156097412, 0.007879327774047852, 0.007929855823516846, 0.007888895988464355, 0.007900288105010986, 0.007846784114837647, 0.007915520191192627, 0.007907296180725098, 0.007905312061309814, 0.007974048137664795, 0.007935935974121094, 0.007866943836212158, 0.007936223983764649, 0.00796889591217041, 0.00832521629333496, 0.008255392074584962, 0.007911520004272461, 0.008045984268188476, 0.00795084810256958, 0.007880703926086426, 0.008031392097473144, 0.007969632148742675, 0.007933407783508302, 0.007991968154907226, 0.007943999767303467, 0.007872288227081298, 0.00788643217086792, 0.007910143852233887, 0.007913280010223388, 0.007864736080169677, 0.0077974720001220705, 0.007767039775848389, 0.00780841588973999, 0.00777891206741333, 0.007997439861297608, 0.007809023857116699, 0.007765439987182618, 0.008036928176879883, 0.007915520191192627, 0.007911392211914062, 0.00789302396774292, 0.007806528091430664, 0.007724512100219727, 0.007776768207550049, 0.007921984195709228, 0.007909535884857178, 0.007942143917083741, 0.00789299201965332, 0.007895040035247802, 0.007861695766448974, 0.008198431968688965, 0.008155424118041993, 0.009861120223999023, 0.007967967987060547, 0.008206175804138184, 0.008639776229858398, 0.008227871894836427, 0.007985792160034179, 0.007995391845703125, 0.0079170560836792, 0.007851136207580567, 0.008256575584411622, 0.007801919937133789, 0.007785215854644776, 0.007671040058135986, 0.007734015941619873, 0.007862271785736084, 0.007921664237976075, 0.007863808155059814, 0.007795167922973633, 0.007787744045257569, 0.0078076162338256835, 0.007780511856079101, 0.007946271896362305, 0.00772435188293457, 0.007729856014251709, 0.007794655799865722, 0.00812390422821045, 0.007815711975097657, 0.007849984169006348, 0.007909183979034423, 0.007852223873138428, 0.007763999938964844, 0.007804480075836182, 0.007870495796203613, 0.007852416038513183, 0.008060928344726562, 0.007992832183837891, 0.008577312469482422, 0.008042719841003418, 0.008091648101806641, 0.008030176162719727, 0.008007712364196777, 0.00798470401763916, 0.008046879768371581, 0.008072575569152833, 0.008055583953857422, 0.008037759780883788, 0.00809670352935791, 0.008047840118408204, 0.008002016067504883, 0.007954432010650634, 
0.008007935523986816, 0.008095487594604493, 0.008046591758728027, 0.007903232097625732, 0.007868512153625488, 0.007988255977630615, 0.007907296180725098, 0.007871615886688232, 0.00784771203994751, 0.00795577621459961, 0.007802847862243652, 0.007868288040161132, 0.007977663993835449, 0.007993023872375488, 0.007971295833587647, 0.008178815841674805, 0.00790396785736084, 0.007948383808135987, 0.007978655815124512, 0.008038816452026367, 0.008032032012939453, 0.007720992088317871, 0.008140095710754395, 0.008010623931884765, 0.007974912166595459, 0.00794332790374756, 0.007937183856964111, 0.007990975856781006, 0.007905280113220215, 0.007832736015319823, 0.007907648086547852, 0.007984960079193115, 0.007901919841766357, 0.007874559879302979, 0.007914815902709961, 0.008254143714904785, 0.007980063915252685, 0.00790831995010376, 0.00800972843170166, 0.007919616222381591, 0.007911424160003662, 0.007936031818389892, 0.007919136047363282, 0.007962048053741455, 0.007891520023345948, 0.007913023948669433, 0.007885695934295655, 0.007851871967315674, 0.007837696075439453, 0.007858335971832275, 0.007809023857116699, 0.007849984169006348, 0.007939295768737793, 0.00789958381652832, 0.007866496086120606, 0.007843103885650635, 0.007926720142364502, 0.00819388771057129, 0.008009535789489746, 0.007995520114898682, 0.007944159984588623, 0.007969024181365967, 0.008075039863586425, 0.008087871551513671, 0.008001567840576172, 0.007978816032409668, 0.0079269437789917, 0.008006560325622558, 0.00814419174194336, 0.008000255584716797, 0.007999584197998047, 0.008044672012329102, 0.00804588794708252, 0.007979423999786376, 0.007882751941680909, 0.00785814380645752, 0.007886879920959473, 0.007957568168640137, 0.007945151805877685, 0.007947455883026124, 0.007937983989715576, 0.007930751800537109, 0.007892896175384521, 0.007934048175811767, 0.007852128028869629, 0.008041215896606444, 0.008008928298950195, 0.0079585599899292, 0.007876351833343505, 0.007871488094329833, 0.007849567890167237, 0.007923903942108155, 0.00798137617111206, 0.00787446403503418, 0.007870463848114014, 0.00788479995727539, 0.007947648048400879, 0.007996032238006592, 0.008013407707214355, 0.00799555206298828, 0.007931359767913818, 0.007797855854034424, 0.007863999843597412, 0.00794540786743164, 0.007958784103393554, 0.007961376190185547, 0.008144672393798829, 0.007966432094573974, 0.00798748779296875, 0.007996960163116455, 0.007993824005126953, 0.007976960182189942, 0.007931903839111328, 0.007939551830291748, 0.007948095798492432, 0.007928256034851075, 0.007961120128631592, 0.007958271980285644, 0.008038687705993652, 0.008011136054992676, 0.007993567943572998, 0.008088864326477051, 0.008117088317871094, 0.008083456039428711, 0.008243200302124023, 0.008097727775573731, 0.007956543922424316, 0.007946144104003907, 0.007999584197998047, 0.00795472002029419, 0.007925631999969483, 0.008001376152038574, 0.008075263977050781, 0.007960927963256837, 0.007996319770812988, 0.007906047821044922, 0.007994815826416016, 0.008024640083312988, 0.007950560092926025, 0.0081692476272583, 0.008103967666625977, 0.007985375881195068, 0.007999231815338135, 0.008039872169494628, 0.00810041618347168, 0.008062975883483887, 0.008007776260375977, 0.007780735969543457, 0.007942016124725343, 0.007958655834197997, 0.007922848224639893, 0.007924575805664063, 0.00791257619857788, 0.007893887996673584, 0.007909567832946777, 0.007840672016143798, 0.007789279937744141, 0.007866559982299804, 0.00792745590209961, 0.007846240043640137, 0.007932928085327149, 0.007842495918273925, 
0.007926080226898193, 0.00791756820678711, 0.00785923194885254, 0.007910367965698242, 0.007933343887329102, 0.007918176174163818, 0.007860223770141601, 0.007888512134552002, 0.007848095893859864, 0.007911327838897706, 0.007958847999572754, 0.007786335945129394, 0.007802527904510498, 0.007814911842346192, 0.007921760082244872, 0.007922336101531983, 0.008050687789916992, 0.00787177610397339, 0.007854656219482422, 0.007878335952758789, 0.007969247817993163, 0.007935711860656738, 0.007947968006134034, 0.008165823936462402, 0.007989408016204834, 0.00800153636932373, 0.00808499240875244, 0.008043264389038086, 0.008124159812927247, 0.008065247535705566, 0.008066207885742187, 0.008135295867919922, 0.00800767993927002, 0.007986623764038086, 0.007956511974334717, 0.007950623989105225, 0.008022080421447754, 0.008034496307373047, 0.008010944366455079, 0.007983744144439697, 0.007964863777160644, 0.007935999870300293, 0.008466431617736817, 0.007981279850006103, 0.00809552001953125, 0.008042495727539062, 0.008044639587402343, 0.008122048377990722, 0.007764224052429199, 0.008017663955688476, 0.007879776000976562, 0.00796992015838623, 0.008012960433959961, 0.007999743938446045, 0.00800108814239502, 0.00794707202911377, 0.00816438388824463, 0.0080348482131958, 0.007981152057647704, 0.008008031845092774, 0.00791974401473999, 0.00791539192199707, 0.008056832313537597, 0.007910848140716553, 0.007844287872314453, 0.007868192195892333, 0.00886614418029785, 0.007966464042663574, 0.00790553617477417, 0.007921664237976075, 0.007829760074615479, 0.007831007957458495, 0.007856416225433349, 0.00795801591873169, 0.007954336166381836, 0.007918047904968261, 0.007952703952789306, 0.007917376041412353, 0.00795030403137207, 0.008179295539855956, 0.007974431991577149, 0.008000415802001953, 0.007929855823516846, 0.008052224159240723, 0.008042048454284668, 0.008112128257751466, 0.008037343978881836, 0.008019583702087402, 0.008020416259765625, 0.008018912315368653, 0.008143808364868164, 0.008013312339782714, 0.007896607875823974, 0.007866335868835448, 0.007889920234680176, 0.007968768119812012, 0.008275967597961426, 0.007949376106262207, 0.007918335914611817, 0.00796675205230713, 0.007927968025207519, 0.007981056213378907, 0.008008000373840331, 0.007972256183624267, 0.007978271961212159, 0.00805344009399414, 0.008017215728759765, 0.007983168125152589, 0.00796729612350464, 0.008051072120666504, 0.008078656196594237, 0.007755807876586914, 0.008073151588439942, 0.008058752059936523, 0.00805081558227539, 0.008058048248291016, 0.008030112266540528, 0.00798419189453125, 0.00799119997024536, 0.007964288234710694, 0.008042880058288574, 0.008036352157592774, 0.008005087852478027, 0.007919775962829589, 0.007927584171295165, 0.007971424102783203, 0.007903232097625732, 0.007884479999542237, 0.007901504039764404, 0.008054719924926759, 0.007947968006134034, 0.007860127925872804, 0.007864704132080078, 0.007858272075653077, 0.00801587200164795, 0.008001376152038574, 0.00795465612411499, 0.008081343650817871, 0.00797926378250122, 0.007941887855529786, 0.008005472183227539, 0.007982751846313477, 0.007915711879730225, 0.008019295692443848, 0.008033408164978027, 0.007982207775115966, 0.008040191650390625, 0.007980000019073487, 0.007952383995056152, 0.007878176212310791, 0.00795900821685791, 0.007929855823516846, 0.007919616222381591, 0.007972320079803467, 0.008061471939086915, 0.007924992084503174, 0.007929952144622802, 0.00797763204574585, 0.007935232162475585, 0.007957248210906983, 0.008003583908081055, 0.007927807807922363, 
0.007911680221557617, 0.007935743808746338, 0.007797823905944824, 0.00791648006439209, 0.007972864151000977, 0.00795580816268921, 0.007905983924865722, 0.007919583797454834, 0.007990816116333007, 0.007909952163696289, 0.00812390422821045, 0.007997568130493165]",tokens/s,125.76812769241886,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 373, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 115, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 399871 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", 
line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,787.853312,1843.265536,0.0,1440.743424,1427.743744,s,1,7.54443603515625,7.54443603515625,0.0,7.54443603515625,7.54443603515625,7.54443603515625,7.54443603515625,[7.54443603515625],,kWh,3.2332100208501896e-06,3.4972958111680056e-07,1.1000008799944005e-06,4.682940481961391e-06,,MB,1124.569088,1912.471552,0.0,1499.46368,1436.386304,s,11,0.45361641693115234,0.041237856084650214,0.0024021220387259287,0.04088310241699219,0.041723712921142575,0.045021360397338867,0.0476594783782959,"[0.04831900787353516, 0.04050124740600586, 0.04011996841430664, 0.040591487884521486, 0.04088310241699219, 0.03818435287475586, 0.04123065567016602, 0.04095676803588867, 0.041009441375732425, 0.04009667205810547, 0.041723712921142575]",tokens/s,6207.888195606021,kWh,1.5511524591052024e-06,1.710625973264168e-07,1.0220099298942023e-06,2.7442249863258215e-06,tokens/kWh,93286811.85967642,MB,1136.390144,1954.414592,0.0,1541.40672,1461.974016,s,11,9.952650024414064,0.904786365855824,0.005830624551077123,0.9053418579101562,0.9101365966796875,0.9131183471679687,0.9155037475585938,"[0.903692138671875, 0.9078826293945312, 0.907360107421875, 0.9101365966796875, 0.91610009765625, 0.9053418579101562, 0.8970089111328124, 0.90190234375, 0.8945010375976562, 0.9008506469726563, 0.9078736572265625]",tokens/s,69.62969644266163,kWh,2.5997112897706055e-05,2.8668803228604284e-06,1.3484971372652583e-05,4.2348964593219056e-05,tokens/kWh,1487639.6767936,,s,693,9.945051651954651,0.014350723884494444,0.0002664499954628789,0.014313471794128419,0.014521196937561034,0.01466167049407959,0.01537541095733643,"[0.014147871971130372, 0.014192799568176269, 0.014361696243286133, 0.014222080230712891, 0.01429094409942627, 0.014231552124023437, 0.01416198444366455, 0.014313407897949219, 0.014306559562683105, 0.014560000419616699, 0.014289183616638184, 0.014321663856506347, 0.014268128395080567, 0.014170111656188965, 0.014476672172546386, 0.014166496276855468, 0.01422704029083252, 0.014305855751037598, 0.014249664306640625, 0.01422777557373047, 0.014114336013793945, 0.01428451156616211, 0.014529151916503906, 0.014191007614135743, 0.014092000007629394, 0.014209247589111329, 0.014251808166503906, 0.014272607803344727, 0.01447107219696045, 
0.014241791725158692, 0.014292672157287597, 0.014190912246704102, 0.014255583763122559, 0.014244383811950683, 0.01434000015258789, 0.015326496124267577, 0.014240511894226074, 0.014213184356689453, 0.014257632255554199, 0.014135519981384277, 0.014375231742858886, 0.014290047645568847, 0.01455123233795166, 0.014596863746643067, 0.014222720146179198, 0.014303296089172364, 0.014850560188293458, 0.014385408401489257, 0.014590784072875977, 0.01430412769317627, 0.01422060775756836, 0.014246912002563476, 0.014475008010864258, 0.014350336074829101, 0.014245823860168457, 0.014242976188659669, 0.014705504417419434, 0.014524479866027833, 0.014440320014953613, 0.01437504005432129, 0.014362624168395996, 0.014270463943481445, 0.01441817569732666, 0.01404304027557373, 0.014301504135131836, 0.014381600379943847, 0.014362079620361328, 0.014381279945373535, 0.014403327941894532, 0.014360639572143554, 0.014328319549560547, 0.014301535606384277, 0.014216863632202148, 0.01426636791229248, 0.014754079818725585, 0.014339808464050294, 0.014245887756347657, 0.014470527648925782, 0.014355072021484375, 0.014228639602661132, 0.014275775909423829, 0.014295743942260743, 0.014226592063903809, 0.01413094425201416, 0.014235936164855956, 0.014491423606872559, 0.01437673568725586, 0.014444767951965332, 0.014441951751708985, 0.014641599655151367, 0.014436639785766602, 0.014393152236938477, 0.014425472259521484, 0.014363264083862305, 0.014350367546081542, 0.014264479637145996, 0.01431942367553711, 0.014346240043640136, 0.014936063766479492, 0.014350336074829101, 0.01444863986968994, 0.01447321605682373, 0.01436201572418213, 0.014311936378479004, 0.014349984169006349, 0.014338496208190919, 0.014469247817993164, 0.014450559616088867, 0.01443785572052002, 0.014271007537841798, 0.01431510353088379, 0.014305695533752442, 0.014445823669433594, 0.014320192337036132, 0.01440726375579834, 0.014381759643554688, 0.014290176391601562, 0.014341983795166015, 0.01425708770751953, 0.01455023956298828, 0.014610848426818847, 0.014393600463867187, 0.014389247894287109, 0.014817503929138184, 0.01534329605102539, 0.01460643196105957, 0.01439577579498291, 0.014444031715393067, 0.014443039894104004, 0.014495360374450683, 0.014301535606384277, 0.014331968307495117, 0.014401408195495605, 0.014387136459350585, 0.014368608474731445, 0.014335904121398926, 0.014465408325195312, 0.014360608100891114, 0.014397407531738281, 0.014424063682556153, 0.014339424133300782, 0.014332575798034668, 0.014299136161804199, 0.015110431671142578, 0.014677184104919434, 0.014383456230163575, 0.014297344207763673, 0.014284319877624511, 0.014272512435913086, 0.014284704208374023, 0.014194879531860351, 0.014227456092834472, 0.014256447792053222, 0.014233599662780762, 0.014322815895080566, 0.014242815971374511, 0.01495468807220459, 0.01436854362487793, 0.014436256408691407, 0.014410911560058593, 0.01436348819732666, 0.014514399528503417, 0.01558028793334961, 0.014364704132080078, 0.014385791778564453, 0.014293279647827149, 0.014412575721740723, 0.01436956787109375, 0.014474431991577148, 0.01447225570678711, 0.014544447898864747, 0.014297439575195312, 0.014323936462402344, 0.014296319961547852, 0.014373408317565919, 0.014241536140441895, 0.014282912254333495, 0.014467167854309081, 0.014338047981262207, 0.01427244758605957, 0.014303296089172364, 0.014364864349365234, 0.01430025577545166, 0.01427734375, 0.014307583808898926, 0.01421491241455078, 0.014324992179870606, 0.014186400413513184, 0.0142457275390625, 0.013871007919311524, 0.014267871856689453, 0.01426249599456787, 
0.014235679626464843, 0.015176032066345215, 0.015935392379760743, 0.015030112266540527, 0.014518719673156738, 0.014305248260498046, 0.014327008247375489, 0.01425049591064453, 0.014280863761901855, 0.014322688102722168, 0.015252479553222656, 0.014799936294555664, 0.014441408157348633, 0.014272512435913086, 0.014379008293151856, 0.014687487602233887, 0.014985983848571777, 0.01436627197265625, 0.014270912170410157, 0.014305343627929687, 0.014231200218200684, 0.014324000358581543, 0.01437286376953125, 0.014347904205322266, 0.014571871757507324, 0.01439782428741455, 0.014437439918518067, 0.014469728469848633, 0.014321439743041993, 0.014477184295654297, 0.014721055984497071, 0.014358528137207031, 0.014240063667297364, 0.01424182415008545, 0.014293215751647948, 0.014227199554443359, 0.014364416122436524, 0.014378623962402344, 0.014205792427062987, 0.014228832244873047, 0.01438969612121582, 0.014254079818725587, 0.0143657283782959, 0.014242400169372558, 0.014252415657043457, 0.014249567985534667, 0.014569343566894532, 0.01462326431274414, 0.014491423606872559, 0.014510304450988769, 0.01431980800628662, 0.014366144180297852, 0.014311264038085938, 0.014261823654174804, 0.014340224266052247, 0.014299839973449706, 0.014776127815246582, 0.014395808219909668, 0.014507391929626464, 0.014187359809875488, 0.014067296028137208, 0.014465951919555664, 0.014689791679382324, 0.014346752166748047, 0.014323712348937988, 0.014368032455444336, 0.01418336009979248, 0.0145033597946167, 0.014340448379516601, 0.014311424255371094, 0.014502112388610839, 0.014470944404602051, 0.014290431976318359, 0.01454745578765869, 0.017620704650878907, 0.016330432891845704, 0.014461536407470704, 0.014436575889587402, 0.01455452823638916, 0.014422495841979981, 0.014526368141174317, 0.014348544120788575, 0.014277888298034667, 0.014457344055175781, 0.014581760406494141, 0.014269696235656739, 0.014381024360656738, 0.014378975868225098, 0.014419072151184082, 0.014296544075012208, 0.014420191764831542, 0.014435680389404296, 0.014268832206726074, 0.01438096046447754, 0.014412128448486328, 0.015316991806030274, 0.01528831958770752, 0.01568489646911621, 0.014339103698730468, 0.014282239913940429, 0.014405823707580567, 0.014407679557800293, 0.014385151863098144, 0.014651328086853028, 0.014411840438842773, 0.014353599548339844, 0.014508864402770997, 0.01445302391052246, 0.014359999656677246, 0.014251359939575195, 0.014527615547180175, 0.014288991928100585, 0.014374688148498535, 0.014473312377929688, 0.014428000450134277, 0.014585503578186035, 0.014399519920349121, 0.014522687911987305, 0.014368895530700684, 0.014255999565124512, 0.01431065559387207, 0.014191200256347656, 0.014490816116333008, 0.013966400146484375, 0.014246848106384278, 0.014288512229919434, 0.01443411159515381, 0.014307519912719727, 0.015335871696472167, 0.014536640167236329, 0.01435807991027832, 0.014406399726867676, 0.014385120391845703, 0.014523327827453613, 0.01436131191253662, 0.014261440277099609, 0.014398048400878907, 0.014426400184631347, 0.01435212802886963, 0.01427023983001709, 0.014636960029602051, 0.015458432197570801, 0.014469568252563476, 0.015368191719055176, 0.014343999862670898, 0.014379232406616212, 0.014512160301208495, 0.014536864280700683, 0.01470412826538086, 0.01436297607421875, 0.014337984085083008, 0.014319135665893554, 0.014318047523498535, 0.014368767738342286, 0.014350336074829101, 0.014295295715332031, 0.014229248046875, 0.014167648315429687, 0.0142708797454834, 0.014187840461730957, 0.014276896476745606, 0.014430239677429199, 0.014440799713134765, 
0.014256416320800781, 0.01417801570892334, 0.014112928390502929, 0.014138784408569336, 0.014208895683288574, 0.014160479545593262, 0.014122495651245117, 0.014175040245056152, 0.014177663803100586, 0.014108991622924805, 0.01411257553100586, 0.014268704414367676, 0.014241696357727051, 0.01427455997467041, 0.014170080184936524, 0.014501343727111817, 0.014183135986328125, 0.014157024383544922, 0.014139488220214843, 0.014322208404541016, 0.014361791610717774, 0.014344351768493652, 0.014332575798034668, 0.013952256202697754, 0.014191200256347656, 0.01426038360595703, 0.014295040130615234, 0.014280159950256348, 0.01421190357208252, 0.014094047546386719, 0.014217184066772462, 0.014088352203369141, 0.014482751846313476, 0.014017087936401367, 0.014075551986694336, 0.014116576194763183, 0.014129535675048828, 0.014313887596130372, 0.014227295875549316, 0.014392448425292968, 0.014248224258422851, 0.014170495986938477, 0.014321887969970703, 0.0142510404586792, 0.014285056114196778, 0.014265024185180664, 0.01419267177581787, 0.01416806411743164, 0.014186495780944825, 0.014159616470336913, 0.014416447639465332, 0.014378496170043945, 0.014263744354248048, 0.014439359664916992, 0.014204735755920411, 0.01422492790222168, 0.01419696044921875, 0.014155296325683594, 0.014184767723083497, 0.014139455795288086, 0.014111264228820801, 0.014265215873718262, 0.014231552124023437, 0.014340512275695801, 0.014246432304382325, 0.01416204833984375, 0.014274432182312011, 0.01414355182647705, 0.01419871997833252, 0.014153727531433105, 0.0141844482421875, 0.014056927680969238, 0.014172703742980956, 0.014036767959594727, 0.015012384414672852, 0.014532256126403808, 0.014306816101074218, 0.014264703750610352, 0.014243167877197266, 0.014305343627929687, 0.014145312309265136, 0.014105567932128906, 0.014161824226379394, 0.014110336303710938, 0.014182623863220214, 0.014145983695983886, 0.013936639785766602, 0.014381376266479493, 0.014275296211242676, 0.014281439781188964, 0.014289152145385743, 0.014247936248779297, 0.014194944381713866, 0.014169856071472168, 0.014266719818115234, 0.014355839729309082, 0.014425824165344239, 0.01456595230102539, 0.014362815856933595, 0.014358336448669434, 0.01417420768737793, 0.014182687759399414, 0.014140800476074218, 0.014179903984069825, 0.014436927795410157, 0.014188799858093262, 0.0143023042678833, 0.014236543655395508, 0.014402815818786621, 0.01422822380065918, 0.01429468822479248, 0.014350687980651856, 0.014206975936889648, 0.01423574447631836, 0.01405504035949707, 0.014319904327392578, 0.014232671737670899, 0.01437548828125, 0.014502240180969237, 0.014591520309448242, 0.014340031623840331, 0.014371359825134277, 0.014415007591247558, 0.014387231826782226, 0.014283455848693848, 0.01441702365875244, 0.01427558422088623, 0.014464415550231934, 0.014349087715148926, 0.014198592185974121, 0.01425222396850586, 0.014267680168151855, 0.014271007537841798, 0.014219264030456542, 0.014469120025634765, 0.014231616020202637, 0.01428048038482666, 0.014172320365905761, 0.01419264030456543, 0.014251328468322754, 0.014213600158691407, 0.015099200248718261, 0.014366687774658203, 0.0143635835647583, 0.014380255699157714, 0.014244640350341797, 0.014247936248779297, 0.014287967681884766, 0.014154879570007325, 0.013812255859375, 0.014210432052612305, 0.014207615852355956, 0.014072928428649902, 0.01400921630859375, 0.014059231758117676, 0.014055744171142578, 0.014110719680786133, 0.014202943801879883, 0.014274784088134766, 0.014075615882873534, 0.014121184349060059, 0.014090016365051269, 0.014212575912475586, 
0.014506143569946289, 0.014366592407226562, 0.014213472366333009, 0.014153632164001465, 0.01430348777770996, 0.014221343994140625, 0.01420412826538086, 0.014395872116088866, 0.014301471710205078, 0.01421894359588623, 0.014034591674804687, 0.01403059196472168, 0.01410153579711914, 0.014266240119934081, 0.014178303718566895, 0.014346400260925292, 0.014243391990661622, 0.014199071884155274, 0.014149632453918457, 0.014133248329162598, 0.014155648231506347, 0.014090368270874024, 0.014155776023864745, 0.014170111656188965, 0.014155776023864745, 0.014075231552124024, 0.014098527908325196, 0.01425830364227295, 0.014088640213012696, 0.014010368347167968, 0.014202272415161133, 0.014118975639343261, 0.014149408340454101, 0.014158592224121094, 0.014056896209716797, 0.014109151840209961, 0.01409216022491455, 0.014121503829956054, 0.014083776473999023, 0.014233599662780762, 0.01406156826019287, 0.014425888061523437, 0.014364895820617675, 0.014272255897521973, 0.014248288154602051, 0.014411520004272461, 0.015034560203552247, 0.014144576072692872, 0.014199423789978027, 0.013813759803771973, 0.013997728347778321, 0.014057696342468262, 0.014058752059936524, 0.014473919868469238, 0.014147583961486816, 0.014182592391967773, 0.014127103805541993, 0.014162240028381347, 0.014138175964355468, 0.014123904228210448, 0.014139200210571289, 0.014133440017700196, 0.014313471794128419, 0.014106752395629883, 0.014286720275878906, 0.014199007987976074, 0.014485247611999512, 0.015196224212646485, 0.014410016059875488, 0.014233280181884765, 0.014131456375122071, 0.014443519592285157, 0.014318431854248047, 0.01423574447631836, 0.014299072265625, 0.01412287998199463, 0.01447935962677002, 0.01422969627380371, 0.014387200355529785, 0.014307135581970215, 0.014313471794128419, 0.014209280014038086, 0.01444540786743164, 0.014361184120178222, 0.014248255729675294, 0.014231616020202637, 0.01433529567718506, 0.014219903945922851, 0.014329536437988281, 0.014199104309082031, 0.01427455997467041, 0.014532608032226562, 0.014336000442504883, 0.014215231895446778, 0.014167136192321777, 0.014256992340087891, 0.014237664222717284, 0.014464863777160645, 0.014364864349365234, 0.014224831581115722, 0.014294655799865722, 0.014168288230895996, 0.01427353572845459, 0.01420787239074707, 0.014277471542358398, 0.014508319854736327, 0.01441318416595459, 0.014530943870544434, 0.014497759819030762, 0.014487168312072755, 0.014458304405212402, 0.014339008331298829, 0.013884639739990235, 0.014300064086914062, 0.014316864013671875, 0.014369343757629395, 0.014296287536621094, 0.014285728454589844, 0.014354304313659668, 0.01442313575744629, 0.01432646369934082, 0.014372384071350098, 0.014507840156555176, 0.014245920181274414, 0.014248127937316895, 0.014516896247863769, 0.014330975532531738, 0.014201631546020508, 0.014252479553222657, 0.014369471549987793, 0.014383872032165528, 0.01436246395111084, 0.014543104171752929, 0.014556672096252441, 0.01459267234802246, 0.01435632038116455, 0.014381216049194337, 0.014520352363586425, 0.01552995204925537, 0.014411775588989258, 0.014272512435913086, 0.014352000236511231, 0.014324095726013184, 0.014424063682556153, 0.014396767616271972, 0.014326432228088378, 0.014452735900878906, 0.014352383613586426, 0.014439935684204102, 0.01438912010192871, 0.01452012825012207, 0.014342975616455079, 0.014412991523742676, 0.014516480445861816, 0.014370752334594726, 0.01433459186553955, 0.014316608428955078, 0.014352383613586426, 0.014486240386962891, 0.014484767913818359, 0.014361536026000977, 0.014351679801940917, 
0.014410079956054688, 0.014413408279418945, 0.014389375686645507, 0.014385408401489257, 0.014356863975524903, 0.014521408081054687, 0.014394304275512696, 0.014340096473693848, 0.014364671707153321, 0.014507488250732422, 0.01434883213043213, 0.014385343551635742, 0.014503104209899902]",tokens/s,69.68289600223387,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in <listcomp> self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 373, 
in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 252, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 116, in __init__ self.q_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 407781 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 
3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in <listcomp> self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 373, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 252, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 116, in __init__ self.q_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 405915 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1036.30848,10982.653952,0.0,10580.13184,10162.029568,s,1,7.61414697265625,7.61414697265625,0.0,7.61414697265625,7.61414697265625,7.61414697265625,7.61414697265625,[7.61414697265625],,kWh,7.163204066713055e-06,7.828900619077374e-07,3.2891692980097087e-06,1.1235263426630502e-05,,MB,1309.315072,11102.191616,0.0,10689.183744,9358.065152,s,10,8.393236450195312,0.8393236450195312,0.002869803840091578,0.8395683593750001,0.842794677734375,0.8432475891113281,0.8436099182128907,"[0.8327422485351562, 0.8385807495117188, 0.8371638793945313, 0.8385592041015625, 0.8390723266601563, 0.8405418701171875, 0.8437005004882813, 0.8401172485351562, 0.8400643920898437, 0.8426940307617188]",tokens/s,305.00749206707127,kWh,2.4569125960071385e-05,2.7079142448515683e-06,1.6305892674333782e-05,4.358293287925674e-05,tokens/kWh,5873858.941738246,MB,1334.296576,11102.191616,0.0,10689.183744,9397.6704,s,10,30.3186630859375,3.03186630859375,0.007172578636009615,3.0303740234375,3.041907080078125,3.043003955078125,3.0438814550781252,"[3.028551513671875, 3.038030517578125, 3.041663330078125, 3.044100830078125, 3.0235029296875, 3.024722412109375, 3.032196533203125, 3.034852294921875, 3.02336669921875, 3.027676025390625]",tokens/s,20.77928034670528,kWh,8.83919314653455e-05,9.751782066847231e-06,5.856347277666863e-05,0.00015670718630886134,tokens/kWh,402023.6817718776,,s,630,30.314114082336435,0.048117641400534004,0.0003042071957772822,0.048080911636352544,0.04842684936523438,0.0485372314453125,0.04925775985717774,"[0.04887184143066406, 0.04817510223388672, 0.0478047981262207, 0.04794124984741211, 0.047957054138183595, 0.04804886245727539, 0.04795334243774414, 0.04819843292236328, 0.04800921630859375, 0.047915008544921874, 0.04781366348266602, 0.04789894485473633, 0.047930015563964846, 0.04802969741821289, 0.04807680130004883, 0.04808703994750976, 0.04802764892578125, 0.04814828872680664, 0.04790086364746094, 0.0480533447265625, 0.04801411056518555, 0.04831683349609375, 0.048053985595703126, 0.048039936065673826, 0.0478037109375, 0.04774982452392578, 0.04796422576904297, 0.047933536529541014, 0.0478185920715332, 0.048146430969238284, 0.047751167297363284, 0.04800627136230469, 0.04792364883422852, 0.04811001586914063, 0.048056320190429686, 0.048234046936035155, 0.04799686431884766, 0.04856387329101562, 0.047948638916015626, 0.04810249710083008, 0.04808995056152344, 0.0481280632019043, 0.0482193603515625, 0.04824111938476563, 0.04815289688110352, 0.048058113098144534, 0.04792755126953125, 0.04795596694946289, 0.047947200775146484, 0.048101089477539063, 0.048196449279785156, 0.04792057418823242, 0.04799737548828125, 0.048115840911865236, 0.047939487457275394, 0.04808240127563477, 
0.04799756622314453, 0.048078304290771486, 0.0483210563659668, 0.048218017578125, 0.04836975860595703, 0.04835475158691406, 0.04831872177124023, 0.04921132659912109, 0.04842486572265625, 0.04827180862426758, 0.04840047836303711, 0.04776131057739258, 0.04799283218383789, 0.047892478942871096, 0.04786380767822265, 0.047803966522216794, 0.04808703994750976, 0.04790531158447266, 0.04810742568969727, 0.04793753433227539, 0.04807475280761719, 0.04787814331054688, 0.047800319671630856, 0.047796512603759764, 0.04860675048828125, 0.04790703964233398, 0.04839129638671875, 0.048341854095458985, 0.048334049224853515, 0.048184097290039064, 0.048205310821533204, 0.04801587295532227, 0.04822016143798828, 0.0481044807434082, 0.04800611114501953, 0.04807177734375, 0.04822454452514648, 0.048078655242919925, 0.048161598205566404, 0.04818531036376953, 0.04812748718261719, 0.04820419311523438, 0.0482347183227539, 0.048086944580078124, 0.04799283218383789, 0.048130046844482424, 0.04822844696044922, 0.048354942321777346, 0.048509056091308594, 0.04829964828491211, 0.04830057525634766, 0.04814956665039063, 0.04831929779052734, 0.04815679931640625, 0.048268383026123046, 0.048182304382324216, 0.04823855972290039, 0.048220062255859376, 0.04819507217407227, 0.04810121536254883, 0.048697120666503904, 0.04850179290771484, 0.04842684936523438, 0.0484448013305664, 0.04837235260009766, 0.04845481491088867, 0.048408607482910156, 0.048675647735595705, 0.04847206497192383, 0.048541694641113284, 0.049156383514404295, 0.0484431037902832, 0.04807680130004883, 0.048121856689453124, 0.04793135833740234, 0.048046112060546875, 0.04786796951293945, 0.04793251037597656, 0.047997886657714844, 0.04802912139892578, 0.04799097442626953, 0.04794771194458008, 0.0479661750793457, 0.0483900146484375, 0.04838246536254883, 0.04812908935546875, 0.04817401504516602, 0.04811577606201172, 0.048068191528320314, 0.04823484802246094, 0.04840857696533203, 0.04849868774414062, 0.04810870361328125, 0.04808176040649414, 0.048091136932373046, 0.048484352111816405, 0.048121856689453124, 0.04817510223388672, 0.04824838256835937, 0.04822060775756836, 0.048164257049560545, 0.04805299377441406, 0.04792303848266601, 0.04827360153198242, 0.04860860824584961, 0.04818991851806641, 0.04826278305053711, 0.04828412628173828, 0.0482520637512207, 0.04838809585571289, 0.048282623291015625, 0.04832640075683594, 0.04824019241333008, 0.04845395278930664, 0.04935692977905273, 0.048408641815185546, 0.048312255859375, 0.04827078247070313, 0.04832524871826172, 0.04848838424682617, 0.048354591369628906, 0.04838063812255859, 0.048230049133300784, 0.04834339141845703, 0.04833280181884766, 0.04825907135009765, 0.048279552459716796, 0.04842425537109375, 0.048427711486816405, 0.04837580871582031, 0.04859257507324219, 0.04853177642822266, 0.04834304046630859, 0.049330558776855465, 0.04842496109008789, 0.04808294296264649, 0.048502784729003906, 0.04787804794311523, 0.048066654205322266, 0.047923198699951174, 0.04797644805908203, 0.048205825805664064, 0.04801331329345703, 0.048213760375976564, 0.04796646499633789, 0.047951873779296876, 0.04808038330078125, 0.04802406311035156, 0.04800223922729492, 0.048007999420166016, 0.04806655883789063, 0.048320510864257815, 0.04843929672241211, 0.04842623901367187, 0.0483007698059082, 0.04808873748779297, 0.0480478401184082, 0.04824140930175781, 0.048403648376464846, 0.04807468795776367, 0.048124702453613284, 0.04840806579589844, 0.048114177703857425, 0.04828979110717774, 0.0481710090637207, 0.04806195068359375, 0.04818806457519531, 
0.04814803314208985, 0.04820518493652344, 0.048126880645751956, 0.04818329620361328, 0.04850201416015625, 0.048425857543945315, 0.048494464874267576, 0.048492542266845705, 0.04850262451171875, 0.049275039672851566, 0.04822547149658203, 0.04819830322265625, 0.04823875045776367, 0.04846745681762695, 0.04821180725097656, 0.04907689666748047, 0.048263168334960936, 0.04826521682739258, 0.04842700958251953, 0.04836761474609375, 0.04965337753295898, 0.048242401123046875, 0.048761505126953125, 0.048267070770263674, 0.048336158752441405, 0.04832758331298828, 0.04850688171386719, 0.04858879852294922, 0.04842684936523438, 0.0492154541015625, 0.048656383514404294, 0.04799900817871094, 0.04787510299682617, 0.04785654449462891, 0.04773600006103516, 0.0478271369934082, 0.04777011108398437, 0.04804393768310547, 0.04785587310791015, 0.047830368041992186, 0.04767964935302734, 0.04795363235473633, 0.04804073715209961, 0.047896160125732425, 0.04766966247558594, 0.047981727600097654, 0.04774911880493164, 0.04772700881958008, 0.04786220932006836, 0.04835523223876953, 0.04848239898681641, 0.048189441680908204, 0.048097278594970705, 0.04791043090820313, 0.048024032592773436, 0.04806246566772461, 0.048032833099365235, 0.04793849563598633, 0.04791910552978516, 0.04777568054199219, 0.04784288024902344, 0.04778854370117187, 0.04811468887329102, 0.04767596817016601, 0.047707839965820314, 0.047938304901123045, 0.04792063903808594, 0.047642879486083985, 0.048060737609863284, 0.04792646408081055, 0.04797516632080078, 0.0478306884765625, 0.04799478530883789, 0.04798918533325195, 0.04798873519897461, 0.04789452743530274, 0.04785737609863281, 0.04804022216796875, 0.048091136932373046, 0.04802550506591797, 0.04791484832763672, 0.04792531204223633, 0.04790496063232422, 0.04797439956665039, 0.04781795120239258, 0.047846176147460937, 0.04805836868286133, 0.048092544555664064, 0.0483109130859375, 0.04846899032592773, 0.04818431854248047, 0.048217086791992186, 0.04906790542602539, 0.04822233581542969, 0.04792902374267578, 0.04770848083496094, 0.04765695953369141, 0.04764863967895508, 0.04761612701416015, 0.0476855697631836, 0.04777785491943359, 0.04771228790283203, 0.0476343994140625, 0.048481822967529294, 0.04783321762084961, 0.04780271911621094, 0.04756278228759766, 0.04758927917480469, 0.04757715225219727, 0.0477757453918457, 0.04803583908081055, 0.04807254409790039, 0.04812524795532227, 0.04816681671142578, 0.04785606384277344, 0.04761964797973633, 0.047586273193359375, 0.04781052780151367, 0.04794777679443359, 0.047712257385253906, 0.04786995315551758, 0.04823616027832031, 0.04808486557006836, 0.047921791076660156, 0.04801875305175781, 0.048126529693603516, 0.047896255493164064, 0.048062049865722656, 0.04796044921875, 0.047974143981933594, 0.047931999206542966, 0.04845916748046875, 0.04809142303466797, 0.04837401580810547, 0.04828303909301758, 0.04827817535400391, 0.04803936004638672, 0.048142913818359376, 0.04796211242675781, 0.04796790313720703, 0.047958015441894535, 0.0480687370300293, 0.048476383209228514, 0.04844543838500977, 0.048015102386474606, 0.04811711883544922, 0.04806870269775391, 0.04826806259155274, 0.04791446304321289, 0.04814009475708008, 0.04805295944213867, 0.04826521682739258, 0.04809318542480469, 0.04827151870727539, 0.048232288360595704, 0.049060577392578124, 0.04811718368530273, 0.047928062438964844, 0.0478513298034668, 0.04775321578979492, 0.04769177627563476, 0.04766304016113281, 0.047790145874023436, 0.047564254760742185, 0.048764671325683594, 0.04768438339233398, 0.047730049133300784, 
0.04757487869262695, 0.04778268814086914, 0.04786806488037109, 0.04801257705688477, 0.04911980819702148, 0.047882015228271485, 0.048066783905029296, 0.04807702255249023, 0.04810044860839844, 0.04820038223266602, 0.04808863830566406, 0.04806399917602539, 0.04802761459350586, 0.048118751525878904, 0.04795391845703125, 0.047874046325683595, 0.04802463912963867, 0.047952190399169925, 0.049961406707763674, 0.04913520050048828, 0.04816307067871094, 0.04815289688110352, 0.04778192138671875, 0.04813030242919922, 0.04837472152709961, 0.04800390243530273, 0.047968128204345706, 0.0482116813659668, 0.04823081588745117, 0.04828364944458008, 0.048232673645019535, 0.048170528411865234, 0.04804403305053711, 0.0480340461730957, 0.04796211242675781, 0.04795337677001953, 0.04822441482543945, 0.04816934585571289, 0.048137439727783206, 0.04829872131347656, 0.048273342132568356, 0.04832876968383789, 0.0481014404296875, 0.04798195266723633, 0.047866497039794925, 0.048064769744873045, 0.04791270446777344, 0.048146430969238284, 0.04826931381225586, 0.0484167366027832, 0.04843955230712891, 0.0489697265625, 0.04820908737182617, 0.04771923065185547, 0.04796819305419922, 0.047693950653076175, 0.0478304328918457, 0.04782281494140625, 0.04774137496948242, 0.04784262466430664, 0.047976287841796875, 0.047879135131835934, 0.04813929748535156, 0.047825889587402345, 0.048025215148925784, 0.047906463623046874, 0.04788003158569336, 0.047951744079589846, 0.04814694213867188, 0.048003711700439454, 0.04840000152587891, 0.04821939086914063, 0.04811459350585937, 0.04825075149536133, 0.0480052490234375, 0.04789648056030273, 0.047993022918701174, 0.04780851364135742, 0.048189441680908204, 0.04841267013549805, 0.04802560043334961, 0.047983680725097656, 0.04807571029663086, 0.04800716781616211, 0.04798668670654297, 0.047905887603759766, 0.04828467178344727, 0.048072128295898436, 0.047881919860839846, 0.04808143997192383, 0.048309951782226565, 0.048414817810058595, 0.048568225860595705, 0.04830393600463867, 0.04819225692749023, 0.04912947082519531, 0.048395423889160155, 0.04818726348876953, 0.04824367904663086, 0.048345088958740234, 0.0485120964050293, 0.048293888092041014, 0.04837606430053711, 0.048424766540527346, 0.04832547378540039, 0.048261119842529294, 0.04832428741455078, 0.04817964935302734, 0.04826508712768555, 0.04838809585571289, 0.048683006286621096, 0.048376991271972654, 0.04828780746459961, 0.048440097808837894, 0.04927648162841797, 0.04829478454589844, 0.047941150665283205, 0.04790524673461914, 0.04775910568237305, 0.047853057861328124, 0.04799132919311523, 0.04781407928466797, 0.04773731231689453, 0.04799110412597656, 0.047939487457275394, 0.04804617691040039, 0.04791910552978516, 0.04767129516601563, 0.047736190795898435, 0.04789344024658203, 0.047728160858154296, 0.04787625503540039, 0.04763043212890625, 0.04788835144042969, 0.04792063903808594, 0.047975936889648435, 0.04775417709350586, 0.047900672912597655, 0.04781260681152344, 0.0478474235534668, 0.047718399047851565, 0.04767097473144531, 0.04761043167114258, 0.04780976104736328, 0.04780905532836914, 0.04795993423461914, 0.04769750213623047, 0.047925121307373045, 0.04761644744873047, 0.047769824981689454, 0.04786092758178711, 0.048032577514648435, 0.047857662200927735, 0.04783660888671875, 0.0478746223449707, 0.0479180793762207, 0.048075775146484374, 0.04798668670654297, 0.04772016143798828, 0.04770230484008789, 0.048025440216064454, 0.048244895935058596, 0.04810908889770508, 0.04818796920776367, 0.048130046844482424, 0.04823030471801758, 0.048379905700683595, 
0.04823468780517578, 0.04815423965454101, 0.048312511444091794, 0.04869862365722656, 0.04822630310058594, 0.04816716766357422, 0.04827961730957031, 0.04830252838134766, 0.048336894989013675, 0.048363521575927736, 0.04938819122314453, 0.04828579330444336, 0.04794678497314453, 0.04803372955322266, 0.0476005744934082, 0.04782236862182617, 0.047710113525390625, 0.04794630432128906, 0.04794732666015625, 0.04803219223022461, 0.04784867095947266, 0.047791038513183594, 0.0479537582397461, 0.047857662200927735, 0.047728641510009766, 0.04809913635253906, 0.047937728881835937, 0.047908863067626956, 0.04793711853027344, 0.047722911834716795, 0.04783103942871094, 0.047965984344482425, 0.047736225128173826, 0.04752838516235352, 0.04783756637573242, 0.047638015747070314, 0.047600128173828124, 0.04834860610961914, 0.04791302490234375, 0.04791747283935547, 0.04784137725830078, 0.04792115020751953, 0.04791238403320312, 0.04788076782226563, 0.04786822509765625, 0.04840179061889648, 0.048009536743164063, 0.04801536178588867, 0.04799488067626953, 0.04810044860839844, 0.04835011291503906, 0.04833846282958985, 0.04823292922973633, 0.04832985687255859, 0.0482146224975586, 0.04809888076782227, 0.048228351593017575, 0.04807548904418945, 0.04819968032836914, 0.048172286987304684, 0.04795651245117188, 0.0483977279663086, 0.04804268646240235, 0.04824825668334961, 0.04827116775512695, 0.048239486694335934, 0.04832665634155273, 0.04840857696533203, 0.04814847946166992, 0.048176513671875, 0.04832729721069336, 0.04834918212890625, 0.04833017730712891]",tokens/s,20.782398531880286,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,785.16224,748.552192,0.0,346.03008,335.0016,s,1,7.66986865234375,7.66986865234375,0.0,7.66986865234375,7.66986865234375,7.66986865234375,7.66986865234375,[7.66986865234375],,kWh,2.309276374990077e-06,2.4622032047242234e-07,9.716674440052309e-07,3.5271641394677302e-06,,MB,1112.375296,771.620864,0.0,358.612992,302.626816,s,20,0.4996797466278077,0.024983987331390383,0.00021354121662508496,0.02491433620452881,0.02520182056427002,0.025335436534881593,0.025672707118988038,"[0.02575702476501465, 0.024903039932250976, 0.024983680725097657, 0.024847488403320312, 0.024896127700805664, 0.024768159866333007, 0.02490278434753418, 0.024962432861328127, 0.024825952529907228, 0.024889312744140624, 0.025313247680664064, 0.024968896865844727, 0.02493219184875488, 0.024952255249023437, 0.024907487869262696, 0.024921184539794923, 0.02486409568786621, 0.024868255615234376, 0.02502668762207031, 0.02518943977355957]",tokens/s,10246.56299270359,kWh,8.125829826851966e-07,8.961402839615508e-08,5.406154016278364e-07,1.442812412709188e-06,tokens/kWh,177431243.1366635,MB,1137.3568,786.300928,0.0,373.293056,302.629376,s,20,9.9093603515625,0.495468017578125,0.004718615036871436,0.4963743896484375,0.5007378173828125,0.5007910247802735,0.5014878924560547,"[0.4949632568359375, 0.49865304565429686, 0.4926600952148438, 0.5007369995117188, 0.49496697998046874, 0.4951472778320313, 0.5004926147460937, 0.49669610595703123, 0.49607171630859376, 0.4996105041503906, 0.4918559265136719, 0.4900807800292969, 
0.5007451782226563, 0.4979345703125, 0.49667706298828124, 0.4928561706542969, 0.4848450012207031, 0.4848236389160156, 0.501662109375, 0.4978813171386719]",tokens/s,127.15250584275344,kWh,1.3437106397870037e-05,1.481894423975697e-06,5.704257319133389e-06,2.0623258140979125e-05,tokens/kWh,3054803.4442150937,,s,1260,9.897352099895489,0.007855041349123393,0.00018428820107350562,0.007847616195678711,0.008012829113006591,0.008072727918624878,0.00845911241531372,"[0.007912032127380371, 0.007809023857116699, 0.007774208068847656, 0.00809779167175293, 0.007892320156097412, 0.007920063972473145, 0.00784611177444458, 0.0076902399063110355, 0.007763904094696045, 0.007798655986785889, 0.00780291223526001, 0.007792799949645996, 0.007747583866119385, 0.007751679897308349, 0.007688191890716553, 0.007791935920715332, 0.0077034239768981935, 0.007591743946075439, 0.007614304065704345, 0.007610527992248535, 0.007669760227203369, 0.0076776638031005855, 0.007743775844573975, 0.007679232120513916, 0.0077630081176757815, 0.007652383804321289, 0.007751999855041504, 0.007759935855865478, 0.007700767993927002, 0.007649600028991699, 0.007648672103881836, 0.007696671962738037, 0.00771452808380127, 0.007991583824157714, 0.008040032386779784, 0.008015551567077638, 0.008018655776977539, 0.008036383628845215, 0.007978847980499268, 0.008030048370361328, 0.007995584011077882, 0.007970911979675293, 0.007875679969787597, 0.007924928188323974, 0.007956192016601563, 0.008037823677062989, 0.00802620792388916, 0.00807164764404297, 0.007855552196502686, 0.00887615966796875, 0.007917791843414306, 0.007878975868225098, 0.007782303810119629, 0.007864319801330566, 0.007861792087554931, 0.0077738242149353026, 0.008098719596862793, 0.0077454719543457035, 0.007763999938964844, 0.00780079984664917, 0.007913472175598145, 0.007772223949432373, 0.007813024044036864, 0.007523359775543213, 0.007960319995880127, 0.007766143798828125, 0.008121919631958009, 0.008097248077392578, 0.00973091220855713, 0.00787065601348877, 0.007929599761962891, 0.008769599914550782, 0.007911263942718506, 0.007911168098449706, 0.007800608158111572, 0.00781273603439331, 0.007901472091674805, 0.008134752273559571, 0.008104576110839843, 0.008181759834289551, 0.007876832008361817, 0.007861440181732177, 0.007821919918060303, 0.007847936153411865, 0.0078154878616333, 0.007871359825134277, 0.007883008003234864, 0.00783785581588745, 0.007748127937316895, 0.007857984066009521, 0.007745823860168457, 0.007726719856262207, 0.007815328121185303, 0.007754848003387451, 0.0076943678855896, 0.007801184177398682, 0.007815423965454102, 0.007704544067382813, 0.007783904075622559, 0.007776735782623291, 0.007760479927062989, 0.007818975925445556, 0.007823423862457275, 0.007821311950683594, 0.007890944004058837, 0.007875872135162354, 0.008082143783569336, 0.007871615886688232, 0.00792633581161499, 0.007877984046936036, 0.008036576271057128, 0.007886688232421874, 0.00794921588897705, 0.007940095901489258, 0.00785203218460083, 0.007848063945770263, 0.0077617278099060055, 0.007807007789611816, 0.007915264129638671, 0.0079170241355896, 0.007879168033599853, 0.007737184047698974, 0.007796512126922607, 0.007778079986572266, 0.007774623870849609, 0.00785859203338623, 0.007539904117584229, 0.007895616054534912, 0.007712959766387939, 0.007808351993560791, 0.007827936172485351, 0.007796544075012207, 0.007813695907592774, 0.007787839889526367, 0.007659488201141358, 0.007680287837982178, 0.007778751850128174, 0.007778304100036621, 0.00783903980255127, 0.007834112167358399, 0.007772480010986328, 
0.007790463924407959, 0.007847008228302001, 0.0077519679069519044, 0.007727839946746826, 0.00789084815979004, 0.007810976028442383, 0.007744703769683838, 0.007779232025146484, 0.007808063983917236, 0.007825439929962158, 0.007852159976959228, 0.007870431900024413, 0.007807807922363281, 0.007858272075653077, 0.007913375854492187, 0.007832608222961425, 0.007829887866973877, 0.007758272171020508, 0.00787062406539917, 0.007772223949432373, 0.008019552230834961, 0.007743167877197266, 0.007637983798980713, 0.007660255908966064, 0.0077281279563903805, 0.007651296138763428, 0.007594272136688233, 0.007558271884918213, 0.007631455898284912, 0.007792031764984131, 0.007795711994171142, 0.007814176082611084, 0.007807456016540528, 0.007905375957489014, 0.00789683198928833, 0.00787507200241089, 0.008096832275390625, 0.00786412811279297, 0.007883552074432374, 0.007942240238189697, 0.00788259220123291, 0.007820640087127686, 0.007932735919952392, 0.007968768119812012, 0.007919968128204346, 0.007882400035858154, 0.007966720104217529, 0.007903232097625732, 0.007638976097106933, 0.008020031929016113, 0.007843071937561036, 0.007915775775909423, 0.007873248100280762, 0.007876351833343505, 0.00788809585571289, 0.007836480140686036, 0.00800153636932373, 0.00793836784362793, 0.007884704113006591, 0.008283488273620606, 0.008128959655761719, 0.007864319801330566, 0.007982143878936768, 0.008027071952819825, 0.007974912166595459, 0.008011039733886718, 0.007996384143829346, 0.007849792003631591, 0.007909440040588378, 0.007958399772644042, 0.007886847972869874, 0.007882751941680909, 0.007880256175994874, 0.007879104137420654, 0.007924895763397216, 0.007987391948699952, 0.007912320137023926, 0.008046367645263671, 0.007870431900024413, 0.007833119869232177, 0.007922111988067627, 0.007880767822265626, 0.007941279888153076, 0.00790172815322876, 0.008073792457580567, 0.007932991981506347, 0.007884607791900635, 0.007931039810180665, 0.00785584020614624, 0.007921664237976075, 0.007983200073242188, 0.008090911865234375, 0.007996223926544189, 0.007927616119384766, 0.00790937614440918, 0.007912767887115478, 0.007948031902313232, 0.007898047924041748, 0.007948287963867188, 0.00799948787689209, 0.007855455875396728, 0.007930208206176757, 0.007994751930236816, 0.007996416091918946, 0.00812880039215088, 0.00800649642944336, 0.007932735919952392, 0.008079360008239746, 0.00785100793838501, 0.00785100793838501, 0.007856128215789794, 0.007618720054626465, 0.007905280113220215, 0.007841440200805664, 0.00785852813720703, 0.007964735984802246, 0.007829440116882325, 0.007902880191802979, 0.007773983955383301, 0.007845664024353027, 0.007899775981903076, 0.007852479934692383, 0.007915040016174317, 0.007860383987426757, 0.007854080200195313, 0.007890272140502929, 0.007993919849395752, 0.007852384090423584, 0.007765439987182618, 0.007985504150390624, 0.007813119888305664, 0.007964672088623047, 0.007929855823516846, 0.007887199878692627, 0.00798684787750244, 0.00788479995727539, 0.007880224227905273, 0.007905759811401368, 0.007853792190551758, 0.0077990078926086425, 0.008042688369750977, 0.007976448059082031, 0.007838079929351807, 0.007966432094573974, 0.007776607990264892, 0.00785532808303833, 0.007770463943481445, 0.00775984001159668, 0.0077766718864440915, 0.007765664100646973, 0.007840288162231446, 0.0077576317787170414, 0.007727104187011719, 0.007771840095520019, 0.007805280208587647, 0.007786623954772949, 0.008025983810424804, 0.007944191932678223, 0.007911392211914062, 0.00802406406402588, 0.007893055915832519, 0.00777350378036499, 
0.007766784191131592, 0.007753600120544433, 0.007826816082000733, 0.0077729601860046385, 0.007783936023712158, 0.00772540807723999, 0.007755775928497314, 0.007739391803741455, 0.007782400131225586, 0.007781504154205322, 0.00777510404586792, 0.007846975803375244, 0.007638976097106933, 0.00791590404510498, 0.007882880210876466, 0.0077760319709777835, 0.007768159866333008, 0.007821119785308838, 0.008417856216430665, 0.00785920000076294, 0.008098336219787597, 0.00783516788482666, 0.007783328056335449, 0.00783081579208374, 0.007754144191741944, 0.007830880165100097, 0.007812096118927002, 0.007790143966674805, 0.007690688133239746, 0.007743807792663574, 0.007716544151306152, 0.007719071865081787, 0.0077330880165100094, 0.007843200206756593, 0.007861184120178224, 0.007749311923980713, 0.007798048019409179, 0.007887167930603027, 0.0077992000579833985, 0.007778240203857422, 0.007761888027191162, 0.007868415832519531, 0.007725312232971192, 0.007786208152770996, 0.007708096027374268, 0.007910079956054687, 0.007731264114379883, 0.007753727912902832, 0.0078048639297485355, 0.007766208171844482, 0.007787519931793213, 0.007858975887298584, 0.007816768169403077, 0.007862592220306396, 0.007882688045501709, 0.00796889591217041, 0.0078787522315979, 0.007864511966705323, 0.007874591827392578, 0.007916927814483643, 0.00794159984588623, 0.008012736320495605, 0.007947936058044433, 0.007820672035217285, 0.007815839767456055, 0.007807295799255371, 0.008127743721008301, 0.007977312088012695, 0.007898623943328858, 0.007904160022735595, 0.00800767993927002, 0.007860223770141601, 0.007901311874389649, 0.007846943855285645, 0.007988096237182617, 0.007528319835662842, 0.00784009599685669, 0.007762176036834717, 0.00779423999786377, 0.00794262409210205, 0.007942431926727295, 0.00792086410522461, 0.007940832138061524, 0.007914463996887207, 0.007934783935546875, 0.007905280113220215, 0.007878655910491944, 0.007974815845489503, 0.008008831977844239, 0.00805782413482666, 0.008002911567687988, 0.008065695762634278, 0.007969823837280274, 0.007913695812225343, 0.007983871936798096, 0.007921472072601318, 0.007913663864135742, 0.007868768215179444, 0.007858176231384278, 0.008154784202575684, 0.008111136436462402, 0.008123519897460938, 0.008005696296691895, 0.00795743989944458, 0.007916384220123291, 0.007940095901489258, 0.008007391929626465, 0.008007904052734376, 0.007927743911743165, 0.00794432020187378, 0.007956480026245117, 0.007842144012451173, 0.007929120063781738, 0.007926144123077393, 0.00787660789489746, 0.008132608413696289, 0.007923711776733398, 0.007938176155090333, 0.007933087825775146, 0.00789363193511963, 0.007873695850372314, 0.008063936233520507, 0.007880703926086426, 0.007950496196746827, 0.007861792087554931, 0.00787395191192627, 0.008524224281311036, 0.00797654390335083, 0.00788374376296997, 0.007794591903686523, 0.00787436819076538, 0.007887040138244629, 0.00787062406539917, 0.007951488018035888, 0.007820000171661377, 0.007860223770141601, 0.007847936153411865, 0.0078123841285705565, 0.007558144092559814, 0.008112128257751466, 0.008206656455993652, 0.008101568222045899, 0.00792307186126709, 0.00786844778060913, 0.007844384193420411, 0.007984352111816407, 0.00800649642944336, 0.007911295890808105, 0.008005760192871093, 0.007993184089660644, 0.008101216316223145, 0.008044992446899414, 0.007969215869903564, 0.008028096199035644, 0.007915200233459473, 0.007821631908416749, 0.007815264225006103, 0.007954463958740235, 0.007825535774230957, 0.007808191776275635, 0.007867167949676514, 0.007870304107666015, 
0.007847871780395508, 0.007847936153411865, 0.007802688121795655, 0.00780841588973999, 0.00786627197265625, 0.007979904174804688, 0.00787007999420166, 0.007825791835784912, 0.007815167903900147, 0.007765344142913818, 0.007840447902679444, 0.007849760055541992, 0.008014016151428223, 0.00790067195892334, 0.007870975971221925, 0.007774208068847656, 0.007776256084442139, 0.007714687824249267, 0.00781440019607544, 0.007875455856323241, 0.007813119888305664, 0.007800479888916015, 0.007778304100036621, 0.007708159923553467, 0.008053855895996094, 0.007976128101348876, 0.007840415954589845, 0.007727007865905761, 0.007792031764984131, 0.007872896194458008, 0.007878047943115234, 0.00787334394454956, 0.007849952220916749, 0.007777664184570312, 0.007842463970184326, 0.007783552169799804, 0.00771779203414917, 0.0077762241363525395, 0.007837183952331543, 0.007625311851501465, 0.00786182403564453, 0.007784128189086914, 0.007830560207366943, 0.007938015937805175, 0.007774335861206055, 0.007701951980590821, 0.007732992172241211, 0.007881279945373534, 0.007852159976959228, 0.00780083179473877, 0.007821311950683594, 0.00785203218460083, 0.0079301438331604, 0.00786198377609253, 0.008017919540405273, 0.007968768119812012, 0.00792729616165161, 0.007953023910522461, 0.00782153606414795, 0.007921311855316162, 0.008027615547180176, 0.008008159637451173, 0.007927743911743165, 0.00790499210357666, 0.007882495880126954, 0.007837728023529052, 0.007823840141296386, 0.00781654405593872, 0.007934783935546875, 0.007933663845062255, 0.007821824073791504, 0.007946271896362305, 0.007868159770965576, 0.00799129581451416, 0.007782400131225586, 0.007710048198699951, 0.007807551860809326, 0.007800447940826416, 0.00779311990737915, 0.007799903869628907, 0.00792790412902832, 0.007833631992340087, 0.007882976055145263, 0.007911903858184815, 0.007900991916656493, 0.007973152160644531, 0.007826528072357179, 0.00789792013168335, 0.007911488056182862, 0.007890975952148438, 0.007862336158752441, 0.007812096118927002, 0.007817823886871338, 0.007796927928924561, 0.007845183849334718, 0.007811935901641845, 0.007788544178009033, 0.007828479766845703, 0.007869184017181396, 0.00784000015258789, 0.008173631668090821, 0.007810336112976074, 0.007572735786437988, 0.00795904016494751, 0.007915775775909423, 0.007876927852630616, 0.007923583984375, 0.007920608043670655, 0.007994048118591309, 0.007884960174560546, 0.007997280120849609, 0.007834784030914307, 0.007827904224395752, 0.00790387201309204, 0.007834911823272705, 0.007875232219696045, 0.007886240005493164, 0.007971104145050048, 0.007891424179077148, 0.00779369592666626, 0.008973119735717773, 0.007890944004058837, 0.007907328128814697, 0.008025983810424804, 0.00791155195236206, 0.007804768085479736, 0.007915616035461426, 0.007906400203704833, 0.00793235206604004, 0.007899839878082276, 0.0077981438636779785, 0.007856736183166504, 0.007845759868621826, 0.007782464027404785, 0.008480704307556153, 0.007765056133270264, 0.007922624111175537, 0.00786198377609253, 0.00790342378616333, 0.007927072048187257, 0.007895999908447266, 0.007854015827178954, 0.00790665578842163, 0.007901535987854005, 0.007829343795776367, 0.00796623992919922, 0.007760000228881836, 0.007771135807037354, 0.007749087810516357, 0.007819519996643067, 0.007829535961151123, 0.007806975841522217, 0.007860223770141601, 0.007816703796386718, 0.007774400234222412, 0.007780416011810303, 0.0077396478652954105, 0.007786496162414551, 0.008013664245605468, 0.008169119834899902, 0.007813632011413574, 0.007753727912902832, 0.007761888027191162, 
0.008132639884948731, 0.009338879585266113, 0.0076399679183959965, 0.007822783946990966, 0.007801407814025879, 0.007710847854614258, 0.0077675838470458985, 0.00784607982635498, 0.007768511772155762, 0.007787775993347168, 0.007815231800079345, 0.0077411518096923825, 0.007841536045074463, 0.007905824184417725, 0.007794623851776123, 0.007765759944915771, 0.007731200218200684, 0.00769916820526123, 0.00781824016571045, 0.007846015930175781, 0.007814015865325928, 0.007800320148468018, 0.0077807998657226565, 0.00782102394104004, 0.007768320083618164, 0.00786409616470337, 0.007753056049346924, 0.007758336067199707, 0.0077255678176879885, 0.007714655876159668, 0.007778016090393067, 0.007768095970153809, 0.007788095951080322, 0.007765952110290527, 0.0077604160308837895, 0.007674464225769043, 0.007691711902618408, 0.007661503791809082, 0.00771347188949585, 0.007715775966644287, 0.007727871894836426, 0.007765952110290527, 0.007798848152160645, 0.00786636781692505, 0.007818560123443603, 0.007765920162200928, 0.007801631927490235, 0.007745600223541259, 0.007819200038909912, 0.008019968032836914, 0.007970016002655029, 0.007920447826385499, 0.007892000198364257, 0.007795648097991943, 0.00782467222213745, 0.0077114558219909665, 0.0077740478515625, 0.007778463840484619, 0.007751679897308349, 0.008112128257751466, 0.007734432220458985, 0.007747615814208984, 0.00789299201965332, 0.00794707202911377, 0.008028160095214844, 0.0075435199737548825, 0.007761663913726806, 0.007724736213684082, 0.007786816120147705, 0.0077983360290527345, 0.007762335777282715, 0.007736512184143066, 0.007682559967041015, 0.00773360013961792, 0.007708672046661377, 0.0077346558570861815, 0.007695136070251465, 0.007749472141265869, 0.007772448062896728, 0.007687647819519043, 0.0076802558898925784, 0.007723040103912353, 0.0077127361297607425, 0.007610367774963379, 0.007651328086853027, 0.0077619199752807615, 0.007768064022064209, 0.007682047843933106, 0.007641088008880615, 0.007792640209197998, 0.007718688011169434, 0.007686016082763672, 0.007684447765350342, 0.007722976207733154, 0.007657248020172119, 0.007637216091156006, 0.00762883186340332, 0.007639039993286132, 0.00764521598815918, 0.0076269440650939945, 0.007644927978515625, 0.00763647985458374, 0.0076334400177001955, 0.007708703994750977, 0.008015775680541992, 0.007833951950073243, 0.00785974407196045, 0.00788908815383911, 0.007849472045898438, 0.007788415908813477, 0.007965312004089356, 0.007805984020233154, 0.007777503967285156, 0.007934048175811767, 0.007864223957061768, 0.007831456184387207, 0.007825247764587403, 0.007790847778320313, 0.007732992172241211, 0.007693696022033692, 0.0078089280128479, 0.0078076162338256835, 0.007797920227050782, 0.00789728021621704, 0.007908448219299317, 0.007804416179656983, 0.0087041597366333, 0.008143168449401855, 0.007565408229827881, 0.007749631881713868, 0.007770112037658691, 0.00783353614807129, 0.007931968212127686, 0.007970431804656982, 0.007901023864746094, 0.007891488075256348, 0.008159232139587403, 0.008050687789916992, 0.00811945629119873, 0.008043168067932129, 0.007987391948699952, 0.00801798439025879, 0.008140735626220703, 0.00797488021850586, 0.008031840324401856, 0.007839839935302734, 0.007901792049407958, 0.007992191791534424, 0.008022496223449707, 0.007963039875030518, 0.007908639907836914, 0.007838431835174561, 0.007929247856140137, 0.007979551792144775, 0.00802569580078125, 0.007920032024383544, 0.007875872135162354, 0.00788150405883789, 0.007985151767730713, 0.0080447998046875, 0.007970079898834229, 0.007912000179290772, 
0.007891039848327636, 0.00787395191192627, 0.007931424140930175, 0.008040800094604492, 0.007932064056396484, 0.007921855926513672, 0.008069375991821288, 0.00802400016784668, 0.008077280044555664, 0.007870207786560059, 0.0079301438331604, 0.00792406415939331, 0.00787011194229126, 0.007904640197753906, 0.007903647899627686, 0.007905504226684571, 0.00802406406402588, 0.008015423774719239, 0.007980480194091796, 0.008043199539184571, 0.007921472072601318, 0.007950016021728516, 0.007908160209655762, 0.007938079833984375, 0.007839263916015625, 0.007761343955993653, 0.007856959819793702, 0.007854112148284912, 0.007829055786132813, 0.007643328189849853, 0.007958591938018799, 0.007950079917907716, 0.007847936153411865, 0.007847936153411865, 0.007829504013061523, 0.007815167903900147, 0.007878367900848388, 0.007911104202270508, 0.007848447799682617, 0.007960671901702882, 0.008157183647155761, 0.007812704086303711, 0.007884384155273438, 0.00787334394454956, 0.007943200111389161, 0.007965663909912109, 0.00790118408203125, 0.007921664237976075, 0.008068639755249024, 0.007991519927978516, 0.007999743938446045, 0.008017919540405273, 0.007950335979461669, 0.007935008049011231, 0.00800438404083252, 0.007869984149932861, 0.007877600193023682, 0.007917247772216796, 0.008013824462890624, 0.007843200206756593, 0.007883359909057617, 0.007886208057403565, 0.008290975570678712, 0.007873760223388673, 0.007842463970184326, 0.007942336082458496, 0.007819200038909912, 0.007960800170898438, 0.007873568058013916, 0.00783846378326416, 0.007900479793548585, 0.007871327877044677, 0.007856063842773438, 0.007839168071746826, 0.00784441614151001, 0.007880608081817626, 0.007802624225616455, 0.00783788776397705, 0.007853184223175049, 0.007912384033203126, 0.007823359966278077, 0.00786636781692505, 0.0077816638946533205, 0.007837984085083008, 0.00796239995956421, 0.007890944004058837, 0.007844351768493652, 0.007784671783447265, 0.007815455913543701, 0.007889887809753419, 0.007858880043029785, 0.007739007949829102, 0.007745408058166504, 0.008015616416931153, 0.007811583995819092, 0.007830336093902587, 0.007773056030273438, 0.0081430082321167, 0.009371616363525391, 0.007842688083648682, 0.007898111820220948, 0.007896543979644776, 0.007753983974456787, 0.0077110080718994145, 0.007798975944519043, 0.0077432641983032225, 0.007802559852600098, 0.007892864227294922, 0.007774752140045166, 0.007765920162200928, 0.007766047954559326, 0.007896512031555177, 0.00783622407913208, 0.007790592193603516, 0.007984127998352051, 0.007927135944366455, 0.007882400035858154, 0.008056832313537597, 0.007931327819824219, 0.007909952163696289, 0.007970816135406494, 0.007875999927520753, 0.007793248176574707, 0.007831071853637695, 0.007827744007110596, 0.007875904083251953, 0.007809919834136963, 0.007825695991516113, 0.007735007762908935, 0.007708479881286621, 0.007869728088378906, 0.007953536033630372, 0.007890528202056884, 0.007960991859436035, 0.007867487907409668, 0.007889599800109863, 0.007866687774658203, 0.00782742404937744, 0.007800479888916015, 0.007890495777130127, 0.007838208198547364, 0.007825407981872558, 0.007794688224792481, 0.00786243200302124, 0.007894944190979004, 0.007800767898559571, 0.007847616195678711, 0.007787072181701661, 0.007745503902435303, 0.007747424125671387, 0.007772287845611572, 0.007868224143981934, 0.007846015930175781, 0.008023584365844726, 0.007784639835357666, 0.007600319862365723, 0.00810422420501709, 0.0077563199996948246, 0.007809023857116699, 0.007718400001525879, 0.007705088138580322, 0.007796735763549805, 
0.008148159980773926, 0.007783552169799804, 0.007708191871643066, 0.007714975833892823, 0.0077190399169921875, 0.007749631881713868, 0.007716671943664551, 0.007639296054840088, 0.007696479797363281, 0.0077127361297607425, 0.007665408134460449, 0.007999072074890137, 0.007763711929321289, 0.007736032009124756, 0.007712831974029541, 0.007636864185333252, 0.007681951999664306, 0.007802112102508545, 0.00767903995513916, 0.007780032157897949, 0.009535231590270997, 0.0099782075881958, 0.008072671890258789, 0.008047136306762695, 0.007948224067687987, 0.007978240013122558, 0.007897215843200684, 0.0077955198287963864, 0.007761792182922363, 0.007778304100036621, 0.007723135948181152, 0.007655295848846435, 0.007600319862365723, 0.007710527896881103, 0.007714816093444824, 0.007665664196014404, 0.007792160034179687, 0.007646687984466553, 0.007668032169342041, 0.007744192123413086, 0.0076984319686889645, 0.007648255825042725, 0.0076687359809875484, 0.007704576015472412, 0.007679999828338623, 0.007669760227203369, 0.0077571840286254886, 0.007661312103271484, 0.007601280212402344, 0.0077719039916992185, 0.00769158411026001, 0.00773363208770752, 0.007697055816650391, 0.007745408058166504, 0.007667488098144532, 0.0076752638816833495, 0.007528448104858398, 0.007749631881713868, 0.007696383953094482, 0.0077571840286254886, 0.00772160005569458, 0.007671807765960693, 0.007663616180419922, 0.0076228160858154296, 0.007642975807189941, 0.0075980801582336424, 0.0076953921318054196, 0.007600736141204834, 0.007524288177490234, 0.007921984195709228, 0.007870143890380859, 0.007799232006072998, 0.007798719882965088, 0.008345919609069824, 0.007706367969512939, 0.007648896217346191, 0.007729536056518555, 0.00769209623336792, 0.00762284803390503, 0.007725056171417236, 0.007612256050109864, 0.007587168216705322, 0.007643968105316162, 0.007651552200317383, 0.007641183853149414, 0.00766534423828125, 0.007583744049072265, 0.0076097922325134276, 0.007582015991210937, 0.007547135829925537, 0.0076117758750915525, 0.007588064193725586, 0.007625120162963867, 0.007706463813781738, 0.0076269121170043945, 0.007610591888427734, 0.007616511821746826, 0.008064800262451172, 0.008370304107666015, 0.0076975998878479005, 0.007610400199890136, 0.007625376224517822, 0.0076241598129272465, 0.007741983890533447, 0.007725183963775635, 0.00762662410736084, 0.007538112163543701, 0.007637152194976807, 0.007706528186798096, 0.007652063846588134, 0.007683839797973633, 0.007769408226013183, 0.007737728118896484, 0.007729599952697754, 0.0076342720985412595, 0.007588128089904785, 0.007532832145690918, 0.007581696033477783, 0.007583744049072265, 0.007334559917449951, 0.007618559837341309, 0.0075790400505065915, 0.007696991920471192, 0.007673376083374024, 0.007609983921051025, 0.007572288036346435, 0.007600160121917725, 0.007608320236206055, 0.00760422420501709, 0.007553023815155029, 0.007589727878570557, 0.007493504047393799, 0.007535136222839355, 0.007479135990142822, 0.007502048015594482, 0.007503551959991455, 0.007542784214019775, 0.00760211181640625, 0.007615968227386475, 0.0075346240997314455, 0.007592639923095703, 0.007622015953063965, 0.0075985918045043946, 0.007613759994506836, 0.007737311840057373, 0.0077933440208435055, 0.007622655868530274, 0.007607776165008545, 0.007577280044555664, 0.007677824020385742, 0.007742144107818603, 0.00781273603439331, 0.007922368049621582, 0.007923840045928955, 0.007849855899810791, 0.007827231884002686, 0.007701951980590821, 0.007668511867523193, 0.007660960197448731, 0.0077497920989990235, 0.0076804480552673336, 
0.007677311897277832, 0.0076542081832885744, 0.007634751796722412, 0.007620607852935791, 0.007594240188598633, 0.007608384132385254, 0.007628191947937011, 0.007803167819976807, 0.007726111888885498, 0.007665919780731201, 0.007731455802917481, 0.0078220157623291, 0.008078911781311034, 0.007889120101928711, 0.007858176231384278, 0.007787936210632325, 0.0077768640518188475, 0.00790937614440918, 0.007903232097625732, 0.007954271793365478, 0.008060447692871094, 0.007646527767181397, 0.007805952072143555, 0.007847616195678711, 0.00785203218460083, 0.007993343830108643, 0.008052736282348634, 0.00793996810913086, 0.007916800022125244, 0.007973631858825684, 0.007870751857757569, 0.00798691177368164, 0.007861408233642578, 0.00785433578491211, 0.007863264083862304, 0.007894239902496337, 0.007963391780853271, 0.007917344093322754, 0.007847936153411865, 0.007970623970031739, 0.008032447814941406, 0.007985151767730713, 0.007857728004455566, 0.008460736274719239, 0.008368127822875977, 0.00845798397064209, 0.0085731840133667, 0.008060416221618653, 0.007956064224243165, 0.007922048091888428, 0.00790992021560669, 0.007886847972869874, 0.007813375949859619, 0.007853600025177002, 0.007876832008361817, 0.007854080200195313, 0.007942143917083741, 0.00809500789642334, 0.007829504013061523, 0.007773183822631836, 0.007913087844848633, 0.0079300799369812, 0.007968639850616455, 0.007962624073028564, 0.007931327819824219, 0.007890528202056884, 0.0077629761695861815, 0.007862495899200439, 0.008031904220581055, 0.00788486385345459, 0.00785644817352295, 0.008041760444641114, 0.00804700756072998, 0.007983104228973388, 0.007843743801116944, 0.007911520004272461, 0.007927807807922363, 0.007837567806243896, 0.008030336380004882, 0.007890751838684082, 0.007860095977783203, 0.007893311977386474, 0.007886335849761963, 0.008186623573303223, 0.007942143917083741, 0.007976960182189942, 0.007976640224456787, 0.007862112045288087, 0.007839712142944336, 0.007812928199768067, 0.007857952117919922, 0.007890912055969238, 0.007893824100494384, 0.007810944080352783, 0.007861792087554931, 0.00810371208190918, 0.00794924783706665, 0.007942143917083741, 0.00789084815979004, 0.007949567794799805, 0.007897952079772948, 0.007868192195892333, 0.00802019214630127, 0.008097760200500488, 0.00825551986694336, 0.007954432010650634, 0.008010751724243164, 0.008032832145690918, 0.007827744007110596, 0.00793936014175415, 0.007945087909698487, 0.008052096366882325, 0.007918208122253419, 0.007827455997467042, 0.007864319801330566, 0.007925888061523438, 0.00797273588180542, 0.007892255783081054, 0.007792768001556397, 0.007760479927062989, 0.007883071899414062, 0.007793951988220215, 0.007860864162445068, 0.007913248062133789, 0.007825151920318603, 0.007913695812225343, 0.007816927909851074, 0.007784768104553223, 0.00783564805984497, 0.007808320045471192, 0.007856383800506591, 0.007794655799865722, 0.007738111972808838, 0.007796544075012207, 0.007814271926879883, 0.007791488170623779, 0.00784988784790039, 0.007870495796203613, 0.007818624019622803, 0.007770815849304199, 0.007776159763336182, 0.007826623916625976, 0.007885983943939209, 0.007876255989074706, 0.008011775970458984, 0.007974912166595459, 0.007966720104217529]",tokens/s,127.30677733626418,, 
bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 373, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 252, in __init__ super().__init__(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 115, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 402135 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,803.684352,748.552192,0.0,346.03008,335.0016,s,1,8.831220703125,8.831220703125,0.0,8.831220703125,8.831220703125,8.831220703125,8.831220703125,[8.831220703125],,kWh,2.6138623875027633e-06,2.812471314925308e-07,0.0,2.8951095189952942e-06,,MB,1163.767808,771.620864,0.0,358.612992,327.706112,s,20,0.5020568294525146,0.02510284147262573,0.00026341063754496864,0.02505067253112793,0.025128611373901367,0.025195318126678468,0.02600746114730835,"[0.02621049690246582, 0.025043872833251952, 0.025108959197998045, 0.02504550361633301, 0.02504972839355469, 0.02512713623046875, 0.02512575912475586, 0.024952896118164064, 0.025141887664794922, 0.0249300479888916, 0.02497283172607422, 0.024931808471679688, 0.02508598327636719, 0.0249149112701416, 0.02508518409729004, 0.02504003143310547, 0.025101055145263673, 0.025051616668701173, 0.02501753616333008, 0.025119583129882814]",tokens/s,10198.048706126123,kWh,8.25900316737282e-07,9.108229740512333e-08,5.459444103898285e-07,1.4629270245322338e-06,tokens/kWh,174991640.53097945,MB,1188.749312,788.39808,0.0,375.390208,327.708672,s,20,9.763895416259766,0.4881947708129883,0.007884737248814215,0.48658520507812497,0.498979541015625,0.4994778106689453,0.5002357232666016,"[0.4954401550292969, 0.4989297180175781, 0.4940458679199219, 0.49942794799804685, 0.48962954711914064, 0.4829385986328125, 0.48203961181640625, 0.4795682067871094, 0.48015435791015626, 0.47783123779296877, 0.4829739074707031, 0.47832363891601565, 0.47899237060546873, 0.48354086303710936, 0.48253067016601564, 0.4917558898925781, 0.49796942138671874, 0.4905362548828125, 0.5004252014160157, 
0.4968419494628906]",tokens/s,129.046855407907,kWh,1.4007331631179186e-05,1.544754982986629e-06,6.062894210010166e-06,2.1614980824175988e-05,tokens/kWh,2914645.1950369338,,s,1260,9.753908683776846,0.00774119736807687,0.0002430830777126046,0.0076822080612182625,0.008014173221588136,0.008101905298233032,0.00835099105834961,"[0.007770656108856201, 0.008018143653869629, 0.008246912002563476, 0.00786243200302124, 0.007858047962188721, 0.007778079986572266, 0.0077274560928344725, 0.007883808135986329, 0.007734240055084229, 0.0077959361076354984, 0.007803679943084717, 0.007786496162414551, 0.007690080165863037, 0.007677152156829834, 0.007822175979614258, 0.007745664119720459, 0.0077127361297607425, 0.007706624031066894, 0.007745535850524903, 0.007700064182281494, 0.007717279911041259, 0.007739552021026611, 0.007693151950836182, 0.007672128200531006, 0.007787487983703613, 0.007796448230743408, 0.00770201587677002, 0.007796576023101806, 0.007760543823242188, 0.007723040103912353, 0.007776415824890136, 0.007754975795745849, 0.007733344078063965, 0.007879168033599853, 0.008265215873718262, 0.008178175926208496, 0.007912767887115478, 0.008228575706481934, 0.008039584159851074, 0.008045472145080567, 0.008051615715026855, 0.008084608078002929, 0.008159584045410156, 0.008220640182495118, 0.008161503791809082, 0.008110015869140626, 0.008016287803649902, 0.007952383995056152, 0.008021568298339844, 0.007905824184417725, 0.007862175941467285, 0.00796889591217041, 0.007844992160797119, 0.007823200225830078, 0.007713696002960205, 0.007806975841522217, 0.007753471851348877, 0.007682047843933106, 0.007708928108215332, 0.007732831954956055, 0.007741856098175049, 0.007653632164001465, 0.007673600196838379, 0.00773529577255249, 0.007982751846313477, 0.007886816024780273, 0.007909759998321534, 0.007868415832519531, 0.007854080200195313, 0.007749631881713868, 0.007777919769287109, 0.007727551937103272, 0.007810656070709228, 0.007810751914978028, 0.007907999992370605, 0.007730432033538818, 0.007848959922790527, 0.007892735958099365, 0.007839776039123536, 0.007877984046936036, 0.007993984222412109, 0.00798912000656128, 0.008155263900756837, 0.008167424201965333, 0.008146112442016601, 0.008065664291381835, 0.008036224365234376, 0.008098112106323243, 0.008171775817871093, 0.008037599563598632, 0.008083807945251464, 0.008099712371826173, 0.008031904220581055, 0.00823318386077881, 0.008182208061218261, 0.008049792289733886, 0.007981728076934815, 0.007878880023956298, 0.007906816005706786, 0.00803059196472168, 0.008024191856384278, 0.007933951854705811, 0.008030207633972167, 0.00808944034576416, 0.008056991577148438, 0.008005632400512695, 0.007851903915405273, 0.0077695040702819825, 0.007786719799041748, 0.007811583995819092, 0.007893152236938476, 0.007677375793457031, 0.00782099199295044, 0.007770751953125, 0.007700575828552246, 0.007773407936096191, 0.0077907519340515135, 0.007672095775604248, 0.007743552207946778, 0.007766304016113282, 0.007852128028869629, 0.007778207778930664, 0.007836991786956788, 0.00783839988708496, 0.007786496162414551, 0.007765024185180664, 0.00809660816192627, 0.008529919624328614, 0.0080480318069458, 0.008033184051513672, 0.00802783966064453, 0.008156543731689453, 0.008094335556030273, 0.008023551940917969, 0.008015935897827148, 0.00792416000366211, 0.007919424057006836, 0.00790342378616333, 0.007847936153411865, 0.007954271793365478, 0.007956639766693115, 0.007994847774505615, 0.007980639934539796, 0.008342240333557129, 0.007881152153015136, 0.007861120223999023, 0.0078471360206604, 
0.007959487915039063, 0.007918111801147461, 0.007868639945983887, 0.007857600212097167, 0.007787072181701661, 0.007699967861175537, 0.007802944183349609, 0.007656064033508301, 0.007630720138549805, 0.0077025599479675294, 0.007631999969482422, 0.007576352119445801, 0.007784768104553223, 0.007646783828735352, 0.007657599925994873, 0.007691904067993164, 0.00770633602142334, 0.007753376007080078, 0.0077547521591186525, 0.007668992042541504, 0.0077259521484375, 0.007720736026763916, 0.00764463996887207, 0.007746240139007568, 0.007658976078033447, 0.0076436161994934086, 0.007745151996612549, 0.007813504219055176, 0.007721119880676269, 0.007714719772338867, 0.007874495983123779, 0.007865983963012696, 0.007852416038513183, 0.007792640209197998, 0.007769536018371582, 0.0077274880409240725, 0.007675519943237305, 0.007681856155395508, 0.007694752216339112, 0.007720352172851563, 0.0077641921043396, 0.007776991844177246, 0.007685696125030518, 0.007918015956878662, 0.007831552028656007, 0.007796735763549805, 0.007869535923004151, 0.007921567916870117, 0.008004608154296875, 0.008024031639099122, 0.007769536018371582, 0.007860991954803467, 0.007812960147857666, 0.007798943996429443, 0.007821152210235596, 0.0078373122215271, 0.007826943874359131, 0.007783423900604248, 0.007845759868621826, 0.007798880100250244, 0.007779232025146484, 0.00782966423034668, 0.007934751987457276, 0.007853536128997803, 0.00787721586227417, 0.007952032089233398, 0.007993087768554688, 0.007982912063598633, 0.007916255950927735, 0.008175680160522461, 0.008144288063049317, 0.007988895893096924, 0.008022272109985351, 0.00801471996307373, 0.007948095798492432, 0.008048480033874511, 0.008390527725219726, 0.008263168334960937, 0.008011872291564942, 0.008007455825805664, 0.008022944450378417, 0.008044544219970704, 0.007952383995056152, 0.007962143898010254, 0.007995903968811035, 0.008000800132751465, 0.008149727821350098, 0.008119711875915528, 0.008024224281311035, 0.007921792030334473, 0.007984864234924316, 0.008026047706604004, 0.00789363193511963, 0.008080960273742676, 0.007909823894500732, 0.00790233612060547, 0.0077341442108154294, 0.007733248233795166, 0.007750815868377685, 0.00797388792037964, 0.007673408031463623, 0.007689824104309082, 0.007585792064666748, 0.0076109118461608885, 0.00782758378982544, 0.007378943920135498, 0.007603583812713623, 0.007570047855377197, 0.007702527999877929, 0.007727104187011719, 0.007728864192962646, 0.008161151885986328, 0.00815548801422119, 0.008044032096862793, 0.007864895820617676, 0.007804448127746582, 0.007817599773406982, 0.007733344078063965, 0.00770198392868042, 0.0077320637702941895, 0.00768393611907959, 0.0076388797760009765, 0.007603231906890869, 0.007560160160064697, 0.007540736198425293, 0.007593632221221924, 0.007620992183685303, 0.007708384037017822, 0.007606527805328369, 0.007628064155578613, 0.007668447971343994, 0.0078113598823547365, 0.007752480030059815, 0.007761023998260498, 0.0075569281578063965, 0.007559167861938477, 0.007562528133392334, 0.007664224147796631, 0.00761033582687378, 0.0075894718170166015, 0.007625599861145019, 0.007607583999633789, 0.0076193280220031735, 0.007586688041687012, 0.007666463851928711, 0.0076557121276855464, 0.007595744132995606, 0.0076037440299987795, 0.007600607872009277, 0.007583072185516357, 0.007580319881439209, 0.007663616180419922, 0.00767084789276123, 0.0077068800926208495, 0.007842463970184326, 0.007664991855621338, 0.007709375858306885, 0.007741695880889893, 0.0077695040702819825, 0.007743840217590332, 0.008101887702941894, 
0.008628576278686524, 0.00861353588104248, 0.009432064056396485, 0.00809062385559082, 0.00790841579437256, 0.007814080238342284, 0.0078438401222229, 0.007581696033477783, 0.007952383995056152, 0.007639296054840088, 0.007802624225616455, 0.007648608207702637, 0.007611040115356445, 0.007609344005584716, 0.007674784183502197, 0.007609824180603028, 0.007633535861968994, 0.007608416080474853, 0.0076078720092773435, 0.007552735805511475, 0.007631775856018067, 0.0076735677719116215, 0.007759871959686279, 0.007870463848114014, 0.007716512203216553, 0.007641439914703369, 0.007761216163635254, 0.007729856014251709, 0.007657408237457275, 0.00757692813873291, 0.007658368110656739, 0.007566463947296142, 0.0075985918045043946, 0.007612959861755371, 0.007656256198883057, 0.007596864223480225, 0.007655488014221191, 0.007680064201354981, 0.007598015785217285, 0.007648767948150635, 0.007630496025085449, 0.007567455768585205, 0.007617280006408691, 0.0076332159042358395, 0.007724736213684082, 0.007633120059967041, 0.007638559818267822, 0.007624959945678711, 0.007635295867919922, 0.0076462721824646, 0.007651519775390625, 0.007558976173400879, 0.007573599815368652, 0.00760694408416748, 0.007638527870178223, 0.007739744186401367, 0.007623775959014893, 0.007557536125183105, 0.007672448158264161, 0.0076327681541442875, 0.007562880039215088, 0.007598239898681641, 0.007669983863830566, 0.0076308479309082035, 0.007640736103057861, 0.007737696170806885, 0.008019007682800294, 0.007748223781585693, 0.007719423770904541, 0.0076893758773803715, 0.007561183929443359, 0.007699007987976074, 0.007640960216522216, 0.007651391983032226, 0.007571040153503418, 0.007577951908111572, 0.007590240001678467, 0.007606016159057617, 0.007577631950378418, 0.007946112155914307, 0.007673376083374024, 0.0075985598564147945, 0.007612287998199463, 0.007626880168914795, 0.007678112030029297, 0.007645023822784424, 0.007616511821746826, 0.007661439895629883, 0.007557248115539551, 0.0075345921516418455, 0.00764415979385376, 0.007539711952209473, 0.007584928035736084, 0.007555136203765869, 0.007642911911010742, 0.007578015804290772, 0.00757206392288208, 0.007600128173828125, 0.007622399806976318, 0.0075801281929016116, 0.007528351783752441, 0.007544703960418701, 0.007571263790130615, 0.007610559940338135, 0.007702752113342285, 0.007814080238342284, 0.007789408206939697, 0.0077396478652954105, 0.007870207786560059, 0.008296159744262696, 0.008038016319274902, 0.007813727855682373, 0.007685696125030518, 0.007574016094207763, 0.007608320236206055, 0.007565599918365478, 0.007759103775024414, 0.007563327789306641, 0.00761187219619751, 0.007559199810028076, 0.0076351361274719235, 0.007610496044158936, 0.0075857281684875485, 0.007610015869140625, 0.007582399845123291, 0.007541408061981201, 0.0075383358001708985, 0.007592000007629394, 0.007585984230041504, 0.007579391956329346, 0.007646944046020508, 0.007548768043518067, 0.007715263843536377, 0.007291232109069824, 0.007596255779266358, 0.0076512317657470704, 0.007606527805328369, 0.007567200183868408, 0.007585440158843994, 0.00752288007736206, 0.007587615966796875, 0.007562335968017578, 0.007541664123535156, 0.007544832229614258, 0.007556416034698486, 0.007596735954284668, 0.007573503971099854, 0.007515935897827148, 0.007528512001037597, 0.00765283203125, 0.007582335948944092, 0.007612480163574219, 0.007696383953094482, 0.007559167861938477, 0.007544672012329102, 0.007579808235168457, 0.007638688087463379, 0.007749184131622315, 0.007787487983703613, 0.007668896198272705, 0.007705440044403076, 
0.007669312000274658, 0.0076293120384216305, 0.007620351791381836, 0.007606272220611572, 0.00760422420501709, 0.00760422420501709, 0.007650815963745118, 0.00755353593826294, 0.0075632638931274416, 0.007551263809204101, 0.007566271781921386, 0.007561855792999268, 0.0075327038764953615, 0.007587296009063721, 0.007545375823974609, 0.007548927783966064, 0.0075625600814819335, 0.007643040180206299, 0.007640160083770752, 0.007637856006622315, 0.007569471836090088, 0.007557375907897949, 0.0075755839347839355, 0.007576064109802246, 0.007584832191467285, 0.007760128021240234, 0.007645440101623535, 0.007614912033081055, 0.007489535808563232, 0.007632800102233887, 0.007920928001403808, 0.00757587194442749, 0.007599743843078613, 0.0076904001235961915, 0.0078096318244934085, 0.007371935844421387, 0.00761897611618042, 0.007608767986297608, 0.00776582384109497, 0.00764572811126709, 0.007708320140838623, 0.007689407825469971, 0.00764192008972168, 0.007730815887451172, 0.007674240112304688, 0.007604191780090332, 0.007708703994750977, 0.007800640106201172, 0.007783648014068604, 0.007762911796569824, 0.00770585584640503, 0.007679008007049561, 0.007653088092803955, 0.0077738242149353026, 0.007639423847198486, 0.007536640167236328, 0.007706624031066894, 0.007647039890289306, 0.00751635217666626, 0.007628128051757812, 0.007566239833831787, 0.007532288074493408, 0.007581696033477783, 0.007576767921447754, 0.007524896144866944, 0.007549215793609619, 0.0075220799446105956, 0.007620831966400146, 0.007600128173828125, 0.007612224102020264, 0.007677375793457031, 0.007635583877563476, 0.0076080961227417, 0.007583871841430664, 0.007708000183105469, 0.007574368000030518, 0.007564352035522461, 0.007635392189025879, 0.007584288120269776, 0.007608575820922852, 0.007710368156433106, 0.00762992000579834, 0.007588096141815185, 0.007545023918151855, 0.00757532787322998, 0.0075335679054260255, 0.007685728073120117, 0.007516640186309815, 0.007509727954864502, 0.007619872093200684, 0.0075617280006408695, 0.007519552230834961, 0.007552127838134765, 0.007533472061157226, 0.00753107213973999, 0.007536960124969482, 0.007542175769805908, 0.0075188159942626955, 0.00733788776397705, 0.0075339522361755375, 0.007713503837585449, 0.007631072044372559, 0.0076059517860412595, 0.0075797119140625, 0.007565695762634278, 0.007544479846954346, 0.007572512149810791, 0.007599071979522705, 0.007532544136047363, 0.007540736198425293, 0.007597792148590088, 0.007559679985046387, 0.007638944149017334, 0.00761407995223999, 0.0075831360816955564, 0.0076583361625671385, 0.007535999774932862, 0.007600768089294434, 0.007540800094604492, 0.007597216129302978, 0.007590688228607177, 0.007546239852905273, 0.007611008167266846, 0.007561215877532959, 0.0075428481101989745, 0.007512191772460938, 0.007569215774536133, 0.007535999774932862, 0.0075864319801330566, 0.007548927783966064, 0.007542784214019775, 0.007572735786437988, 0.007526400089263916, 0.0075495681762695315, 0.0075625920295715335, 0.007544864177703858, 0.007555615901947021, 0.0074765758514404295, 0.0075395841598510744, 0.007674880027770996, 0.007574528217315674, 0.007567168235778809, 0.007596223831176758, 0.0076447358131408695, 0.007682112216949463, 0.007518271923065186, 0.0074791679382324215, 0.007542912006378174, 0.007610591888427734, 0.007592031955718994, 0.007590112209320069, 0.007599711894989014, 0.007559360027313233, 0.007566368103027344, 0.007551263809204101, 0.007803904056549072, 0.00752403211593628, 0.007628128051757812, 0.007613088130950928, 0.007696383953094482, 0.007568607807159424, 
0.007334112167358398, 0.007665247917175293, 0.007593376159667969, 0.0075821118354797365, 0.007561247825622558, 0.007551775932312012, 0.00771065616607666, 0.007722847938537598, 0.007617856025695801, 0.007580351829528808, 0.007600416183471679, 0.007587552070617676, 0.007592351913452149, 0.007581503868103028, 0.0075630397796630855, 0.007589024066925049, 0.007600543975830078, 0.007606719970703125, 0.007529664039611817, 0.007574336051940918, 0.007569407939910889, 0.007595424175262451, 0.007526976108551025, 0.0076063041687011715, 0.007660895824432373, 0.007579487800598144, 0.007578432083129883, 0.007543968200683594, 0.007510879993438721, 0.007606272220611572, 0.0076162881851196285, 0.007553247928619384, 0.007696383953094482, 0.007667200088500976, 0.007926591873168945, 0.007818272113800049, 0.007678624153137207, 0.007665855884552002, 0.0076798081398010256, 0.007652416229248047, 0.007571519851684571, 0.007608287811279297, 0.007758944034576416, 0.0076685757637023925, 0.0076744318008422855, 0.0083635835647583, 0.0076583361625671385, 0.007614463806152344, 0.008297792434692383, 0.00822710418701172, 0.008488384246826172, 0.007648223876953125, 0.007645472049713135, 0.007591936111450195, 0.007629568099975586, 0.007564576148986817, 0.007554751873016357, 0.007559167861938477, 0.007553023815155029, 0.007561215877532959, 0.007564864158630371, 0.007555039882659912, 0.007655168056488037, 0.007417856216430664, 0.0076984319686889645, 0.007591519832611084, 0.0075762557983398435, 0.00763865613937378, 0.007567455768585205, 0.007665023803710937, 0.007522496223449707, 0.007678400039672852, 0.007526495933532715, 0.0075847039222717285, 0.0075714879035949706, 0.007582623958587647, 0.007546207904815674, 0.0075838079452514645, 0.007564032077789306, 0.007659327983856201, 0.007554624080657959, 0.007519904136657715, 0.007570271968841553, 0.007562687873840332, 0.007582592010498047, 0.007714752197265625, 0.007609951972961426, 0.007571616172790527, 0.007574816226959228, 0.007541215896606445, 0.007515679836273193, 0.007592671871185302, 0.007554272174835205, 0.007582015991210937, 0.00754531192779541, 0.007591936111450195, 0.007558752059936524, 0.007528863906860352, 0.007554751873016357, 0.007508287906646729, 0.007546879768371582, 0.007583936214447021, 0.007522111892700196, 0.007503039836883545, 0.007525152206420899, 0.007615551948547363, 0.007571712017059327, 0.007657599925994873, 0.0075556797981262205, 0.007583744049072265, 0.007540671825408935, 0.007532608032226562, 0.007555136203765869, 0.007730400085449219, 0.007574240207672119, 0.007577888011932373, 0.0075773119926452635, 0.007539775848388672, 0.007519167900085449, 0.007819263935089112, 0.0075939841270446775, 0.0075545282363891604, 0.007764224052429199, 0.007545119762420655, 0.0077990078926086425, 0.0076425600051879886, 0.007303391933441162, 0.007667712211608887, 0.007639008045196533, 0.00759219217300415, 0.007612415790557861, 0.007642144203186035, 0.0075636482238769535, 0.007543168067932129, 0.007666719913482666, 0.008010720252990723, 0.0075673599243164065, 0.007563072204589844, 0.007581888198852539, 0.007630623817443847, 0.007571328163146973, 0.007637311935424804, 0.007614496231079101, 0.007528448104858398, 0.007507872104644775, 0.0075510721206665035, 0.007591455936431885, 0.007547359943389892, 0.0075443840026855466, 0.0076221442222595215, 0.007580607891082764, 0.007544832229614258, 0.007508096218109131, 0.007563136100769043, 0.007546879768371582, 0.007540736198425293, 0.007546879768371582, 0.007521984100341797, 0.007579103946685791, 0.007586143970489502, 
0.007982848167419433, 0.007847904205322265, 0.007559967994689941, 0.0075318398475646975, 0.007600160121917725, 0.00760694408416748, 0.00773529577255249, 0.007620607852935791, 0.007591936111450195, 0.0076308479309082035, 0.007544703960418701, 0.007573631763458252, 0.007509088039398194, 0.007617440223693848, 0.007608320236206055, 0.007684095859527588, 0.0075840320587158205, 0.007588863849639893, 0.007582431793212891, 0.007536640167236328, 0.007569407939910889, 0.00753868818283081, 0.007542335987091064, 0.007504320144653321, 0.007546879768371582, 0.00756547212600708, 0.0076241598129272465, 0.007698783874511719, 0.007568831920623779, 0.00728275203704834, 0.00769532823562622, 0.007770143985748291, 0.0076121602058410644, 0.0076063041687011715, 0.007593503952026367, 0.00755350399017334, 0.007696383953094482, 0.0077127041816711425, 0.007758975982666016, 0.0075755839347839355, 0.007614399909973145, 0.007555808067321777, 0.007580160140991211, 0.007581056118011475, 0.007653920173645019, 0.007601888179779053, 0.007501952171325684, 0.007522304058074952, 0.0075684161186218265, 0.007521247863769532, 0.0076286401748657225, 0.007587007999420166, 0.007583936214447021, 0.007522751808166504, 0.007551328182220459, 0.00754041576385498, 0.007522624015808105, 0.007524672031402588, 0.007558015823364258, 0.007536863803863526, 0.0075937919616699216, 0.007654143810272217, 0.0077041277885437015, 0.0076929922103881835, 0.007699359893798828, 0.0077684478759765625, 0.0076713600158691405, 0.007658432006835938, 0.007775936126708984, 0.007665535926818848, 0.007655871868133545, 0.007587711811065673, 0.0077313919067382815, 0.007523263931274414, 0.007582367897033692, 0.007637343883514404, 0.007731200218200684, 0.0076126718521118165, 0.007589183807373047, 0.0075568962097167965, 0.008043199539184571, 0.007896575927734375, 0.008337087631225586, 0.007613215923309326, 0.008480768203735351, 0.008103391647338868, 0.007628992080688477, 0.007722976207733154, 0.00783945608139038, 0.007852704048156738, 0.007731200218200684, 0.007606272220611572, 0.007289120197296143, 0.007608320236206055, 0.007577824115753174, 0.008036128044128418, 0.007620607852935791, 0.00763424015045166, 0.007625408172607422, 0.007665664196014404, 0.007588064193725586, 0.00763804817199707, 0.007574272155761719, 0.007555168151855468, 0.007577375888824463, 0.00767193603515625, 0.007664991855621338, 0.007609024047851563, 0.00765334415435791, 0.007601632118225098, 0.007584288120269776, 0.00757862377166748, 0.007558144092559814, 0.007560544013977051, 0.007575967788696289, 0.007682303905487061, 0.007577600002288819, 0.007652512073516846, 0.007582752227783203, 0.008265536308288574, 0.008234368324279786, 0.0076004800796508785, 0.007596352100372314, 0.007601952075958252, 0.007550399780273437, 0.007881472110748291, 0.00774348783493042, 0.0077391037940979, 0.00765718412399292, 0.007729055881500244, 0.007814976215362548, 0.0078056321144104, 0.007886144161224365, 0.0077933440208435055, 0.007587647914886474, 0.0075944638252258305, 0.007559040069580078, 0.0075933442115783695, 0.007637631893157959, 0.007569471836090088, 0.00757260799407959, 0.0075866241455078125, 0.007576767921447754, 0.0076566081047058105, 0.007658368110656739, 0.007545631885528564, 0.007583072185516357, 0.007576064109802246, 0.007549280166625976, 0.007567168235778809, 0.007591040134429932, 0.007779136180877686, 0.007606272220611572, 0.007614528179168701, 0.0075829758644104005, 0.007718912124633789, 0.00789299201965332, 0.0077963838577270505, 0.007763775825500488, 0.00773583984375, 0.007729311943054199, 
0.007742656230926514, 0.007719840049743652, 0.0077292160987854006, 0.007785664081573486, 0.007817887783050537, 0.007958367824554443, 0.00798902416229248, 0.007970335960388184, 0.0078220157623291, 0.007807072162628174, 0.007813024044036864, 0.00788640022277832, 0.00787500810623169, 0.007904672145843505, 0.007981056213378907, 0.00796937608718872, 0.007888927936553955, 0.007856095790863037, 0.007790431976318359, 0.007757023811340332, 0.007695295810699463, 0.0076308479309082035, 0.007659615993499756, 0.007765888214111328, 0.007704927921295166, 0.007734975814819336, 0.007899136066436767, 0.00790937614440918, 0.007782400131225586, 0.007759871959686279, 0.007679296016693115, 0.007593728065490722, 0.007629792213439942, 0.0076697921752929684, 0.0077413759231567385, 0.007688191890716553, 0.007786111831665039, 0.008015520095825195, 0.007848000049591065, 0.007962431907653809, 0.008022336006164551, 0.007875103950500489, 0.007806528091430664, 0.0078503999710083, 0.007840832233428956, 0.007781472206115723, 0.007933152198791505, 0.007711328029632568, 0.0076752958297729495, 0.007764639854431152, 0.007753024101257324, 0.007860223770141601, 0.007715744018554687, 0.007715744018554687, 0.007663487911224365, 0.007738687992095947, 0.0076979842185974125, 0.007892864227294922, 0.008120927810668945, 0.008042655944824218, 0.008066559791564941, 0.007891647815704346, 0.007775968074798584, 0.007814623832702637, 0.007818048000335693, 0.007718495845794678, 0.007688608169555664, 0.008013824462890624, 0.008062335968017578, 0.007915328025817871, 0.007948575973510742, 0.007995296001434326, 0.007913440227508546, 0.007918240070343018, 0.007872032165527345, 0.008032416343688965, 0.007973440170288086, 0.008195839881896972, 0.00795798397064209, 0.007853951930999756, 0.007799456119537354, 0.007827455997467042, 0.007925695896148681, 0.007813183784484864, 0.008042271614074708, 0.008104096412658692, 0.00908073616027832, 0.008138912200927734, 0.008158944129943847, 0.00808176040649414, 0.00801375961303711, 0.007949728012084961, 0.007900000095367432, 0.00790502405166626, 0.007792831897735596, 0.007851840019226074, 0.007837696075439453, 0.007968224048614502, 0.007851903915405273, 0.00777459192276001, 0.007827839851379394, 0.007753632068634033, 0.007742656230926514, 0.007766592025756836, 0.007749023914337158, 0.007803743839263916, 0.007824416160583497, 0.007804992198944091, 0.007895967960357665, 0.007818463802337647, 0.007807744026184082, 0.007890336036682129, 0.0077686400413513185, 0.007681951999664306, 0.007702335834503173, 0.007665408134460449, 0.007634655952453613, 0.00764412784576416, 0.007634848117828369, 0.007751679897308349, 0.007921792030334473, 0.007944416046142579, 0.007735072135925293, 0.007728415966033936, 0.007718751907348633, 0.007807456016540528, 0.007698847770690918, 0.007773856163024903, 0.007743904113769531, 0.007707776069641114, 0.007678847789764404, 0.007638016223907471, 0.007662879943847656, 0.007593632221221924, 0.007665664196014404, 0.007722911834716797, 0.007775424003601074, 0.0077976641654968265, 0.008127776145935059, 0.007936768054962158, 0.007760032176971436, 0.007714144229888916, 0.0077274560928344725, 0.007628511905670166, 0.007772575855255127, 0.007819263935089112, 0.007990880012512207, 0.00780457592010498, 0.007750527858734131, 0.007739232063293457, 0.007707808017730713, 0.0076808958053588864, 0.0077578239440917966, 0.0077472000122070315, 0.007671199798583985, 0.007861216068267823, 0.007677951812744141, 0.007674911975860596, 0.007703519821166992, 0.007839744091033935, 0.0077469758987426755, 
0.007670112133026123, 0.007980544090270996, 0.007848896026611329, 0.0077783999443054195, 0.007704351902008057, 0.007792223930358886, 0.007991648197174073, 0.007779903888702392, 0.007993567943572998, 0.007864543914794922, 0.007688416004180908, 0.007826240062713622, 0.007767007827758789, 0.007817215919494629, 0.007798783779144287, 0.007766016006469726, 0.007768064022064209, 0.0077062082290649415, 0.007694464206695556, 0.007707039833068848, 0.008138175964355468, 0.007814720153808593, 0.007355231761932373, 0.007696000099182129, 0.007645343780517578, 0.007659103870391846, 0.007772575855255127, 0.00781820821762085, 0.007877503871917725, 0.007756832122802734, 0.007689184188842774, 0.007650303840637207, 0.0076743998527526855, 0.007624735832214355, 0.0077133760452270505, 0.007650752067565918, 0.007666079998016357, 0.007606272220611572, 0.007745535850524903, 0.007606272220611572, 0.007655424118041992, 0.0076037440299987795, 0.0075842242240905765, 0.007601696014404297, 0.007551455974578857, 0.007577600002288819, 0.007618336200714111, 0.007659039974212647, 0.007646048069000244, 0.007676000118255615, 0.007642879962921143, 0.007708672046661377, 0.00768009614944458, 0.0075830078125, 0.007664256095886231, 0.007742752075195312, 0.007785183906555176, 0.00902348804473877, 0.008965920448303223, 0.012054656028747559, 0.010095808029174804, 0.007821760177612304, 0.007773856163024903, 0.0077638401985168455, 0.007846848011016845, 0.007828544139862061, 0.007772319793701172, 0.007754528045654297, 0.007798495769500732, 0.007934239864349366, 0.008068479537963868, 0.008267487525939941, 0.008071136474609376, 0.00813766384124756, 0.008007967948913574, 0.008017663955688476, 0.008026080131530762, 0.008038304328918456, 0.008085599899291992, 0.00816368007659912, 0.008203935623168946, 0.007939712047576904, 0.008143424034118652, 0.008023967742919922, 0.008050432205200195, 0.0077495999336242676, 0.007946271896362305, 0.007942431926727295, 0.007917280197143555, 0.008214400291442872, 0.00788095998764038, 0.007776127815246582, 0.007862271785736084, 0.007873760223388673, 0.007767104148864746, 0.007804287910461426, 0.007934304237365722, 0.007886720180511475, 0.007847616195678711, 0.007856768131256103, 0.008079168319702148, 0.008157183647155761, 0.007983168125152589, 0.007866464138031005, 0.007744703769683838, 0.00781993579864502, 0.007714591979980469, 0.0077604160308837895, 0.007718592166900635, 0.007823391914367676, 0.00797488021850586, 0.008009440422058105, 0.00801411247253418, 0.007925663948059082, 0.007908736228942871, 0.007656576156616211, 0.0077164478302001955, 0.007756864070892334, 0.00786844778060913, 0.007726272106170654, 0.008310751914978027, 0.0077430720329284665, 0.007647232055664062, 0.007733407974243164, 0.007696256160736084, 0.007700607776641845, 0.007901152133941651, 0.00790940809249878, 0.008102239608764649, 0.008066720008850097, 0.008078592300415039, 0.008105759620666503, 0.008231904029846191, 0.008109375953674317, 0.008029151916503906, 0.008117024421691894, 0.007975872039794923, 0.007914688110351563, 0.007895872116088868, 0.007855487823486327, 0.0077985281944274905, 0.00781606388092041, 0.007765312194824219, 0.007643040180206299, 0.0076431999206542965, 0.007632863998413086, 0.007758368015289307, 0.007674335956573486]",tokens/s,129.17898258527777,, 
float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpw5ng8n3b/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 382, in 
__init__ self.fc1 = nn.Linear(self.embed_dim, config.ffn_dim, bias=config.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.27 GiB. GPU 0 has a total capacity of 14.74 GiB of which 172.12 MiB is free. Process 405528 has 14.57 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 2.28 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp04_mt3jv/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. 
Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,786.694144,6169.690112,0.0,5767.168,5561.701376,s,1,7.44520556640625,7.44520556640625,0.0,7.44520556640625,7.44520556640625,7.44520556640625,7.44520556640625,[7.44520556640625],,kWh,4.523462883238002e-06,4.912133826626573e-07,2.7980577939901163e-06,7.812734059890776e-06,,MB,1118.027776,6180.175872,0.0,5767.168,5440.258048,s,10,4.762443023681642,0.47624430236816406,0.002768908111950733,0.47706011962890627,0.47926879577636716,0.4792790817260742,0.47928731048583983,"[0.47047503662109375, 0.47548358154296877, 0.4754042663574219, 0.47238568115234375, 0.477148681640625, 0.4787928466796875, 0.47928936767578123, 0.47722549438476564, 0.4792665100097656, 0.4769715576171875]",tokens/s,537.5392392665253,kWh,1.3862623716665272e-05,1.5288138018702805e-06,9.223201822999092e-06,2.4614639341534645e-05,tokens/kWh,10400314.88773539,MB,1143.00928,6182.273024,0.0,5769.265152,5523.463168,s,10,18.221845825195317,1.8221845825195317,0.006728241840755271,1.8203175659179687,1.8316770141601562,1.8321632141113282,1.8325521740722657,"[1.8252877197265625, 1.81905908203125, 1.8159942626953125, 1.8186470947265625, 1.8315689697265625, 1.8289190673828124, 1.8326494140625, 1.8215760498046876, 1.8121314697265625, 1.8160126953125]",tokens/s,34.57388488760562,kWh,5.333154017625159e-05,5.882483197095036e-06,3.531288936140318e-05,9.45269127347498e-05,tokens/kWh,666476.8601592133,,s,630,18.218259956359873,0.02891787294660296,0.00024714676926891935,0.028908479690551755,0.02915743942260742,0.02924870138168335,0.029783926525115974,"[0.029412895202636718, 0.029165855407714845, 0.028971200942993165, 0.028841983795166014, 0.028880895614624022, 0.02884809684753418, 0.028713119506835937, 0.02870992088317871, 0.02890950393676758, 0.028811647415161134, 0.028865055084228517, 0.02875596809387207, 0.02898975944519043, 0.028841663360595703, 0.028848127365112306, 0.02880512046813965, 0.02893414306640625, 0.028829696655273438, 0.028899328231811523, 0.028882240295410155, 0.028945087432861328, 0.02894233512878418, 0.02896281623840332, 0.028845535278320313, 0.029116960525512697, 0.02896895980834961, 0.028976543426513672, 0.028900991439819335, 0.0288941764831543, 0.028985151290893553, 0.029044031143188476, 0.029488000869750976, 0.02895622444152832, 0.02894483184814453, 0.02921062469482422, 0.029030399322509767, 0.029070463180541992, 0.0289902400970459, 0.029042207717895507, 0.028885568618774414, 0.0289290885925293, 
0.028916255950927734, 0.02899600028991699, 0.028978208541870117, 0.028939231872558594, 0.02892089653015137, 0.029006784439086913, 0.028896480560302733, 0.029146976470947265, 0.028905824661254884, 0.029004383087158202, 0.029013664245605468, 0.029046112060546875, 0.029037055969238282, 0.028971519470214844, 0.02903366470336914, 0.028994112014770507, 0.028938112258911134, 0.02888332748413086, 0.029042335510253908, 0.028920352935791014, 0.029112127304077147, 0.02917987251281738, 0.02933964729309082, 0.029173759460449217, 0.02898124885559082, 0.028725248336791992, 0.028688383102416993, 0.028677600860595703, 0.02883024024963379, 0.02871500778198242, 0.028635072708129882, 0.028741216659545897, 0.02857801628112793, 0.028458463668823243, 0.028549856185913085, 0.028786752700805666, 0.028649471282958985, 0.028437984466552733, 0.028616607666015623, 0.028680448532104493, 0.02872064018249512, 0.028569503784179686, 0.028642175674438476, 0.028941728591918944, 0.028959199905395507, 0.028939935684204103, 0.029018688201904296, 0.029191360473632813, 0.028793664932250978, 0.02865545654296875, 0.02894435119628906, 0.028666048049926757, 0.028672000885009766, 0.028716800689697265, 0.028584192276000977, 0.028925535202026367, 0.029024415969848633, 0.029245311737060548, 0.029432064056396486, 0.028889215469360352, 0.028864511489868162, 0.028767776489257813, 0.028756383895874024, 0.02874736022949219, 0.028794784545898438, 0.02889788818359375, 0.028874399185180664, 0.02894060707092285, 0.02876006317138672, 0.029040639877319335, 0.02871023941040039, 0.028862720489501954, 0.028664224624633788, 0.028788127899169923, 0.02881718444824219, 0.028907487869262696, 0.02887731170654297, 0.029014015197753908, 0.029452640533447264, 0.029527776718139647, 0.028967199325561525, 0.02947635269165039, 0.02939548873901367, 0.02894745635986328, 0.02902751922607422, 0.029626367568969726, 0.029249536514282228, 0.028925952911376954, 0.028817407608032225, 0.02879692840576172, 0.028807424545288087, 0.02858076858520508, 0.028579776763916015, 0.02870979118347168, 0.02877574348449707, 0.02909254455566406, 0.02880940818786621, 0.02923472023010254, 0.02891804885864258, 0.028818752288818358, 0.02890572738647461, 0.029030847549438476, 0.02880512046813965, 0.02874777603149414, 0.028753503799438477, 0.028590496063232423, 0.02883945655822754, 0.02891619110107422, 0.028808191299438478, 0.028828672409057617, 0.028727296829223634, 0.02877644729614258, 0.02884566307067871, 0.028739744186401368, 0.02851020812988281, 0.028383487701416014, 0.02830668830871582, 0.028379392623901368, 0.028360960006713867, 0.028682464599609374, 0.028618400573730468, 0.028966976165771485, 0.028969535827636717, 0.02890675163269043, 0.028649984359741212, 0.02869625663757324, 0.028883295059204103, 0.028594047546386718, 0.02894038391113281, 0.02897273635864258, 0.02892406463623047, 0.028833951950073242, 0.02880512046813965, 0.02879680061340332, 0.02897318458557129, 0.028872703552246092, 0.028989408493041994, 0.028911615371704103, 0.028776479721069337, 0.028721151351928712, 0.028724416732788086, 0.02891763114929199, 0.02883884811401367, 0.028824960708618164, 0.028742271423339842, 0.028976448059082033, 0.029043392181396486, 0.029106048583984374, 0.029543840408325195, 0.02907811164855957, 0.028729343414306642, 0.0286343994140625, 0.028602943420410157, 0.028340383529663084, 0.028411903381347657, 0.028225439071655273, 0.028490848541259765, 0.02856038475036621, 0.02853619194030762, 0.02869049644470215, 0.02862272071838379, 0.02894921684265137, 0.028800575256347657, 0.028770559310913085, 
0.02863849639892578, 0.0287957763671875, 0.028712095260620116, 0.028862560272216797, 0.028740352630615234, 0.02856755256652832, 0.028823551177978517, 0.028672000885009766, 0.028604415893554686, 0.028736896514892578, 0.02862758445739746, 0.02857369613647461, 0.028618751525878908, 0.02887833595275879, 0.028762304306030273, 0.029015615463256837, 0.028727519989013673, 0.028873151779174804, 0.028893056869506835, 0.029143072128295897, 0.0289814395904541, 0.029022207260131837, 0.029163455963134764, 0.029005887985229493, 0.028835840225219726, 0.0289150390625, 0.028859039306640626, 0.028925952911376954, 0.028848127365112306, 0.02883987236022949, 0.02889251136779785, 0.028849920272827147, 0.028918752670288084, 0.028891136169433593, 0.02918329620361328, 0.029019008636474608, 0.029017280578613282, 0.028863136291503905, 0.028966880798339843, 0.029061119079589845, 0.02902195167541504, 0.029186304092407227, 0.028967039108276367, 0.029066112518310545, 0.029169727325439453, 0.0290696964263916, 0.030300031661987303, 0.02982476806640625, 0.029140928268432616, 0.029006240844726562, 0.02891366386413574, 0.02883785629272461, 0.028815391540527344, 0.02880899238586426, 0.028898815155029296, 0.028975839614868163, 0.028860416412353516, 0.028998815536499023, 0.02880499267578125, 0.02888582420349121, 0.02889132881164551, 0.02886419105529785, 0.02887299156188965, 0.028895231246948243, 0.029106176376342774, 0.0289169921875, 0.028883712768554688, 0.02892799949645996, 0.02913484764099121, 0.028917312622070312, 0.028895679473876952, 0.029148704528808595, 0.029034975051879883, 0.028939807891845703, 0.02921763229370117, 0.02897267150878906, 0.02895788764953613, 0.029045568466186524, 0.02903366470336914, 0.029272127151489257, 0.029052703857421876, 0.02920044708251953, 0.029279136657714845, 0.029310016632080077, 0.029100992202758788, 0.029128704071044922, 0.029156896591186525, 0.028973344802856446, 0.0292476806640625, 0.029109600067138672, 0.029022655487060546, 0.02910406494140625, 0.028958080291748046, 0.028965791702270507, 0.02898246383666992, 0.02912339210510254, 0.02900377655029297, 0.029112319946289062, 0.029287967681884765, 0.029196767807006835, 0.02907686424255371, 0.029119104385375977, 0.02917500877380371, 0.02910492706298828, 0.029256927490234376, 0.02920265579223633, 0.029913663864135742, 0.029069311141967775, 0.029090848922729493, 0.02916579246520996, 0.029654464721679687, 0.029180479049682618, 0.029046335220336915, 0.02875004768371582, 0.0287992000579834, 0.02869455909729004, 0.02883344078063965, 0.028774528503417968, 0.02881711959838867, 0.028834272384643554, 0.028846080780029298, 0.02880102348327637, 0.028819295883178712, 0.028950687408447265, 0.02930073547363281, 0.029157375335693358, 0.028968416213989257, 0.02888377571105957, 0.029079072952270506, 0.029031808853149415, 0.028954591751098633, 0.029205184936523437, 0.02885024070739746, 0.028861919403076173, 0.028840576171875, 0.029073440551757812, 0.028877920150756835, 0.02900057601928711, 0.028923072814941407, 0.028893951416015626, 0.028960832595825194, 0.028948480606079102, 0.028951871871948243, 0.0290695686340332, 0.029000064849853516, 0.029198400497436522, 0.02906118392944336, 0.028971168518066408, 0.028901151657104492, 0.028941631317138672, 0.02892255973815918, 0.028940256118774415, 0.029151264190673827, 0.02901433563232422, 0.028960416793823242, 0.029076896667480468, 0.028965503692626952, 0.02887238311767578, 0.029114688873291016, 0.028993535995483398, 0.028977088928222657, 0.029046848297119142, 0.030750783920288086, 0.029151168823242188, 
0.02912665557861328, 0.029128704071044922, 0.029074623107910157, 0.02911039924621582, 0.02909222412109375, 0.02910972785949707, 0.028988256454467773, 0.029132799148559572, 0.02913689613342285, 0.029949951171875, 0.029222911834716796, 0.029093599319458006, 0.02898150444030762, 0.02885862350463867, 0.029097663879394532, 0.030064416885375977, 0.030179391860961913, 0.0294748477935791, 0.029001951217651367, 0.028983104705810548, 0.02880940818786621, 0.028823423385620116, 0.028833919525146485, 0.029005760192871093, 0.028798624038696288, 0.028752288818359374, 0.029042144775390626, 0.029301279067993163, 0.02897715187072754, 0.02879897689819336, 0.02895564842224121, 0.02888515281677246, 0.02886128044128418, 0.02879692840576172, 0.02893401527404785, 0.028975616455078124, 0.02888991928100586, 0.028857152938842775, 0.029085088729858398, 0.028871456146240235, 0.028854015350341797, 0.02907254409790039, 0.029156255722045898, 0.02895462417602539, 0.02899318313598633, 0.029048736572265626, 0.029071807861328125, 0.029186336517333985, 0.029023519515991213, 0.02914758491516113, 0.029056768417358398, 0.029053184509277345, 0.02913699150085449, 0.028970239639282226, 0.029158016204833985, 0.029049888610839843, 0.028992511749267577, 0.029191871643066407, 0.029085119247436522, 0.02903932762145996, 0.029040191650390627, 0.02908220863342285, 0.02910207939147949, 0.029239295959472656, 0.029306880950927733, 0.029162879943847655, 0.02930735969543457, 0.029093599319458006, 0.029202880859375, 0.02918809509277344, 0.02902969551086426, 0.029130815505981445, 0.029683935165405274, 0.0290849609375, 0.029033056259155275, 0.02896691131591797, 0.02873766326904297, 0.028815071105957032, 0.02862518310546875, 0.02880233573913574, 0.028699359893798827, 0.028688383102416993, 0.02883350372314453, 0.028793119430541993, 0.02883795166015625, 0.02870377540588379, 0.02873641586303711, 0.028878623962402344, 0.028767616271972656, 0.02888515281677246, 0.02872380828857422, 0.02885772705078125, 0.028801759719848632, 0.02890947151184082, 0.028885087966918944, 0.02902835273742676, 0.028876256942749024, 0.028758560180664063, 0.02869798469543457, 0.028987871170043946, 0.028813472747802736, 0.02886172866821289, 0.028789440155029298, 0.028900896072387695, 0.028768287658691407, 0.029108640670776367, 0.029083711624145508, 0.02902016067504883, 0.02891366386413574, 0.028991487503051756, 0.02894643211364746, 0.028962688446044924, 0.028878976821899414, 0.02883967971801758, 0.028987680435180664, 0.02889263916015625, 0.02903286361694336, 0.028903520584106446, 0.028869983673095703, 0.028928415298461914, 0.0289036808013916, 0.02901798439025879, 0.028845760345458986, 0.028938688278198243, 0.028945503234863282, 0.028877248764038087, 0.02896294403076172, 0.029014144897460938, 0.028989311218261718, 0.02897747230529785, 0.029169023513793944, 0.0289716796875, 0.028897184371948242, 0.0290468807220459, 0.029091039657592774, 0.029458623886108398, 0.029130559921264648, 0.028780736923217774, 0.028614656448364258, 0.028663808822631837, 0.02854911994934082, 0.028289024353027343, 0.02821072006225586, 0.028397504806518554, 0.028533279418945314, 0.028743167877197266, 0.028751680374145508, 0.02872547149658203, 0.02872368049621582, 0.028692480087280273, 0.02874163246154785, 0.028694528579711914, 0.028807167053222657, 0.028651424407958984, 0.028536256790161134, 0.028483264923095702, 0.028511199951171875, 0.028814592361450196, 0.028745664596557616, 0.028655839920043946, 0.028868831634521485, 0.0289420166015625, 0.02897990417480469, 0.029009248733520506, 0.02879555130004883, 
0.028702720642089844, 0.028637184143066406, 0.028558431625366212, 0.028445600509643554, 0.028678367614746094, 0.029050655364990234, 0.028970399856567384, 0.028907264709472656, 0.029086559295654298, 0.028915712356567383, 0.02877644729614258, 0.028819072723388673, 0.02898192024230957, 0.028880607604980468, 0.028762111663818358, 0.028821311950683593, 0.028722911834716796, 0.028624767303466796, 0.028756160736083985, 0.028708959579467775, 0.028696895599365235, 0.028788639068603517, 0.02900521659851074, 0.028932735443115233, 0.02880624008178711, 0.028737951278686523, 0.028682815551757813, 0.028690431594848635, 0.029046783447265623, 0.028837568283081056, 0.028739328384399413, 0.028736064910888672, 0.028870399475097657, 0.02960492706298828, 0.02905763244628906, 0.028801376342773438, 0.02858720016479492, 0.028400447845458983, 0.028356704711914062, 0.028344160079956056, 0.028886335372924805, 0.02852236747741699, 0.028738367080688478, 0.028610624313354493, 0.028479488372802734, 0.028486879348754882, 0.028510303497314454, 0.028670656204223634, 0.028916959762573243, 0.028572351455688476, 0.02860041618347168, 0.028654815673828125, 0.028685184478759767, 0.02858163261413574, 0.028790143966674803, 0.028968992233276366, 0.028664447784423827, 0.02890150451660156, 0.028874752044677734, 0.02897635269165039, 0.02879158401489258, 0.02879283142089844, 0.028827648162841796, 0.028921791076660156, 0.02860857582092285, 0.028755008697509767, 0.028938495635986328, 0.028973983764648437, 0.02891097640991211, 0.029006240844726562, 0.029001728057861328, 0.028874752044677734, 0.028868480682373045, 0.028823680877685547, 0.028981311798095703, 0.028841920852661133, 0.028802976608276368, 0.028788415908813477, 0.028828031539916994, 0.028782304763793946, 0.028883520126342772, 0.028875680923461915, 0.029137088775634767, 0.028956384658813478, 0.02888185691833496, 0.028923168182373046, 0.029018815994262696, 0.028902496337890625, 0.02887366485595703, 0.02917580795288086, 0.028997631072998048, 0.028981088638305664, 0.028893280029296874, 0.028962879180908202, 0.029040639877319335, 0.028874752044677734]",tokens/s,34.58069000602176,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 373, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 252, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 115, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 401759 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1065.127936,2086.535168,0.0,1684.013056,1657.439232,s,1,7.45519921875,7.45519921875,0.0,7.45519921875,7.45519921875,7.45519921875,7.45519921875,[7.45519921875],,kWh,3.899260925102984e-06,4.227769502065435e-07,9.558340979820557e-07,5.277871973291583e-06,,MB,1480.019968,2128.478208,0.0,1713.373184,1302.298112,s,10,0.20427526473999022,0.020427526473999022,0.000181124158101468,0.02040280055999756,0.020693957710266112,0.020733874988555907,0.020765808811187746,"[0.02068508720397949, 0.020494752883911133, 0.020363008499145508, 0.020773792266845705, 0.020306976318359374, 0.020234432220458985, 0.020500255584716798, 0.020278079986572266, 0.020196287155151368, 0.02044259262084961]",tokens/s,12532.10956920541,kWh,6.088699619964662e-07,6.714654534626792e-08,4.023226598208358e-07,1.07833916716357e-06,tokens/kWh,237402115.95334566,MB,1512.558592,2174.615552,0.0,1759.510528,1302.300672,s,10,10.855856079101562,1.0855856079101562,0.04383739070754468,1.0720474243164064,1.0968846435546875,1.1561174316406249,1.2035036621093749,"[1.07984716796875, 1.2153502197265624, 1.0762218017578125, 1.0837218017578125, 1.0663973388671875, 1.0739747314453125, 1.068030517578125, 1.0596126708984375, 1.0701201171875, 1.0625797119140625]",tokens/s,58.03319382732082,kWh,3.117450460967395e-05,3.4377673720903604e-06,1.607707941257934e-05,5.068935139434365e-05,tokens/kWh,1242864.591221226,,s,630,10.853343397140495,0.01722752920181032,0.005318540042275602,0.016953071594238284,0.01731399612426758,0.017463842391967774,0.017882740783691405,"[0.017169919967651368, 0.016851455688476562, 0.016827552795410157, 0.017128192901611328, 0.01699385643005371, 0.01699260711669922, 0.016770496368408203, 0.016844831466674804, 0.016760639190673828, 0.01666102409362793, 0.016841087341308594, 0.016842752456665038, 0.0167663688659668, 0.016683616638183595, 0.01684649658203125, 0.017515039443969725, 0.017282016754150392, 0.01712995147705078, 0.017201536178588866, 0.017119232177734374, 0.01723936080932617, 0.017464351654052735, 0.01768822479248047, 0.017408000946044923, 0.01718217658996582, 0.017033855438232423, 0.018318592071533205, 0.017477439880371093, 0.017023456573486327, 0.01692915153503418, 0.01701593589782715, 0.01725644874572754, 0.017418304443359376, 0.01730137634277344, 0.017435583114624023, 0.017152000427246093, 0.017307647705078123, 0.017033472061157226, 0.01722956848144531, 0.017405088424682618, 0.017261407852172853, 0.01716223907470703, 0.017030208587646485, 0.01787104034423828, 0.017150623321533203, 0.017125471115112305, 0.016940351486206054, 0.016867935180664064, 0.016855039596557618, 0.01684614372253418, 0.01701321601867676, 0.017527103424072266, 0.017315839767456053, 0.017202783584594726, 
0.017207456588745118, 0.01710691261291504, 0.017027360916137695, 0.017082368850708008, 0.017057727813720704, 0.016977983474731444, 0.0170700798034668, 0.017194368362426757, 0.017148544311523437, 0.016906816482543944, 0.01701068878173828, 0.017219263076782225, 0.017154367446899414, 0.017106943130493164, 0.017079679489135743, 0.01706662368774414, 0.01704960060119629, 0.01718272018432617, 0.01724825668334961, 0.01734864044189453, 0.0171144962310791, 0.01700924873352051, 0.15035145568847658, 0.017785247802734376, 0.017514463424682616, 0.017256479263305664, 0.01728483200073242, 0.01705219268798828, 0.017329919815063478, 0.017301248550415038, 0.017242368698120118, 0.017182655334472656, 0.017391679763793945, 0.017114240646362303, 0.017105056762695314, 0.01816649627685547, 0.017739776611328126, 0.01710611152648926, 0.01767500877380371, 0.017059904098510742, 0.017047456741333008, 0.016982112884521484, 0.016871423721313478, 0.01684828758239746, 0.016822879791259765, 0.01682156753540039, 0.01698636817932129, 0.01686342430114746, 0.01681455993652344, 0.01699955177307129, 0.01682499122619629, 0.01680793571472168, 0.016898048400878905, 0.01741004753112793, 0.02127462387084961, 0.017347936630249024, 0.017145856857299805, 0.017117855072021484, 0.016920576095581053, 0.016945152282714843, 0.01683456039428711, 0.01692288017272949, 0.0169039363861084, 0.01697587203979492, 0.017026687622070314, 0.016990591049194335, 0.01696950340270996, 0.016933088302612306, 0.01683660888671875, 0.017082368850708008, 0.016874559402465822, 0.01685603141784668, 0.016648191452026367, 0.017111040115356444, 0.01746124839782715, 0.017334272384643554, 0.01746329689025879, 0.017252351760864256, 0.01780905532836914, 0.017482080459594727, 0.017313791275024415, 0.017507808685302734, 0.017021472930908204, 0.016969440460205078, 0.01692291259765625, 0.01682431983947754, 0.01679974365234375, 0.017014080047607422, 0.01689616012573242, 0.016775711059570313, 0.016713727951049806, 0.016912384033203123, 0.017333824157714842, 0.016851680755615234, 0.016788608551025392, 0.016781919479370116, 0.01697395133972168, 0.017049280166625977, 0.01684649658203125, 0.017005088806152344, 0.016931135177612303, 0.016786848068237305, 0.01686457633972168, 0.01687446403503418, 0.017076032638549805, 0.016963775634765626, 0.017051904678344727, 0.017198976516723634, 0.017143680572509767, 0.017303552627563477, 0.01696931266784668, 0.01703596878051758, 0.017071935653686525, 0.01743657684326172, 0.01729475212097168, 0.017168319702148438, 0.017112928390502928, 0.016874303817749025, 0.016927743911743166, 0.016902240753173828, 0.016874399185180664, 0.017016223907470703, 0.01691913604736328, 0.01692025566101074, 0.01697823905944824, 0.01721548843383789, 0.017535135269165038, 0.01730953598022461, 0.017237695693969726, 0.01724448013305664, 0.017173728942871093, 0.016919328689575196, 0.016987968444824218, 0.017292512893676757, 0.01746428871154785, 0.017014463424682616, 0.01760185623168945, 0.017288192749023438, 0.017438720703125, 0.01740924835205078, 0.017367839813232422, 0.01744691276550293, 0.017622400283813476, 0.017433216094970703, 0.017708799362182618, 0.01726192092895508, 0.017107872009277342, 0.017229824066162108, 0.01704550361633301, 0.017346176147460937, 0.017355264663696288, 0.01757375907897949, 0.01696348762512207, 0.01804092788696289, 0.017093856811523436, 0.016998239517211914, 0.017091520309448244, 0.017119232177734374, 0.016904224395751954, 0.016901279449462892, 0.016899904251098632, 0.016921600341796874, 0.016953407287597658, 0.017031103134155273, 
0.017049503326416016, 0.0170533447265625, 0.01707257652282715, 0.017092639923095704, 0.016981983184814455, 0.016910335540771485, 0.016954399108886718, 0.017165279388427733, 0.0171909122467041, 0.017048831939697265, 0.016990848541259766, 0.017057695388793946, 0.017184768676757813, 0.017119455337524413, 0.017381088256835937, 0.017234207153320313, 0.017106880187988283, 0.017035327911376952, 0.01718169593811035, 0.017505279541015627, 0.017184576034545897, 0.017171871185302733, 0.017107744216918946, 0.016970815658569335, 0.017038272857666015, 0.017123327255249024, 0.01713363265991211, 0.017045440673828124, 0.017156095504760743, 0.017297664642333985, 0.017527839660644532, 0.017640159606933593, 0.01730512046813965, 0.017283008575439452, 0.016967519760131836, 0.01701215934753418, 0.01692950439453125, 0.016986112594604492, 0.01728102493286133, 0.017239551544189453, 0.017218048095703126, 0.01718681526184082, 0.017452064514160155, 0.017248544692993164, 0.01701139259338379, 0.016973247528076173, 0.016910560607910158, 0.016861536026000976, 0.017043455123901367, 0.016871360778808592, 0.01687558364868164, 0.016906240463256835, 0.01678233528137207, 0.016905216217041014, 0.016873472213745116, 0.01694915199279785, 0.01689743995666504, 0.016886463165283205, 0.016920127868652345, 0.016935359954833983, 0.01683456039428711, 0.01682841682434082, 0.016926719665527345, 0.016928768157958983, 0.016917760848999024, 0.01683443260192871, 0.01693324851989746, 0.01680348777770996, 0.01684556770324707, 0.017018047332763672, 0.016712831497192382, 0.016723743438720705, 0.01701478385925293, 0.01683456039428711, 0.01679350471496582, 0.016990207672119142, 0.01694441604614258, 0.01697670364379883, 0.016963327407836914, 0.017010751724243163, 0.016951488494873046, 0.01682636833190918, 0.01677542304992676, 0.016817375183105467, 0.01689651107788086, 0.016895103454589843, 0.016831071853637695, 0.01683692741394043, 0.01690630340576172, 0.016764863967895508, 0.016785408020019533, 0.016752639770507814, 0.016860448837280273, 0.016675296783447265, 0.01676313591003418, 0.01675254440307617, 0.01710851287841797, 0.01670796775817871, 0.016633983612060546, 0.0167587833404541, 0.017036575317382813, 0.016914783477783205, 0.016769376754760743, 0.016885791778564453, 0.01701888084411621, 0.017119232177734374, 0.01694102478027344, 0.01684899139404297, 0.01684883117675781, 0.016952735900878906, 0.017003103256225584, 0.016838048934936522, 0.017010528564453124, 0.017152767181396484, 0.016982015609741212, 0.01687993621826172, 0.01702195167541504, 0.01703945541381836, 0.01692323112487793, 0.01721356773376465, 0.01705779266357422, 0.017064960479736328, 0.01704230308532715, 0.01700601577758789, 0.016978015899658205, 0.01697123146057129, 0.017177600860595704, 0.01785651206970215, 0.01718681526184082, 0.01718307113647461, 0.017311199188232422, 0.020876895904541014, 0.01714236831665039, 0.016973024368286134, 0.01715718460083008, 0.01701273536682129, 0.01691004753112793, 0.01682022476196289, 0.016782400131225585, 0.016826623916625975, 0.016732864379882813, 0.01675628852844238, 0.01695939254760742, 0.017193376541137697, 0.017018527984619142, 0.017072608947753906, 0.016930816650390625, 0.01711631965637207, 0.01697007942199707, 0.01703167915344238, 0.016985183715820314, 0.016931743621826173, 0.016945152282714843, 0.016926624298095702, 0.016930912017822267, 0.017006591796875, 0.016848447799682618, 0.01683296012878418, 0.016730112075805666, 0.01695120048522949, 0.01681407928466797, 0.016832735061645506, 0.016793056488037108, 0.016909791946411134, 
0.01685180854797363, 0.016910335540771485, 0.016748863220214842, 0.01676255989074707, 0.016751903533935547, 0.016804576873779297, 0.016761856079101564, 0.01678643226623535, 0.016846847534179688, 0.016934911727905275, 0.01685091209411621, 0.01697952079772949, 0.017095136642456054, 0.016915807723999022, 0.016882335662841797, 0.016920671463012696, 0.01705120086669922, 0.017144159317016603, 0.01706598472595215, 0.017836032867431642, 0.017090560913085938, 0.016945024490356446, 0.017096223831176757, 0.016994911193847655, 0.017098623275756834, 0.016941375732421875, 0.01695033645629883, 0.01688400077819824, 0.016830944061279298, 0.01680793571472168, 0.016920576095581053, 0.016961536407470702, 0.016952512741088867, 0.01679555130004883, 0.016841663360595702, 0.016805280685424806, 0.01672422409057617, 0.01683488082885742, 0.016891904830932617, 0.017073152542114257, 0.01686016082763672, 0.017390975952148436, 0.01687958335876465, 0.016886175155639647, 0.01677542304992676, 0.017101823806762697, 0.01712998390197754, 0.017103359222412108, 0.016920576095581053, 0.016967872619628906, 0.016774175643920898, 0.016909088134765625, 0.017102848052978514, 0.017111040115356444, 0.017235679626464842, 0.01726652717590332, 0.016882112503051758, 0.016926399230957033, 0.01687555122375488, 0.016814687728881835, 0.016934911727905275, 0.016612863540649413, 0.016929311752319334, 0.01683247947692871, 0.016857088088989256, 0.016806079864501954, 0.01687555122375488, 0.016850719451904295, 0.016846399307250976, 0.016742847442626954, 0.016651872634887696, 0.01732236862182617, 0.016897567749023436, 0.016703296661376953, 0.01677996826171875, 0.016742399215698242, 0.016871423721313478, 0.01697587203979492, 0.016908159255981447, 0.0168855037689209, 0.016750112533569336, 0.016804288864135743, 0.016777599334716797, 0.016820064544677736, 0.016640192031860353, 0.01666662406921387, 0.016741439819335936, 0.01672524833679199, 0.016727519989013673, 0.016692832946777345, 0.016658111572265624, 0.016644800186157226, 0.016756704330444336, 0.01676700782775879, 0.016650400161743163, 0.016606304168701173, 0.016673696517944335, 0.016660320281982423, 0.017448991775512696, 0.016859359741210937, 0.01679769515991211, 0.016906240463256835, 0.017217727661132814, 0.016979232788085937, 0.016764736175537108, 0.017001184463500976, 0.01680214309692383, 0.01698134422302246, 0.016746688842773437, 0.016692384719848633, 0.016722047805786133, 0.01788751983642578, 0.016742431640625, 0.01665190315246582, 0.016687744140625, 0.016709888458251953, 0.0167158088684082, 0.016677984237670897, 0.016707616806030272, 0.016677759170532228, 0.016680959701538087, 0.016721471786499024, 0.01665567970275879, 0.016620384216308594, 0.01661756706237793, 0.016807424545288087, 0.017083999633789062, 0.017154272079467774, 0.017033920288085938, 0.016918527603149415, 0.016867328643798828, 0.01694063949584961, 0.01693145561218262, 0.01681795120239258, 0.01683456039428711, 0.01679974365234375, 0.01723391914367676, 0.01679769515991211, 0.016809152603149413, 0.016812864303588866, 0.01680384063720703, 0.016941055297851563, 0.016728063583374024, 0.016970048904418944, 0.017079776763916015, 0.017192575454711916, 0.01714441680908203, 0.017274879455566407, 0.017221792221069336, 0.017063776016235353, 0.016996160507202148, 0.01694099235534668, 0.016855295181274415, 0.01763532829284668, 0.016928768157958983, 0.0168407039642334, 0.0172576961517334, 0.01696233558654785, 0.016955392837524414, 0.017007999420166016, 0.01696374320983887, 0.016862720489501954, 0.016923007965087892, 0.01719152069091797, 
0.016903615951538085, 0.016835136413574217, 0.016918527603149415, 0.01681612777709961, 0.01684787178039551, 0.016767999649047852, 0.016947200775146484, 0.0167542724609375, 0.016744863510131835, 0.01680384063720703, 0.01683251190185547, 0.01696767997741699, 0.01742848014831543, 0.017354175567626952, 0.017230400085449217, 0.017159168243408202, 0.017052671432495118, 0.01719910430908203, 0.017334272384643554, 0.01720832061767578, 0.017257280349731445, 0.016795711517333986, 0.016797887802124024, 0.01672403144836426, 0.016688575744628908, 0.01673695945739746, 0.016739744186401367, 0.01674697685241699, 0.016785408020019533, 0.016711328506469728, 0.0168123836517334, 0.016913759231567384, 0.016894624710083007, 0.016838527679443358, 0.016680543899536132, 0.016629791259765624, 0.016666175842285157, 0.01673209571838379, 0.016933887481689454, 0.01677107238769531, 0.016703487396240235, 0.01699398422241211, 0.01678950309753418, 0.01681439971923828, 0.016943103790283204, 0.017043455123901367, 0.017126848220825195, 0.017254976272583007, 0.017225183486938477, 0.017150495529174806, 0.01743791961669922, 0.017023296356201173, 0.016904672622680663, 0.016900096893310547, 0.017072128295898437, 0.017213504791259767, 0.017156032562255858, 0.017068031311035157, 0.016936960220336913, 0.01685305595397949, 0.016784864425659178, 0.016771551132202148, 0.01671504020690918, 0.016818912506103515, 0.016756736755371093, 0.016704704284667967, 0.01671660804748535, 0.016695295333862305, 0.016879615783691407, 0.01721343994140625, 0.016977567672729493, 0.016882015228271485, 0.016910335540771485, 0.01686252784729004, 0.016771360397338866, 0.016769439697265624, 0.01678950309753418, 0.016745792388916016, 0.016732864379882813, 0.01682054328918457, 0.016707263946533202, 0.01671743965148926, 0.01667417526245117, 0.016706560134887697]",tokens/s,58.046629222658176,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1066.5984,2086.535168,0.0,1684.013056,1657.439232,s,1,7.227865234375,7.227865234375,0.0,7.227865234375,7.227865234375,7.227865234375,7.227865234375,[7.227865234375],,kWh,3.953093666708203e-06,4.288417980124362e-07,2.047779416008977e-06,6.429714880729617e-06,,MB,1345.384448,2126.381056,0.0,1713.373184,1302.298112,s,10,0.9795404205322266,0.09795404205322265,0.0026660330366253377,0.09715760040283203,0.09895260543823242,0.10234006462097167,0.10505003196716309,"[0.10572752380371093, 0.09709158325195312, 0.09660508728027344, 0.09574070739746093, 0.09703462219238282, 0.0968575668334961, 0.09783545684814453, 0.09722361755371094, 0.09722441864013671, 0.09819983673095703]",tokens/s,2613.470507535607,kWh,3.2444591076845996e-06,3.5770122721364165e-07,2.0875356206444417e-06,5.689695955542682e-06,tokens/kWh,44993616.882219285,MB,1370.365952,2172.5184,0.0,1759.510528,1302.300672,s,10,9.905968566894531,0.9905968566894531,0.035761010236919154,0.9786603698730468,0.9977132446289062,1.0475537658691405,1.087426182861328,"[0.97908056640625, 1.097394287109375, 0.9866375732421875, 0.9804755249023438, 0.9744963989257812, 0.9770191650390625, 0.9782401733398437, 0.976370849609375, 0.9814755859375, 0.9747784423828125]",tokens/s,63.59802130863228,kWh,2.835047755232187e-05,3.1267146137032974e-06,1.494906134195796e-05,4.642625350798313e-05,tokens/kWh,1356990.8239347155,,s,630,9.903568740844726,0.015719950382293214,0.004964644515349483,0.015464640140533448,0.01579123497009277,0.015890094137191772,0.01656889644622803,"[0.015617216110229492, 0.015501919746398926, 0.015302783966064453, 0.015482879638671876, 0.015351807594299317, 0.015590463638305664, 0.015367072105407715, 0.015564191818237304, 0.015689855575561525, 0.015620223999023438, 0.01565734386444092, 0.015575039863586425, 0.01594332790374756, 0.015601183891296387, 0.016353151321411134, 0.01592416000366211, 0.015818495750427247, 0.015572640419006347, 0.015737759590148927, 0.015500384330749512, 0.015559040069580077, 0.01540937614440918, 0.015638175964355468, 0.015468416213989257, 0.015499839782714844, 0.01553587245941162, 0.015435935974121094, 0.015411264419555664, 0.015532095909118653, 0.015367263793945313, 0.01519696044921875, 0.016631807327270508, 0.015441920280456543, 0.0152838716506958, 0.01538697624206543, 0.015378432273864746, 0.01543564796447754, 0.015378560066223144, 0.01528831958770752, 0.015519743919372558, 0.015378591537475585, 0.01585750389099121, 0.015767552375793456, 0.0158535680770874, 0.015673343658447265, 0.01548083209991455, 0.015423135757446289, 0.01550704002380371, 0.015948415756225586, 0.015452287673950195, 0.015601504325866699, 0.015372447967529297, 0.01529792022705078, 0.015270527839660644, 0.01533238410949707, 0.015536224365234375, 0.015356800079345704, 0.015336735725402832, 0.015315679550170899, 
0.01540236759185791, 0.01534220790863037, 0.015450112342834473, 0.01532528018951416, 0.015526080131530761, 0.015383135795593262, 0.015355903625488282, 0.01539891242980957, 0.015310848236083984, 0.015319007873535156, 0.015374367713928222, 0.015333375930786132, 0.015182847976684571, 0.015303680419921875, 0.015321215629577637, 0.01537936019897461, 0.015222944259643555, 0.1400635528564453, 0.01594700813293457, 0.015504128456115722, 0.015323136329650879, 0.015357952117919921, 0.015371264457702637, 0.015364095687866211, 0.01520150375366211, 0.01582876777648926, 0.015349184036254883, 0.01540563201904297, 0.015532032012939453, 0.015876095771789552, 0.015441375732421875, 0.015483039855957031, 0.01529651165008545, 0.01549459171295166, 0.015438752174377441, 0.015507488250732422, 0.015826335906982424, 0.015596128463745118, 0.015403008460998536, 0.01579212760925293, 0.015308799743652344, 0.015418399810791015, 0.015258591651916504, 0.015386719703674317, 0.01534505558013916, 0.015260160446166992, 0.015277600288391114, 0.01536633586883545, 0.015431072235107422, 0.015270784378051757, 0.015271391868591309, 0.015241663932800293, 0.01542972755432129, 0.015226880073547363, 0.015243264198303222, 0.015330304145812988, 0.01526204776763916, 0.015555232048034669, 0.015589247703552246, 0.01587827205657959, 0.01562393569946289, 0.015728896141052245, 0.015540384292602539, 0.01563219165802002, 0.01553769588470459, 0.015442432403564453, 0.015462400436401368, 0.01613337516784668, 0.016126720428466797, 0.015705951690673826, 0.01579648017883301, 0.015591423988342285, 0.015574175834655762, 0.015464672088623047, 0.015468640327453613, 0.015309344291687012, 0.01665433692932129, 0.016635360717773436, 0.015723039627075195, 0.015565983772277832, 0.015612768173217774, 0.015378432273864746, 0.015320416450500488, 0.015387295722961426, 0.015447744369506836, 0.015495488166809082, 0.015620287895202636, 0.015662912368774415, 0.015597344398498535, 0.015611455917358399, 0.015575712203979492, 0.01586956787109375, 0.01555673599243164, 0.015476991653442383, 0.015520832061767579, 0.015515999794006348, 0.015529855728149414, 0.01634787178039551, 0.016565919876098633, 0.01551734447479248, 0.015411904335021973, 0.01539891242980957, 0.015403008460998536, 0.015659008026123047, 0.015876095771789552, 0.01565622425079346, 0.015571680068969727, 0.015624192237854004, 0.0158535680770874, 0.015814656257629393, 0.01562009620666504, 0.015490592002868652, 0.015550944328308105, 0.015527487754821778, 0.015475071907043458, 0.015437503814697266, 0.015480928421020508, 0.015560992240905762, 0.01544591999053955, 0.015618144035339355, 0.015796223640441896, 0.015648223876953124, 0.01576838397979736, 0.01567859172821045, 0.015825599670410157, 0.015727999687194825, 0.015656543731689454, 0.015514016151428223, 0.015411744117736817, 0.015541279792785644, 0.016158720016479493, 0.015883423805236815, 0.015757311820983888, 0.015717215538024902, 0.016013280868530273, 0.016426591873168944, 0.0159903678894043, 0.015786848068237304, 0.01567961597442627, 0.01561996841430664, 0.015537280082702637, 0.015739775657653807, 0.015427552223205566, 0.015329312324523926, 0.015381600379943848, 0.015756128311157226, 0.015550527572631836, 0.015452159881591796, 0.015599616050720215, 0.015857664108276368, 0.015379712104797364, 0.01535366439819336, 0.01535206413269043, 0.015335359573364259, 0.01536076831817627, 0.015583231925964355, 0.01546444797515869, 0.015236960411071778, 0.015345824241638184, 0.015491071701049805, 0.015363712310791016, 0.015395199775695801, 0.015396960258483887, 
0.01526921558380127, 0.01529206371307373, 0.015258784294128419, 0.015334560394287109, 0.015397472381591797, 0.015503359794616698, 0.015597311973571778, 0.01572275161743164, 0.015708160400390626, 0.01580025577545166, 0.01563849639892578, 0.015627424240112306, 0.015870176315307617, 0.015743712425231933, 0.01570147228240967, 0.015786527633666992, 0.015660991668701173, 0.015504608154296875, 0.015413311958312989, 0.015627039909362792, 0.015501312255859375, 0.01563795185089111, 0.015427871704101563, 0.015347647666931152, 0.015548768043518067, 0.015262816429138184, 0.015291296005249023, 0.015334495544433594, 0.015388863563537598, 0.015319456100463867, 0.01554793643951416, 0.015394528388977051, 0.015470751762390137, 0.015338080406188965, 0.015388671875, 0.015382464408874513, 0.015577152252197265, 0.015511839866638184, 0.015642335891723633, 0.015701087951660156, 0.01562921619415283, 0.015607999801635743, 0.01532703971862793, 0.015337471961975097, 0.015699968338012696, 0.015384127616882324, 0.015404735565185547, 0.015368960380554198, 0.015245311737060547, 0.015304991722106934, 0.015380191802978515, 0.015236127853393554, 0.015429920196533203, 0.015305407524108887, 0.015220447540283203, 0.015277664184570312, 0.015286975860595704, 0.015303903579711915, 0.015334176063537599, 0.015223999977111817, 0.015246463775634765, 0.015233887672424316, 0.01532096004486084, 0.015373279571533203, 0.015387711524963378, 0.01533024024963379, 0.015824895858764648, 0.01583513641357422, 0.015900671958923338, 0.015718048095703124, 0.01568342399597168, 0.015599552154541016, 0.015451744079589843, 0.015328224182128907, 0.015406687736511231, 0.015394463539123535, 0.01534438419342041, 0.015363391876220702, 0.015370944023132323, 0.01559273624420166, 0.015510239601135255, 0.015345664024353027, 0.01559552001953125, 0.01562828826904297, 0.015630335807800294, 0.015731871604919432, 0.01569603157043457, 0.015581024169921876, 0.015629152297973632, 0.015542271614074708, 0.015473664283752442, 0.015428607940673827, 0.015499168395996094, 0.01625497627258301, 0.01580851173400879, 0.016014688491821288, 0.01569276809692383, 0.015591103553771973, 0.015355135917663574, 0.015401151657104492, 0.015395232200622559, 0.015429792404174804, 0.015416704177856445, 0.01555519962310791, 0.015448063850402831, 0.015357952117919921, 0.015391776084899902, 0.015502304077148437, 0.01575887966156006, 0.015398976325988769, 0.015415712356567383, 0.015621376037597657, 0.01575209617614746, 0.015480671882629395, 0.015688032150268556, 0.015455904006958008, 0.015339520454406739, 0.015329279899597169, 0.015435296058654785, 0.015407584190368652, 0.015667455673217773, 0.0157959680557251, 0.015616000175476074, 0.015471903800964356, 0.015567392349243163, 0.015796416282653807, 0.015895551681518554, 0.015683839797973633, 0.015825663566589356, 0.015584480285644532, 0.015482975959777831, 0.015419936180114746, 0.01538268756866455, 0.015298080444335938, 0.015286080360412598, 0.015253631591796876, 0.01534825611114502, 0.015425503730773926, 0.015464608192443847, 0.015412544250488281, 0.015318911552429199, 0.015321120262145996, 0.015247072219848633, 0.015381440162658692, 0.015347711563110352, 0.015404352188110352, 0.015489727973937988, 0.015437600135803222, 0.015449728012084961, 0.015450719833374024, 0.015530112266540528, 0.01538646411895752, 0.015415328025817871, 0.015517696380615235, 0.015389823913574218, 0.015313119888305663, 0.01547334384918213, 0.015433279991149903, 0.01570201587677002, 0.015331744194030762, 0.015285856246948242, 0.015940032005310058, 0.015259648323059082, 
0.015267840385437012, 0.015321248054504395, 0.015406559944152833, 0.015339903831481934, 0.01553606414794922, 0.015428768157958985, 0.015335583686828613, 0.015402751922607422, 0.015227904319763183, 0.015342752456665039, 0.015395680427551269, 0.015366144180297851, 0.015269887924194337, 0.015196063995361327, 0.015230303764343261, 0.015302751541137695, 0.01530947208404541, 0.015309056282043458, 0.015385567665100098, 0.015717151641845704, 0.015818400382995604, 0.015759231567382813, 0.015753151893615723, 0.015700511932373047, 0.015675392150878906, 0.015507424354553222, 0.015341600418090821, 0.015392767906188964, 0.015304479598999023, 0.015341856002807618, 0.015316703796386718, 0.015364255905151367, 0.015278143882751465, 0.0157327356338501, 0.01547430419921875, 0.015436160087585448, 0.015435775756835938, 0.015386624336242676, 0.015324704170227051, 0.015605631828308106, 0.015372896194458007, 0.015433728218078613, 0.015359999656677246, 0.01529651165008545, 0.015332511901855469, 0.015274847984313965, 0.015325183868408204, 0.015325183868408204, 0.015362048149108886, 0.015380096435546876, 0.015432064056396484, 0.015953215599060058, 0.01558944034576416, 0.018631296157836916, 0.01713260841369629, 0.016313568115234375, 0.015924991607666014, 0.015612159729003906, 0.015314944267272949, 0.015390048027038574, 0.015345824241638184, 0.01538099193572998, 0.015562368392944335, 0.01541158390045166, 0.015275584220886231, 0.015558303833007813, 0.015430015563964844, 0.015290271759033204, 0.01555622386932373, 0.015466591835021973, 0.015364255905151367, 0.015280384063720703, 0.015252927780151367, 0.015283136367797852, 0.015308287620544434, 0.01527353572845459, 0.015348671913146972, 0.015580639839172364, 0.01548953628540039, 0.015400992393493653, 0.015433728218078613, 0.015413248062133789, 0.015468192100524903, 0.015419743537902831, 0.015695391654968263, 0.015679967880249022, 0.015629952430725097, 0.015731072425842284, 0.01547059154510498, 0.015422623634338379, 0.015391519546508789, 0.015607711791992187, 0.015540384292602539, 0.01546444797515869, 0.01560489559173584, 0.015719264030456544, 0.015674880027770996, 0.015731200218200684, 0.015695520401000976, 0.015638879776000977, 0.015458368301391601, 0.015355839729309081, 0.015599871635437012, 0.015504128456115722, 0.015578111648559571, 0.015584863662719727, 0.015540639877319335, 0.015611040115356445, 0.015488127708435058, 0.015651840209960938, 0.015559391975402832, 0.01552998447418213, 0.015609248161315918, 0.015479392051696777, 0.01546444797515869, 0.015357024192810058, 0.015362560272216797, 0.015384384155273437, 0.015473247528076172, 0.016324607849121094, 0.016051551818847657, 0.01558579158782959, 0.01562435245513916, 0.015708160400390626, 0.015791135787963866, 0.01581769561767578, 0.015649791717529296, 0.015633407592773436, 0.015509504318237305, 0.015583040237426758, 0.015470784187316894, 0.015697248458862306, 0.01568835163116455, 0.015484928131103515, 0.01556275177001953, 0.015550463676452637, 0.015351424217224121, 0.01530303955078125, 0.015431679725646973, 0.015325311660766602, 0.015591296195983886, 0.015454208374023438, 0.01549721622467041, 0.015492575645446777, 0.015477279663085937, 0.015507455825805663, 0.01547059154510498, 0.015378432273864746, 0.015374303817749024, 0.01565875244140625, 0.015462688446044922, 0.015575039863586425, 0.0157673282623291, 0.015511775970458984, 0.015430815696716308, 0.015489888191223145, 0.01561961555480957, 0.015817184448242188, 0.0158023681640625, 0.015734016418457033, 0.0156364164352417, 0.015789119720458985, 0.015546112060546876, 
0.015558655738830567, 0.015622143745422363, 0.015812607765197755, 0.015849472045898438, 0.01570406436920166, 0.015525888442993165, 0.01536348819732666, 0.015790559768676757, 0.015554688453674316, 0.015375679969787597, 0.015379136085510253, 0.015366144180297851, 0.015341567993164062, 0.015458304405212403, 0.01542467212677002, 0.015399040222167969, 0.015743712425231933, 0.015379648208618164, 0.015358783721923829, 0.015900927543640135, 0.015720895767211914, 0.015534079551696778, 0.015564352035522462, 0.015675488471984862, 0.01546275234222412, 0.015456255912780761, 0.015405055999755859, 0.01569587230682373, 0.01564793586730957, 0.01590726375579834, 0.015746560096740723, 0.015522336006164551, 0.015453984260559082, 0.015354175567626954, 0.015505855560302734, 0.015547712326049805, 0.015370752334594727, 0.015374336242675781, 0.015329279899597169, 0.01547059154510498, 0.015388640403747558, 0.015350815773010253, 0.015309823989868163, 0.015550463676452637, 0.015525888442993165, 0.015350815773010253, 0.015557600021362304, 0.0156364803314209, 0.01530675220489502, 0.015340959548950196, 0.015472928047180175, 0.015324480056762695, 0.015428607940673827, 0.015347647666931152, 0.015398624420166016, 0.01538697624206543, 0.015322848320007324, 0.01539305591583252, 0.015382528305053711, 0.01541750431060791, 0.015410335540771484, 0.01527843189239502, 0.015298336029052735, 0.016570112228393555, 0.015352607727050781, 0.015271967887878417, 0.01543984031677246, 0.015318719863891602, 0.01536240005493164, 0.015299903869628905, 0.015260671615600586, 0.015515328407287598, 0.015411199569702149, 0.01540022373199463, 0.015520352363586426, 0.015462464332580566, 0.015454272270202636, 0.015301664352416993, 0.015387616157531737, 0.015382528305053711, 0.015664671897888182, 0.015339327812194823]",tokens/s,63.613432337953775,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in 
load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,804.74112,748.552192,0.0,346.03008,335.0016,s,1,9.1871630859375,9.1871630859375,0.0,9.1871630859375,9.1871630859375,9.1871630859375,9.1871630859375,[9.1871630859375],,kWh,2.8567122791689787e-06,3.079291074817086e-07,9.336118579989991e-07,4.0982532446496866e-06,,MB,1265.68448,773.718016,0.0,358.612992,327.706112,s,19,0.20090147399902344,0.010573761789422287,0.0001413961679397895,0.0105283203125,0.010731853103637695,0.01086082887649536,0.010896057224273682,"[0.010687583923339844, 0.01048419189453125, 0.01051955223083496, 0.010904864311218261, 0.010443008422851563, 0.010371199607849121, 0.010491616249084472, 0.010658880233764649, 0.010487744331359863, 0.010477215766906738, 0.010536191940307617, 0.010529376029968262, 0.01070083236694336, 0.0105283203125, 0.010855936050415039, 0.010685888290405274, 0.010490431785583496, 0.010650527954101563, 0.010398112297058105]",tokens/s,24210.872639110865,kWh,3.0991780366684737e-07,3.417809122355602e-08,1.85370223785563e-07,5.294661186759664e-07,tokens/kWh,483505914.6752923,MB,1298.223104,790.495232,0.0,375.390208,327.708672,s,19,10.306522888183595,0.5424485730622944,0.005714890427987214,0.5401807861328125,0.5510193237304688,0.553623681640625,0.55465876953125,"[0.538726806640625, 0.5467258911132813, 0.5417543334960937, 0.5460720825195312, 0.5351142578125, 0.5383952026367187, 0.5372440795898438, 0.5401807861328125, 0.5387529296875, 0.5379505615234375, 0.5453820190429688, 0.5534799194335938, 0.5504041748046875, 0.5549175415039063, 
0.5458282470703125, 0.5393392333984375, 0.5380530395507812, 0.535457275390625, 0.5427445068359374]",tokens/s,116.14004189253369,kWh,1.5465539467824624e-05,1.7051630450106126e-06,6.487515231477606e-06,2.3658217744312835e-05,tokens/kWh,2662922.4855766864,,s,1197,10.296777518272412,0.00860215331518162,0.00024326840869998742,0.008539168357849122,0.008879897689819337,0.008992928123474122,0.009474801483154296,"[0.008724767684936524, 0.008839167594909669, 0.008801535606384277, 0.008917568206787109, 0.00862758445739746, 0.008672063827514648, 0.008447199821472169, 0.008382719993591308, 0.008614432334899903, 0.008439807891845704, 0.008379903793334961, 0.008707712173461914, 0.00842841625213623, 0.008386560440063476, 0.008446016311645509, 0.008784000396728515, 0.0084071683883667, 0.00852950382232666, 0.008420543670654297, 0.008395744323730468, 0.008583359718322754, 0.008447903633117676, 0.008424480438232422, 0.008712384223937988, 0.009497152328491212, 0.00974240016937256, 0.008607647895812988, 0.008472672462463379, 0.008456192016601562, 0.008701120376586914, 0.008476991653442383, 0.008458239555358887, 0.008425984382629394, 0.008431167602539062, 0.008474656105041504, 0.008393247604370118, 0.008353440284729004, 0.00841164779663086, 0.008363807678222656, 0.008336511611938477, 0.00849731159210205, 0.00841817569732666, 0.008531776428222657, 0.008447967529296874, 0.008527135848999024, 0.00850607967376709, 0.008461376190185547, 0.00845305633544922, 0.008439200401306152, 0.008460543632507323, 0.008456543922424316, 0.008438048362731933, 0.00850879955291748, 0.008437088012695312, 0.008347935676574707, 0.008438495635986329, 0.008389984130859375, 0.008404864311218261, 0.008430591583251953, 0.008469535827636718, 0.00907545566558838, 0.008480992317199707, 0.008496928215026855, 0.008213567733764648, 0.0084235200881958, 0.008405856132507324, 0.008423423767089844, 0.008480287551879883, 0.008392319679260254, 0.008464960098266602, 0.008444191932678223, 0.008660160064697265, 0.008403615951538086, 0.008439040184020996, 0.008499808311462402, 0.00850483226776123, 0.00849193572998047, 0.008490912437438965, 0.008517631530761719, 0.008507391929626466, 0.008495103836059571, 0.00850268840789795, 0.008665696144104004, 0.009002528190612792, 0.008712160110473632, 0.008640095710754395, 0.008586112022399903, 0.008589344024658203, 0.008517727851867676, 0.00850432014465332, 0.008514783859252929, 0.008592479705810547, 0.008511967658996582, 0.008560768127441406, 0.008926624298095704, 0.009902848243713378, 0.008878144264221191, 0.009097311973571777, 0.008780863761901856, 0.00872051239013672, 0.008849920272827149, 0.008960000038146973, 0.00894156837463379, 0.00894486427307129, 0.008942496299743653, 0.008941344261169434, 0.00874505615234375, 0.008710304260253906, 0.00868131160736084, 0.008704095840454102, 0.008748767852783203, 0.008843744277954101, 0.009025247573852539, 0.00884233570098877, 0.00867420768737793, 0.008737024307250976, 0.00883894443511963, 0.008936832427978516, 0.008962656021118164, 0.008804351806640624, 0.008687616348266602, 0.008525440216064453, 0.008472991943359374, 0.008435680389404296, 0.008428735733032227, 0.008387359619140625, 0.008071167945861817, 0.008454208374023437, 0.008550335884094238, 0.009352448463439942, 0.008992608070373536, 0.00883296012878418, 0.008820768356323241, 0.009018303871154786, 0.008820735931396484, 0.008713408470153809, 0.008606528282165527, 0.00858681583404541, 0.008547840118408203, 0.008504256248474121, 0.008519040107727052, 0.008551039695739747, 0.008545791625976563, 0.008607775688171386, 
0.008706527709960938, 0.008750432014465332, 0.008874624252319337, 0.009021568298339843, 0.008853023529052734, 0.0089682559967041, 0.008775744438171387, 0.008601856231689454, 0.008620256423950196, 0.008443936347961426, 0.008476415634155273, 0.00846780776977539, 0.008470623970031739, 0.008477248191833497, 0.008470527648925781, 0.008449983596801759, 0.00840931224822998, 0.008407232284545898, 0.008379039764404296, 0.008425984382629394, 0.00842803192138672, 0.008478240013122558, 0.008456928253173828, 0.008449824333190917, 0.008419296264648438, 0.00840499210357666, 0.008383551597595214, 0.008360159873962403, 0.008368864059448242, 0.0084781436920166, 0.008423904418945312, 0.008589504241943359, 0.008744864463806153, 0.008548352241516113, 0.008663040161132812, 0.0085032958984375, 0.008472895622253418, 0.008521408081054687, 0.008619903564453125, 0.00865664005279541, 0.008484831809997559, 0.00843126392364502, 0.00881766414642334, 0.008607071876525879, 0.008749792098999024, 0.008284735679626464, 0.008540032386779785, 0.008552448272705078, 0.008495103836059571, 0.008403167724609376, 0.008408543586730956, 0.00844422435760498, 0.008837023735046386, 0.008632351875305175, 0.009474111557006835, 0.009389568328857421, 0.009500767707824707, 0.008566944122314454, 0.01151411247253418, 0.008592927932739258, 0.008524255752563476, 0.008515583992004394, 0.008460288047790527, 0.008474623680114746, 0.008470239639282227, 0.008640800476074219, 0.00859545612335205, 0.00849715232849121, 0.008539839744567871, 0.00856710433959961, 0.00905180835723877, 0.008551103591918945, 0.008473600387573242, 0.008511263847351075, 0.008452863693237304, 0.008441856384277344, 0.008658592224121093, 0.008987263679504394, 0.00877791976928711, 0.00883471965789795, 0.008499199867248536, 0.00854748821258545, 0.008416255950927735, 0.008374112129211426, 0.008407039642333984, 0.008409407615661621, 0.008402848243713379, 0.008406944274902343, 0.008705920219421386, 0.008781824111938476, 0.008445119857788086, 0.008729408264160157, 0.00874880027770996, 0.008753567695617677, 0.0087192964553833, 0.008784799575805665, 0.00859337615966797, 0.008470560073852539, 0.008576255798339844, 0.008518400192260743, 0.008638496398925782, 0.008627296447753906, 0.008577919960021973, 0.008500351905822753, 0.008622976303100586, 0.008503456115722657, 0.008480607986450195, 0.008642880439758301, 0.008105055809020996, 0.008476384162902832, 0.008459263801574708, 0.00856175994873047, 0.008808287620544433, 0.008716192245483399, 0.008665375709533692, 0.008609600067138672, 0.008675552368164063, 0.008500255584716798, 0.008463456153869628, 0.00840982437133789, 0.008391615867614746, 0.008394783973693847, 0.0084071683883667, 0.008712032318115234, 0.008478303909301758, 0.008472064018249511, 0.008380384445190429, 0.008360927581787109, 0.008377504348754884, 0.008395584106445312, 0.008441984176635742, 0.008437631607055664, 0.008451680183410644, 0.008435615539550781, 0.008405216217041015, 0.008442144393920898, 0.008545920372009278, 0.008449664115905762, 0.008479488372802735, 0.00845206356048584, 0.008552479743957519, 0.008539456367492676, 0.008491711616516113, 0.008435711860656739, 0.008443903923034669, 0.008417247772216797, 0.008379615783691407, 0.008479167938232422, 0.00862566375732422, 0.008517600059509278, 0.00860489559173584, 0.00877945613861084, 0.008658944129943847, 0.008566783905029298, 0.008436767578125, 0.008448991775512695, 0.008615936279296875, 0.008478719711303711, 0.008417280197143554, 0.008421407699584962, 0.008435680389404296, 0.008400832176208496, 0.00842684841156006, 
0.00837235164642334, 0.008405152320861817, 0.008351648330688476, 0.008364895820617675, 0.008698592185974121, 0.008614879608154297, 0.008460160255432128, 0.008417280197143554, 0.008081119537353515, 0.008407135963439942, 0.008328319549560547, 0.008426112174987793, 0.008417535781860351, 0.00850499153137207, 0.008434016227722168, 0.008732192039489746, 0.00879593563079834, 0.00864019203186035, 0.008467455863952637, 0.008467616081237793, 0.00839686393737793, 0.008465248107910156, 0.008427200317382812, 0.00843391990661621, 0.008415231704711914, 0.008370112419128418, 0.008411264419555664, 0.008514911651611327, 0.008490688323974609, 0.008463264465332031, 0.008447423934936523, 0.008482624053955078, 0.008443648338317872, 0.008432319641113281, 0.008446271896362306, 0.008423423767089844, 0.008388607978820802, 0.008420928001403809, 0.008388511657714844, 0.008397695541381835, 0.008688768386840821, 0.008399392127990723, 0.008386560440063476, 0.008412480354309082, 0.008524479866027833, 0.008747008323669434, 0.008998847961425782, 0.008974176406860351, 0.008749279975891113, 0.008832287788391114, 0.009054944038391114, 0.00890675163269043, 0.00876694393157959, 0.00881107234954834, 0.008740832328796386, 0.008791647911071777, 0.008827296257019043, 0.008707584381103516, 0.00860416030883789, 0.008544256210327148, 0.008467616081237793, 0.008478560447692872, 0.008499327659606934, 0.00844275188446045, 0.008490367889404297, 0.00843014430999756, 0.008480832099914551, 0.008514559745788575, 0.008424384117126465, 0.008453375816345214, 0.008396608352661133, 0.008089599609375, 0.008401087760925293, 0.008384511947631837, 0.008453824043273927, 0.008394720077514649, 0.008368576049804687, 0.008394975662231446, 0.008447903633117676, 0.008424799919128418, 0.008441791534423829, 0.008524607658386231, 0.008783295631408691, 0.008722687721252441, 0.00882688045501709, 0.008768896102905273, 0.008714879989624023, 0.00869702434539795, 0.008561504364013673, 0.008495072364807128, 0.008558176040649413, 0.008521344184875488, 0.008393504142761231, 0.008410816192626952, 0.008359807968139649, 0.008382271766662598, 0.008360575675964356, 0.008377823829650879, 0.008374815940856933, 0.008439807891845704, 0.008556127548217773, 0.008587712287902833, 0.008529888153076173, 0.008605695724487305, 0.008579039573669433, 0.008560319900512696, 0.008587615966796875, 0.008615296363830566, 0.008757887840270996, 0.008626175880432128, 0.00853711986541748, 0.00858448028564453, 0.008476351737976074, 0.008494655609130859, 0.008521696090698243, 0.008578975677490235, 0.008610176086425782, 0.008531167984008789, 0.008579520225524903, 0.008697792053222657, 0.009002847671508789, 0.008524543762207031, 0.008507391929626466, 0.008435680389404296, 0.008665120124816895, 0.00841318416595459, 0.008378368377685547, 0.008354047775268554, 0.008393823623657226, 0.008391039848327637, 0.008439488410949707, 0.00858521556854248, 0.008535872459411621, 0.008436127662658692, 0.008067071914672852, 0.008601247787475586, 0.008401247978210449, 0.009224191665649414, 0.00970751953125, 0.010260416030883788, 0.00861190414428711, 0.008570079803466796, 0.008496064186096192, 0.008502655982971192, 0.008513376235961915, 0.008518272399902344, 0.008546239852905273, 0.00858563232421875, 0.008561408042907715, 0.008444479942321777, 0.008438112258911133, 0.008355551719665528, 0.008431903839111328, 0.008407039642333984, 0.008354080200195313, 0.008334336280822753, 0.008342016220092773, 0.008386783599853516, 0.008474016189575195, 0.008374079704284668, 0.00842851161956787, 0.00841708755493164, 
0.008452192306518554, 0.008499135971069336, 0.008525792121887208, 0.008693856239318848, 0.008617728233337403, 0.00852943992614746, 0.008436351776123048, 0.008491007804870606, 0.00870195198059082, 0.008553824424743652, 0.008499872207641602, 0.008570879936218261, 0.008631584167480468, 0.008579808235168457, 0.008554080009460448, 0.008519871711730957, 0.008508895874023438, 0.008419808387756347, 0.008552736282348632, 0.008498656272888183, 0.008470047950744629, 0.0087225923538208, 0.008948672294616699, 0.00880784034729004, 0.00871884822845459, 0.008558591842651368, 0.00854963207244873, 0.00847094440460205, 0.008434016227722168, 0.008474623680114746, 0.008519680023193359, 0.008472607612609864, 0.008447967529296874, 0.008462559700012208, 0.008437312126159668, 0.008134688377380371, 0.008398847579956055, 0.008392704010009766, 0.008463935852050781, 0.008450495719909667, 0.008427743911743163, 0.008406559944152832, 0.008511263847351075, 0.008665568351745605, 0.008738080024719238, 0.008742879867553711, 0.00853388786315918, 0.008446144104003907, 0.008389408111572266, 0.008576128005981445, 0.008444319725036621, 0.008437600135803223, 0.008411680221557617, 0.008400896072387695, 0.008384384155273438, 0.008378496170043945, 0.008402015686035156, 0.008510592460632325, 0.008722208023071288, 0.008882240295410156, 0.00860153579711914, 0.008571167945861817, 0.008656607627868653, 0.008792287826538086, 0.008599231719970703, 0.008728960037231445, 0.008885984420776366, 0.009232383728027344, 0.009421055793762206, 0.008619903564453125, 0.008509183883666992, 0.008717472076416016, 0.0084367036819458, 0.008462368011474609, 0.008401023864746093, 0.008356767654418945, 0.009309247970581054, 0.00848300838470459, 0.00846406364440918, 0.008404928207397461, 0.008443360328674316, 0.00839852809906006, 0.008403776168823242, 0.008371904373168945, 0.008460927963256835, 0.008431391716003418, 0.00843721580505371, 0.008372608184814454, 0.008365440368652343, 0.008612640380859375, 0.009140607833862304, 0.00857254409790039, 0.008492735862731934, 0.008393024444580078, 0.008337632179260254, 0.008396575927734375, 0.008386112213134766, 0.008343999862670898, 0.00823788833618164, 0.00840886402130127, 0.00848310375213623, 0.008471327781677246, 0.008412256240844726, 0.008445535659790039, 0.008449695587158202, 0.00845241641998291, 0.008642784118652344, 0.00870201587677002, 0.008643872261047363, 0.008592096328735352, 0.008790016174316406, 0.008761119842529298, 0.00876921558380127, 0.008783840179443359, 0.008880703926086427, 0.008970239639282226, 0.008763392448425293, 0.008673279762268067, 0.00855350399017334, 0.008541152000427247, 0.008640543937683105, 0.008513216018676759, 0.008619456291198731, 0.008539168357849122, 0.008484671592712403, 0.008482208251953125, 0.008493599891662597, 0.008566143989562989, 0.0086812162399292, 0.008639360427856445, 0.008637951850891114, 0.008663135528564453, 0.008551103591918945, 0.008623904228210449, 0.008592960357666015, 0.008638463973999023, 0.008591135978698731, 0.00852444839477539, 0.008453760147094726, 0.008512160301208497, 0.00843734359741211, 0.008419039726257325, 0.008390624046325684, 0.008431488037109375, 0.008374848365783692, 0.008433535575866699, 0.008382559776306153, 0.008418399810791016, 0.008417407989501953, 0.008413503646850586, 0.008405152320861817, 0.008386912345886231, 0.008373567581176758, 0.008399744033813477, 0.008408608436584473, 0.008456480026245118, 0.008366080284118652, 0.008399104118347168, 0.008400192260742187, 0.008421088218688965, 0.008448543548583985, 0.008171263694763184, 
0.008523776054382324, 0.008440704345703124, 0.008531647682189942, 0.008558015823364257, 0.00850592041015625, 0.008474623680114746, 0.008615103721618653, 0.008909407615661622, 0.008923263549804687, 0.008855327606201172, 0.008752703666687012, 0.008700672149658204, 0.009219743728637696, 0.00919587230682373, 0.008977791786193848, 0.00894758415222168, 0.00891750431060791, 0.00875494384765625, 0.008769472122192383, 0.008831328392028808, 0.008841440200805665, 0.008720383644104004, 0.008644607543945313, 0.008759296417236329, 0.008766752243041992, 0.008646944046020507, 0.008693216323852539, 0.008612095832824707, 0.008496864318847656, 0.008547327995300292, 0.008665247917175293, 0.008640352249145508, 0.008652799606323243, 0.00877683162689209, 0.008829024314880371, 0.008866527557373046, 0.008741184234619141, 0.00866646385192871, 0.008577568054199219, 0.00850111961364746, 0.008515583992004394, 0.008548352241516113, 0.008980480194091797, 0.008590944290161133, 0.008512224197387695, 0.00868115234375, 0.008709983825683594, 0.008682720184326172, 0.008528512001037597, 0.008626496315002442, 0.008467616081237793, 0.008424320220947266, 0.008419008255004882, 0.00848515224456787, 0.008417280197143554, 0.0084071683883667, 0.008460160255432128, 0.008386560440063476, 0.008394623756408691, 0.008444031715393067, 0.008496767997741699, 0.008481504440307618, 0.008130656242370606, 0.008471872329711914, 0.008718591690063476, 0.00873532772064209, 0.009260895729064942, 0.009055456161499024, 0.009763615608215332, 0.008906047821044922, 0.008974207878112792, 0.008741567611694336, 0.00870412826538086, 0.008764991760253906, 0.00881056022644043, 0.008742879867553711, 0.008728192329406739, 0.0087010555267334, 0.0086463041305542, 0.008641663551330567, 0.008706144332885742, 0.008661248207092286, 0.008655136108398437, 0.008605952262878419, 0.008656895637512207, 0.008650752067565918, 0.008650752067565918, 0.008666208267211914, 0.008674336433410644, 0.008707967758178711, 0.00881392002105713, 0.00872105598449707, 0.008652799606323243, 0.008583168029785156, 0.008657952308654786, 0.008708415985107422, 0.008753727912902833, 0.008691583633422852, 0.008675456047058105, 0.008642656326293945, 0.00872428798675537, 0.008651040077209472, 0.008824928283691406, 0.008812352180480957, 0.008843263626098634, 0.008918975830078125, 0.008980319976806641, 0.009007072448730469, 0.008935680389404297, 0.00906982421875, 0.009049887657165528, 0.008911231994628906, 0.008806528091430664, 0.0089617919921875, 0.008994688034057617, 0.008896672248840332, 0.008723456382751465, 0.008696800231933593, 0.00866153621673584, 0.008575263977050782, 0.008708000183105469, 0.008806079864501952, 0.008661312103271484, 0.008840800285339356, 0.008822688102722168, 0.008606528282165527, 0.008709856033325195, 0.008712127685546875, 0.008664640426635742, 0.008700639724731445, 0.008617312431335448, 0.008643487930297851, 0.008854399681091309, 0.008862624168395996, 0.009435071945190429, 0.009366623878479004, 0.009188096046447753, 0.00901961612701416, 0.009013248443603515, 0.008937472343444825, 0.008962335586547852, 0.008889792442321777, 0.009005120277404785, 0.00893776035308838, 0.008882495880126953, 0.00875276756286621, 0.008828160285949706, 0.00887177562713623, 0.008991647720336914, 0.008947263717651368, 0.008825280189514161, 0.008771552085876464, 0.008712224006652832, 0.00882688045501709, 0.008730688095092774, 0.008615967750549316, 0.00856454372406006, 0.00862217617034912, 0.008614144325256347, 0.008560383796691894, 0.008561663627624512, 0.008475711822509766, 0.008444992065429688, 
0.008741567611694336, 0.008775872230529785, 0.008757472038269043, 0.008703776359558105, 0.008648639678955078, 0.008837183952331544, 0.008976032257080078, 0.008938143730163575, 0.008867712020874023, 0.008778783798217773, 0.008594207763671875, 0.00850550365447998, 0.008513376235961915, 0.008511487960815429, 0.008465503692626953, 0.008450079917907714, 0.008427743911743163, 0.008401568412780763, 0.0085032958984375, 0.008396320343017578, 0.008411520004272461, 0.008372320175170898, 0.008527168273925781, 0.008567487716674805, 0.008491007804870606, 0.00835750389099121, 0.00867948818206787, 0.008817440032958984, 0.009676799774169922, 0.008994272232055664, 0.009320704460144043, 0.009356767654418945, 0.00896288013458252, 0.008957951545715333, 0.008924480438232421, 0.008807104110717773, 0.008822976112365722, 0.008843168258666993, 0.008915007591247559, 0.008800095558166503, 0.008785344123840333, 0.00884006404876709, 0.008752927780151368, 0.008796416282653808, 0.008994208335876466, 0.008882431983947755, 0.008824128150939942, 0.008843328475952148, 0.008763360023498536, 0.008716959953308106, 0.00877791976928711, 0.008769344329833985, 0.008927231788635253, 0.009076736450195312, 0.008894463539123536, 0.008853504180908203, 0.008849408149719238, 0.008934623718261718, 0.008864704132080079, 0.008886112213134765, 0.008946975708007813, 0.008930144309997558, 0.008852800369262696, 0.008842944145202637, 0.008862591743469239, 0.008881695747375488, 0.009105888366699219, 0.008879360198974609, 0.008761311531066894, 0.008657504081726074, 0.008668352127075195, 0.008739839553833008, 0.008712191581726075, 0.008638175964355468, 0.008540448188781738, 0.008679648399353028, 0.008503135681152343, 0.008472064018249511, 0.008487360000610351, 0.008437760353088379, 0.008564512252807617, 0.0085066556930542, 0.008451007843017578, 0.00860159969329834, 0.008402144432067871, 0.008465184211730956, 0.00875260829925537, 0.00872640037536621, 0.008568096160888671, 0.008728960037231445, 0.008625375747680663, 0.008568863868713378, 0.00859001636505127, 0.008517824172973632, 0.008443967819213867, 0.008450016021728516, 0.00844495964050293, 0.00850227165222168, 0.008589311599731446, 0.008747008323669434, 0.008947936058044434, 0.00902940845489502, 0.009261055946350098, 0.009196576118469238, 0.008887552261352539, 0.008824416160583496, 0.008685695648193359, 0.008630271911621093, 0.008674367904663086, 0.008600288391113282, 0.008568863868713378, 0.008538111686706543, 0.008438976287841796, 0.00846243190765381, 0.008517600059509278, 0.008473183631896973, 0.008511967658996582, 0.00848038387298584, 0.008440064430236817, 0.008462271690368652, 0.008826944351196289, 0.008814144134521484, 0.008767583847045898, 0.008857888221740723, 0.008638527870178223, 0.008678463935852051, 0.008747936248779297, 0.00886128044128418, 0.008767295837402343, 0.00868329620361328, 0.008550559997558594, 0.00856112003326416, 0.008585439682006837, 0.008668191909790039, 0.008827872276306152, 0.00873804759979248, 0.008732992172241211, 0.00862019157409668, 0.008616031646728516, 0.00850710391998291, 0.008676992416381837, 0.008430432319641114, 0.00837622356414795, 0.008472672462463379, 0.008600607872009277, 0.008391551971435547, 0.009105440139770507, 0.008607968330383301, 0.008711551666259766, 0.008701472282409669, 0.008737055778503418, 0.00831116771697998, 0.008683648109436035, 0.008640512466430664, 0.00870809555053711, 0.008730624198913574, 0.00872755241394043, 0.008632863998413086, 0.008573344230651855, 0.008504511833190918, 0.008501983642578126, 0.008487071990966797, 
0.008464384078979491, 0.008652799606323243, 0.008429920196533203, 0.00843331241607666, 0.008449728012084962, 0.008444448471069337, 0.00842739200592041, 0.008366239547729493, 0.008452992439270019, 0.008421504020690918, 0.008428000450134277, 0.008415519714355469, 0.008390527725219726, 0.008523903846740722, 0.00851353645324707, 0.008472576141357421, 0.008439807891845704, 0.008779840469360351, 0.00843779182434082, 0.008450207710266113, 0.0084551362991333, 0.008885024070739746, 0.00901699161529541, 0.008834464073181152, 0.008877247810363769, 0.008875328063964844, 0.00895577621459961, 0.009052736282348633, 0.008770751953125, 0.008639616012573242, 0.008555904388427734, 0.008487232208251953, 0.008452095985412598, 0.008456192016601562, 0.008578399658203124, 0.008499872207641602, 0.008503071784973145, 0.008615167617797851, 0.008592512130737305, 0.008539999961853027, 0.008507391929626466, 0.00860364818572998, 0.008476672172546386, 0.00840499210357666, 0.008472576141357421, 0.008443391799926758, 0.008444191932678223, 0.008426719665527344, 0.008392959594726563, 0.008379103660583496, 0.008410176277160645, 0.00834659194946289, 0.00816102409362793, 0.008458368301391602, 0.008726143836975097, 0.008542207717895508, 0.008499711990356444, 0.008461376190185547, 0.008424511909484863, 0.008527199745178222, 0.008505248069763183, 0.008735039710998534, 0.008724800109863281, 0.008665087699890137, 0.00858460807800293, 0.00853052806854248, 0.008535648345947265, 0.008457663536071778, 0.008399552345275878, 0.00849731159210205, 0.008464639663696288, 0.008402655601501465, 0.008416607856750488, 0.008424448013305665, 0.008421088218688965, 0.008374367713928223, 0.008406399726867676, 0.008438400268554688, 0.009029631614685058, 0.008419103622436523, 0.00841750431060791, 0.008384511947631837, 0.008364031791687012, 0.008450176239013672, 0.008388480186462403, 0.008392704010009766, 0.00841536045074463, 0.0084017915725708, 0.008602592468261719, 0.008570112228393555, 0.008543295860290527, 0.008519392013549804, 0.008509440422058106, 0.00851353645324707, 0.008568832397460938, 0.008554752349853516, 0.008801728248596192, 0.008796480178833008, 0.008728575706481934, 0.00871622371673584, 0.008716064453125, 0.008806367874145508, 0.00868348789215088, 0.008577088356018066, 0.00851798439025879, 0.008566720008850098, 0.008500736236572265, 0.008511103630065919, 0.008663935661315918, 0.008570879936218261, 0.00849612808227539, 0.008410112380981445, 0.008357760429382324, 0.008587519645690917, 0.008662912368774414, 0.008457375526428222, 0.008866463661193848, 0.008720576286315918, 0.008568927764892579, 0.00849500846862793, 0.008409343719482421, 0.008396575927734375, 0.008421024322509766, 0.008447744369506837, 0.008434240341186523, 0.008429568290710449, 0.008444928169250488, 0.00843295955657959, 0.008374239921569824, 0.00842031955718994, 0.00836684799194336, 0.008404864311218261, 0.00839897632598877, 0.008343647956848145, 0.0083821439743042, 0.008365951538085937, 0.008357376098632812, 0.008370976448059082, 0.00839907169342041, 0.008402400016784668, 0.008409472465515136, 0.008427519798278809, 0.008640512466430664, 0.00857487964630127, 0.00863161563873291, 0.008565535545349121, 0.008568351745605468, 0.00856112003326416, 0.008689951896667481, 0.008642271995544433, 0.0089303035736084, 0.008856063842773437, 0.008835583686828614, 0.008656895637512207, 0.008540160179138183, 0.008468832015991211, 0.008447648048400879, 0.008451680183410644, 0.008411168098449707, 0.008421759605407714, 0.008392224311828613, 0.008373824119567871, 0.008434592247009277, 
0.008423423767089844, 0.00845241641998291, 0.008383328437805176, 0.008473440170288086, 0.008453120231628418, 0.008451071739196778, 0.008419455528259278, 0.008662912368774414, 0.00867311954498291, 0.008409055709838867, 0.008384384155273438, 0.008407360076904296, 0.008396608352661133, 0.008376447677612305, 0.008541888236999511, 0.008067904472351074, 0.008393183708190918, 0.008348031997680664, 0.008398688316345215, 0.008409536361694336, 0.008625887870788574, 0.008755167961120606, 0.008679743766784667, 0.008664799690246581, 0.008625856399536133, 0.008581439971923828, 0.008602975845336914, 0.008626848220825196, 0.008679743766784667, 0.009019071578979493, 0.009012864112854005, 0.008544639587402343, 0.008444191932678223, 0.00840608024597168, 0.008374048233032226, 0.008393695831298828, 0.00859062385559082, 0.00839027214050293, 0.008391679763793946, 0.00837667179107666, 0.00893507194519043, 0.008558591842651368, 0.008494655609130859, 0.009177536010742187, 0.009729120254516601, 0.00949135971069336, 0.009840640068054199, 0.008847359657287598, 0.008535648345947265, 0.008454560279846191, 0.008560640335083008, 0.00849459171295166, 0.008471391677856445, 0.008451904296875, 0.008480480194091797, 0.008542336463928223, 0.00851353645324707, 0.008463999748229981, 0.008430047988891602, 0.008451840400695802, 0.00845849609375, 0.008484064102172851, 0.00852233600616455, 0.008544351577758789, 0.008783871650695801, 0.00874675178527832, 0.008681088447570802, 0.008614015579223634, 0.00854911994934082, 0.008526816368103027, 0.008479392051696778, 0.008421183586120606, 0.008474944114685058, 0.008541472434997558, 0.008542431831359863, 0.008540800094604492, 0.00850931167602539, 0.00848095989227295]",tokens/s,116.2499624640655,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in 
load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", 
line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 373, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 115, in __init__ self.v_proj = 
nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 402886 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = 
cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 383, in __init__ self.fc2 = nn.Linear(config.ffn_dim, self.embed_dim, bias=config.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 784.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 210.12 MiB is free. Process 402520 has 14.53 GiB memory in use. Of the allocated memory 14.41 GiB is allocated by PyTorch, and 5.21 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in 
load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 373, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 115, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 400267 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,805.629952,1113.45664,0.0,710.934528,657.419264,s,1,8.962650390625,8.962650390625,0.0,8.962650390625,8.962650390625,8.962650390625,8.962650390625,[8.962650390625],,kWh,2.7941555541663846e-06,3.00988801177399e-07,9.144451759996591e-07,4.009589531343443e-06,,MB,1140.49024,1157.496832,0.0,744.48896,633.645568,s,21,0.35188183021545405,0.016756277629307335,0.0004884192233001455,0.016638336181640626,0.016774816513061522,0.016787168502807617,0.018491744613647464,"[0.018917888641357423, 0.016628032684326173, 0.016550079345703125, 0.016595359802246093, 0.016616960525512696, 0.016699487686157227, 0.016719648361206055, 0.01662268829345703, 0.01672972869873047, 0.016638336181640626, 0.016787168502807617, 0.016571744918823243, 0.016695520401000977, 0.016524127960205078, 0.016593183517456055, 0.016687776565551756, 0.01668172836303711, 0.01656425666809082, 0.016679231643676757, 0.016774816513061522, 0.01660406494140625]",tokens/s,15277.856195951705,kWh,6.021927138487984e-07,6.641113838301393e-08,3.997935615298956e-07,1.068397413761708e-06,tokens/kWh,239611212.74025983,MB,1152.323584,1178.468352,0.0,765.46048,633.648128,s,21,9.950679748535155,0.4738418927873883,0.004647500506196707,0.47350015258789063,0.47959307861328127,0.4830073547363281,0.4851194885253906,"[0.4675379638671875, 0.46911691284179685, 0.46694949340820313, 0.46993194580078124, 0.46973345947265627, 0.4830073547363281, 0.47494830322265624, 0.47350015258789063, 0.48564752197265626, 0.47312664794921877, 0.47576007080078125, 0.47959307861328127, 0.4741302185058594, 0.47476974487304685, 0.4729462585449219, 0.47118865966796875, 0.47016287231445314, 0.4709754943847656, 0.47779998779296873, 0.47573394775390626, 0.4741196594238281]",tokens/s,132.95574105826887,kWh,1.3656941258373437e-05,1.5061217338508747e-06,6.559008301803473e-06,2.172207129402779e-05,tokens/kWh,2900275.9058856904,,s,1323,9.943833352088925,0.007516124982682484,0.00024363500048826857,0.007452288150787354,0.007728025722503662,0.007854064083099365,0.008331454772949219,"[0.0075781760215759275, 0.0077001919746398926, 0.007719520092010498, 0.0075879359245300294, 0.007647136211395264, 0.0074271678924560545, 0.0074617919921875, 0.0075996479988098145, 0.007409599781036377, 0.007389920234680176, 0.007574463844299316, 0.007419839859008789, 0.007373631954193115, 0.007528063774108887, 0.007399295806884766, 0.007381472110748291, 0.007374879837036133, 0.007360864162445069, 0.007333439826965332, 0.007358687877655029, 0.007368800163269043, 0.007353280067443848, 0.007377888202667236, 
0.007558815956115723, 0.0073892478942871095, 0.0073853759765625, 0.007360735893249512, 0.007489183902740479, 0.0074282240867614745, 0.007408671855926514, 0.007420896053314209, 0.007378176212310791, 0.007387904167175293, 0.007350048065185547, 0.007333183765411377, 0.007561855792999268, 0.007390880107879639, 0.007377056121826172, 0.007424479961395264, 0.0074280319213867185, 0.007423295974731446, 0.007390207767486572, 0.007386879920959473, 0.007401472091674805, 0.007379968166351319, 0.007316480159759522, 0.007305215835571289, 0.0073359360694885255, 0.007353407859802246, 0.007351327896118164, 0.007359744071960449, 0.007332511901855469, 0.007319551944732666, 0.0073636798858642575, 0.007361440181732177, 0.007388671875, 0.0074237117767333985, 0.00743503999710083, 0.007339615821838379, 0.00735916805267334, 0.007375616073608399, 0.007403584003448487, 0.007361440181732177, 0.007333504199981689, 0.007407584190368652, 0.007572288036346435, 0.007368896007537842, 0.007548128128051758, 0.007411776065826416, 0.007432735919952392, 0.007592351913452149, 0.007446144104003906, 0.007536608219146729, 0.0077209601402282715, 0.007413760185241699, 0.007365952014923096, 0.007381631851196289, 0.007383103847503662, 0.007385087966918945, 0.007365791797637939, 0.007371615886688232, 0.0073472318649291995, 0.007497983932495117, 0.007832640171051026, 0.007689888000488281, 0.0076451201438903806, 0.0075797119140625, 0.00752243185043335, 0.007489408016204834, 0.00748857593536377, 0.007474112033843994, 0.007510015964508057, 0.007448224067687989, 0.007397727966308594, 0.00740556812286377, 0.007417856216430664, 0.007380832195281983, 0.00737500810623169, 0.0073993921279907225, 0.007446559906005859, 0.007569600105285645, 0.007591487884521485, 0.007443999767303467, 0.007435264110565186, 0.007712800025939942, 0.0073331198692321775, 0.0073768959045410155, 0.007375296115875244, 0.007362559795379638, 0.0073803520202636716, 0.007369344234466553, 0.007290880203247071, 0.007350272178649903, 0.007360447883605957, 0.0073523840904235836, 0.00734115219116211, 0.007355296134948731, 0.007405087947845459, 0.007326079845428467, 0.007341631889343262, 0.007391456127166748, 0.0074074559211730956, 0.00739785623550415, 0.007409664154052734, 0.0073400321006774905, 0.007409088134765625, 0.0072773761749267574, 0.00758784008026123, 0.007401472091674805, 0.00736246395111084, 0.00752239990234375, 0.0073359360694885255, 0.0073071041107177735, 0.007567808151245117, 0.007415520191192627, 0.007526400089263916, 0.00742307186126709, 0.007387743949890137, 0.007479616165161133, 0.007310944080352783, 0.00735484790802002, 0.007384031772613525, 0.0073695039749145505, 0.007412960052490234, 0.007387775897979737, 0.007356160163879395, 0.007334559917449951, 0.007398496150970459, 0.007340191841125488, 0.007469759941101074, 0.007354368209838867, 0.007342080116271973, 0.007329152107238769, 0.007371391773223877, 0.007345183849334717, 0.0076007041931152345, 0.007509600162506103, 0.0074617600440979, 0.007415711879730225, 0.0073762240409851075, 0.007352704048156738, 0.007389503955841065, 0.007378143787384033, 0.007424863815307617, 0.007369984149932862, 0.007424704074859619, 0.007405119895935059, 0.007403232097625732, 0.007414080142974853, 0.0074203200340271, 0.007407616138458252, 0.007436287879943848, 0.007425600051879882, 0.007389311790466309, 0.007392767906188965, 0.0074002561569213865, 0.0073968319892883305, 0.007422495841979981, 0.007370495796203613, 0.0073229122161865235, 0.0073528637886047365, 0.007381440162658692, 0.007372799873352051, 0.007480319976806641, 
0.007445184230804444, 0.007446847915649414, 0.007469056129455566, 0.007485727787017823, 0.007460864067077637, 0.007336480140686035, 0.007498752117156982, 0.007543615818023682, 0.007458176136016846, 0.007490047931671143, 0.007464672088623047, 0.007413856029510498, 0.007471712112426758, 0.007458208084106445, 0.007668479919433594, 0.0075253119468688965, 0.007553599834442139, 0.007613759994506836, 0.007403679847717285, 0.007422976016998291, 0.007552159786224365, 0.00742848014831543, 0.007433440208435059, 0.007467135906219482, 0.007418335914611817, 0.007457024097442627, 0.007434400081634521, 0.0073690562248229985, 0.007717599868774414, 0.007481599807739258, 0.00734278392791748, 0.007428095817565918, 0.007446559906005859, 0.007362624168395996, 0.007372576236724854, 0.007421760082244873, 0.007414080142974853, 0.0074403839111328125, 0.007456768035888672, 0.00741539192199707, 0.007397408008575439, 0.007401088237762451, 0.007367424011230469, 0.007434207916259766, 0.007460896015167236, 0.0074035201072692874, 0.007432191848754883, 0.007427872180938721, 0.007362080097198486, 0.007547391891479492, 0.007463103771209717, 0.007391520023345947, 0.007460576057434082, 0.007466271877288818, 0.007385824203491211, 0.007459871768951416, 0.007455935955047607, 0.007427616119384766, 0.007448575973510742, 0.007504032135009765, 0.0074217281341552735, 0.007423520088195801, 0.007474143981933593, 0.0073992319107055666, 0.007526400089263916, 0.007510015964508057, 0.007481344223022461, 0.00752400016784668, 0.007345151901245117, 0.007570271968841553, 0.007489535808563232, 0.007437952041625976, 0.007409279823303223, 0.007428991794586182, 0.007454592227935791, 0.007436287879943848, 0.007434239864349365, 0.007452832221984863, 0.0074115839004516605, 0.007470592021942139, 0.007368192195892334, 0.00740825605392456, 0.007547232151031494, 0.007403039932250976, 0.007406047821044922, 0.00743552017211914, 0.007317344188690186, 0.0074535999298095705, 0.007513504028320312, 0.00741868782043457, 0.007436063766479492, 0.0074035201072692874, 0.007339583873748779, 0.00751148796081543, 0.0074635519981384275, 0.00741209602355957, 0.007515359878540039, 0.007439424037933349, 0.00742790412902832, 0.007432096004486084, 0.007417856216430664, 0.00740118408203125, 0.007500063896179199, 0.0074930558204650876, 0.007447455883026123, 0.007431392192840576, 0.007462560176849365, 0.007362688064575195, 0.007452896118164062, 0.007494080066680908, 0.0074319357872009275, 0.007491968154907227, 0.007536064147949219, 0.007417791843414306, 0.007395840167999267, 0.007483391761779785, 0.007411712169647216, 0.007475264072418213, 0.007499711990356445, 0.007408736228942871, 0.007488416194915771, 0.007489535808563232, 0.007364607810974121, 0.00744652795791626, 0.007540800094604492, 0.007529471874237061, 0.0076154241561889645, 0.0075615358352661135, 0.0074687361717224125, 0.0074321279525756834, 0.007466944217681884, 0.007541567802429199, 0.007479328155517578, 0.007411680221557617, 0.007548255920410156, 0.007488160133361816, 0.007380640029907227, 0.007626431941986084, 0.007484000205993652, 0.007542111873626709, 0.007444799900054932, 0.007471519947052002, 0.007479296207427978, 0.007714015960693359, 0.007711328029632568, 0.007647424221038818, 0.00853228759765625, 0.007982399940490723, 0.007597568035125732, 0.007506303787231446, 0.007492095947265625, 0.007453855991363526, 0.007639039993286132, 0.007717535972595215, 0.00777235221862793, 0.007728288173675537, 0.007748447895050049, 0.007699552059173584, 0.00778326416015625, 0.007696767807006836, 0.007630623817443847, 
0.007827360153198243, 0.007809023857116699, 0.007686143875122071, 0.007845888137817383, 0.00773740816116333, 0.007581632137298584, 0.00755731201171875, 0.007550879955291748, 0.007489439964294434, 0.007477248191833496, 0.007567520141601562, 0.007559008121490478, 0.007796480178833008, 0.007996895790100097, 0.008010368347167968, 0.007913311958312989, 0.007761439800262451, 0.007719840049743652, 0.007843008041381836, 0.007563968181610107, 0.007553023815155029, 0.007548895835876465, 0.0075714879035949706, 0.007573503971099854, 0.007577600002288819, 0.007506207942962646, 0.007460576057434082, 0.007432191848754883, 0.007403776168823242, 0.007532127857208252, 0.007833759784698486, 0.008199359893798828, 0.008067520141601562, 0.007703296184539795, 0.00790451192855835, 0.007721183776855468, 0.007936160087585449, 0.007738719940185547, 0.007598207950592041, 0.007562016010284424, 0.007452288150787354, 0.007556767940521241, 0.007440959930419922, 0.007380576133728028, 0.0074527359008789065, 0.007390880107879639, 0.0074002881050109865, 0.0075734081268310546, 0.0075797438621521, 0.007487167835235595, 0.007485087871551514, 0.007700736045837402, 0.007660192012786865, 0.0075830078125, 0.0075944638252258305, 0.007620287895202637, 0.007688831806182861, 0.007898208141326904, 0.00772976016998291, 0.007658624172210694, 0.00833625602722168, 0.007526400089263916, 0.00750928020477295, 0.007469791889190674, 0.007459904193878174, 0.007451583862304687, 0.007458816051483155, 0.007695487976074219, 0.007641727924346924, 0.007562975883483886, 0.007535136222839355, 0.007456768035888672, 0.007376287937164307, 0.007356768131256104, 0.007475456237792969, 0.0074035201072692874, 0.007458816051483155, 0.007426047801971435, 0.007452672004699707, 0.0074035201072692874, 0.007383039951324463, 0.007469056129455566, 0.007423999786376953, 0.007505216121673584, 0.007386975765228272, 0.0073837761878967284, 0.007405695915222168, 0.007372799873352051, 0.007448575973510742, 0.007422976016998291, 0.007418879985809326, 0.0074170241355896, 0.007442399978637695, 0.007385663986206055, 0.0074160962104797366, 0.007407616138458252, 0.007419167995452881, 0.0074759359359741215, 0.007407616138458252, 0.007475520133972168, 0.0074291200637817386, 0.007414463996887207, 0.007425407886505127, 0.007453311920166015, 0.007401472091674805, 0.0074561600685119625, 0.007658080101013183, 0.007539008140563965, 0.007644864082336425, 0.007642879962921143, 0.007622879981994629, 0.007745567798614502, 0.007600351810455323, 0.007492544174194336, 0.007555712223052979, 0.007495903968811035, 0.007409023761749268, 0.007393919944763184, 0.007415808200836181, 0.007358463764190673, 0.007389215946197509, 0.007487455844879151, 0.007485407829284668, 0.0074561600685119625, 0.007432864189147949, 0.007435391902923584, 0.00741871976852417, 0.007444479942321777, 0.007510047912597656, 0.007479263782501221, 0.0075979199409484865, 0.00759007978439331, 0.007613887786865234, 0.007598624229431152, 0.007548768043518067, 0.007387296199798584, 0.007433311939239502, 0.007451551914215088, 0.007469056129455566, 0.007481344223022461, 0.007454815864562988, 0.007425951957702637, 0.00745472002029419, 0.007452000141143799, 0.007424672126770019, 0.007454080104827881, 0.0075905280113220215, 0.007641215801239013, 0.007550848007202148, 0.007548927783966064, 0.007778079986572266, 0.007751904010772705, 0.007652575969696045, 0.007573535919189453, 0.007586559772491455, 0.007569119930267334, 0.0075327038764953615, 0.007524352073669433, 0.007506048202514649, 0.007658495903015137, 0.00786572790145874, 
0.007651872158050537, 0.007616864204406738, 0.007704224109649658, 0.007494719982147216, 0.007623616218566894, 0.007595776081085205, 0.007571712017059327, 0.007677792072296142, 0.007612576007843017, 0.007677055835723877, 0.007619455814361572, 0.007531744003295898, 0.007537439823150634, 0.007798367977142334, 0.007539008140563965, 0.007581791877746582, 0.007526400089263916, 0.007667712211608887, 0.007553023815155029, 0.007520031929016113, 0.0076392641067504884, 0.007602176189422607, 0.007684095859527588, 0.007513088226318359, 0.007452799797058106, 0.007433087825775147, 0.007581696033477783, 0.00750105619430542, 0.007527167797088623, 0.007718912124633789, 0.00782528018951416, 0.007819071769714355, 0.007911168098449706, 0.007804992198944091, 0.007805759906768799, 0.00793555212020874, 0.008005663871765137, 0.00803439998626709, 0.008011775970458984, 0.008083456039428711, 0.007953728199005127, 0.007974783897399902, 0.00784441614151001, 0.007776512145996094, 0.007809023857116699, 0.008230976104736328, 0.007703775882720947, 0.007619296073913574, 0.007616511821746826, 0.007706624031066894, 0.007581696033477783, 0.007583744049072265, 0.007763967990875244, 0.007689407825469971, 0.007875711917877197, 0.007730879783630371, 0.007696415901184082, 0.007681280136108398, 0.0076336321830749515, 0.007726367950439453, 0.00756550407409668, 0.007356256008148193, 0.00753542423248291, 0.007435264110565186, 0.007410783767700196, 0.007594048023223877, 0.007394144058227539, 0.0074709439277648925, 0.007383264064788818, 0.007471039772033692, 0.0075095357894897465, 0.007407551765441895, 0.0074098238945007324, 0.007503488063812256, 0.00742790412902832, 0.007419072151184082, 0.007425792217254638, 0.007432191848754883, 0.007441664218902588, 0.007374688148498535, 0.007383967876434326, 0.007352320194244385, 0.007428095817565918, 0.00731987190246582, 0.007429823875427246, 0.007358784198760986, 0.007363776206970215, 0.007381247997283936, 0.007552832126617432, 0.007438784122467041, 0.007372799873352051, 0.007390880107879639, 0.007411104202270508, 0.007379839897155762, 0.007411776065826416, 0.007362304210662841, 0.007383296012878418, 0.007474527835845947, 0.007445024013519287, 0.007506048202514649, 0.007528031826019287, 0.007539455890655518, 0.007536287784576416, 0.00749567985534668, 0.007501471996307373, 0.00750380802154541, 0.007473567962646485, 0.007532320022583008, 0.007547103881835938, 0.007571455955505371, 0.007548096179962158, 0.007543776035308838, 0.0076778559684753415, 0.007886688232421874, 0.007478911876678467, 0.00756387186050415, 0.007614687919616699, 0.007816864013671874, 0.008011775970458984, 0.007794688224792481, 0.007706592082977295, 0.007768064022064209, 0.007774240016937256, 0.007744927883148193, 0.007842144012451173, 0.007827455997467042, 0.007776256084442139, 0.0077325758934021, 0.0075905599594116215, 0.00767193603515625, 0.007521535873413086, 0.007618239879608154, 0.007717984199523926, 0.0076399679183959965, 0.007714943885803223, 0.007927648067474365, 0.007860383987426757, 0.007785568237304688, 0.007888607978820801, 0.007812704086303711, 0.00771017599105835, 0.00779475212097168, 0.007768159866333008, 0.0077461118698120114, 0.007661791801452636, 0.007926911830902099, 0.0077034239768981935, 0.007597536087036133, 0.007643680095672608, 0.0076902399063110355, 0.0075706238746643065, 0.007410496234893799, 0.007403200149536133, 0.007358784198760986, 0.007366432189941406, 0.007328127861022949, 0.00730844783782959, 0.007359488010406494, 0.007364287853240967, 0.0073680000305175785, 0.007348031997680664, 
0.007447423934936523, 0.007504191875457764, 0.007454048156738282, 0.007332191944122314, 0.007369791984558106, 0.007379903793334961, 0.007374720096588135, 0.00741103982925415, 0.007457568168640137, 0.007933951854705811, 0.00738483190536499, 0.0074364480972290035, 0.00740496015548706, 0.007389279842376709, 0.007394976139068603, 0.00731987190246582, 0.0073591041564941406, 0.007507967948913574, 0.00739737606048584, 0.007440351963043213, 0.007422048091888428, 0.007353631973266601, 0.007427807807922363, 0.007410272121429444, 0.0074059200286865235, 0.007407616138458252, 0.0072641282081604, 0.007420415878295898, 0.007485439777374267, 0.007368703842163086, 0.007628799915313721, 0.007999040126800538, 0.007394752025604248, 0.007488512039184571, 0.007413343906402588, 0.0073547840118408205, 0.007524352073669433, 0.009983136177062988, 0.010416159629821778, 0.007529280185699463, 0.008286208152770995, 0.007595359802246094, 0.008026304244995118, 0.008198623657226562, 0.0077209601402282715, 0.007946239948272706, 0.007897088050842285, 0.0076900157928466795, 0.007565120220184326, 0.00745308780670166, 0.007383039951324463, 0.007404895782470703, 0.00742195177078247, 0.007362783908843994, 0.007409503936767578, 0.007429920196533203, 0.007407584190368652, 0.007417759895324707, 0.007434879779815674, 0.007348544120788574, 0.007391232013702393, 0.007333055973052978, 0.0074795842170715336, 0.007481887817382813, 0.007409664154052734, 0.007364416122436523, 0.00866323184967041, 0.007464960098266602, 0.007370751857757568, 0.007401472091674805, 0.007415135860443115, 0.007342720031738281, 0.0073851199150085445, 0.007417856216430664, 0.00741315221786499, 0.007414271831512451, 0.007413663864135742, 0.007446976184844971, 0.007460415840148926, 0.007474559783935547, 0.007469888210296631, 0.007540736198425293, 0.007561247825622558, 0.007452640056610107, 0.0074271998405456546, 0.007387616157531738, 0.007406079769134521, 0.007417759895324707, 0.00744217586517334, 0.007435967922210693, 0.007447391986846924, 0.007540863990783691, 0.007401472091674805, 0.00745472002029419, 0.007392416000366211, 0.007373663902282715, 0.0073541121482849124, 0.007636288166046143, 0.007776512145996094, 0.007443136215209961, 0.007370751857757568, 0.007393280029296875, 0.007366655826568603, 0.0074035201072692874, 0.011380736351013183, 0.007486559867858887, 0.007930208206176757, 0.007911712169647216, 0.007414048194885254, 0.00739737606048584, 0.007307199954986572, 0.007337535858154297, 0.007344639778137207, 0.007362559795379638, 0.007348415851593018, 0.00742790412902832, 0.007550975799560547, 0.007915520191192627, 0.007485439777374267, 0.008517760276794434, 0.00759500789642334, 0.007448991775512695, 0.007430240154266358, 0.007497983932495117, 0.007397568225860596, 0.0073968000411987304, 0.007330463886260986, 0.007445888042449951, 0.007506400108337402, 0.007425983905792236, 0.007420991897583008, 0.007416768074035645, 0.007394815921783447, 0.007354944229125976, 0.007455840110778809, 0.007338016033172607, 0.007379839897155762, 0.0073743038177490235, 0.007367487907409668, 0.0074076480865478515, 0.007311039924621582, 0.007347263813018799, 0.007380127906799316, 0.007423808097839355, 0.007380959987640381, 0.007407135963439941, 0.007369184017181397, 0.007347551822662354, 0.007467584133148193, 0.007382143974304199, 0.007404319763183594, 0.007345439910888672, 0.007299295902252197, 0.007517888069152832, 0.00733625602722168, 0.007369919776916504, 0.007418176174163818, 0.007365119934082031, 0.007415743827819824, 0.007593088150024414, 0.007566271781921386, 
0.007827455997467042, 0.0075977277755737305, 0.007503744125366211, 0.007624512195587158, 0.007520927906036377, 0.007518208026885987, 0.00749567985534668, 0.0074711041450500485, 0.007458879947662354, 0.0074659519195556644, 0.0075697598457336425, 0.00759222412109375, 0.007508480072021485, 0.007627840042114258, 0.007572256088256836, 0.007395328044891358, 0.00742195177078247, 0.007532351970672607, 0.0073545598983764645, 0.007391232013702393, 0.0073994240760803225, 0.007383039951324463, 0.007348576068878174, 0.007485087871551514, 0.007640255928039551, 0.0077870402336120605, 0.007700831890106201, 0.007823296070098877, 0.007611519813537597, 0.007531680107116699, 0.007487391948699951, 0.007501408100128174, 0.007659520149230957, 0.007472383975982666, 0.008530912399291992, 0.007716991901397705, 0.007726975917816162, 0.007587679862976074, 0.007495840072631836, 0.007382271766662597, 0.007512608051300049, 0.00754041576385498, 0.007854720115661622, 0.007441376209259033, 0.007486303806304931, 0.007554175853729248, 0.00745366382598877, 0.007700479984283447, 0.007378943920135498, 0.007380256175994873, 0.007389920234680176, 0.007414015769958496, 0.007405312061309815, 0.007368703842163086, 0.007473440170288086, 0.0073827519416809085, 0.007335008144378662, 0.0074617919921875, 0.007437727928161621, 0.0074143681526184085, 0.007513663768768311, 0.0074234561920166016, 0.007379936218261719, 0.007431263923645019, 0.007375584125518799, 0.007497024059295654, 0.007448736190795898, 0.007381728172302246, 0.007364607810974121, 0.007385087966918945, 0.00738099193572998, 0.007354656219482422, 0.007372511863708496, 0.007350272178649903, 0.007386528015136719, 0.007359072208404541, 0.007452095985412598, 0.008374143600463867, 0.007729856014251709, 0.007833600044250488, 0.007429152011871338, 0.007531744003295898, 0.01018988800048828, 0.007398079872131347, 0.007380127906799316, 0.007416672229766845, 0.007391232013702393, 0.0073994240760803225, 0.007413760185241699, 0.007372799873352051, 0.0073359360694885255, 0.007419040203094482, 0.007508800029754638, 0.007510047912597656, 0.007434239864349365, 0.007376832008361816, 0.007763328075408936, 0.007383552074432373, 0.007383232116699219, 0.007446271896362305, 0.007346432209014892, 0.0074670081138610836, 0.00746281623840332, 0.007532639980316162, 0.007393280029296875, 0.007526400089263916, 0.007638944149017334, 0.0075879359245300294, 0.007540512084960938, 0.0074462399482727055, 0.007404032230377197, 0.0074301438331604, 0.0073968319892883305, 0.007387839794158936, 0.007406496047973633, 0.007418816089630127, 0.007501823902130127, 0.0073400321006774905, 0.007806911945343018, 0.007456831932067871, 0.007398848056793213, 0.007367231845855713, 0.0073554239273071285, 0.00739631986618042, 0.007366496086120606, 0.0074141759872436526, 0.007378687858581543, 0.0074711041450500485, 0.007358463764190673, 0.007390463829040527, 0.007480063915252686, 0.0074107198715209965, 0.007435232162475586, 0.007382271766662597, 0.007359007835388184, 0.007328256130218506, 0.00737667179107666, 0.007357823848724365, 0.007344543933868408, 0.007360256195068359, 0.00734009599685669, 0.007362912178039551, 0.007301119804382325, 0.007352320194244385, 0.007359519958496093, 0.007338719844818115, 0.00734819221496582, 0.0074795842170715336, 0.0073994240760803225, 0.007382815837860108, 0.007340288162231446, 0.007395296096801758, 0.007581855773925781, 0.007788608074188233, 0.0076808958053588864, 0.0075664958953857425, 0.00743398380279541, 0.007522592067718506, 0.007407328128814697, 0.00738918399810791, 0.007419904232025146, 
0.007401472091674805, 0.0073842558860778805, 0.007360960006713867, 0.007434624195098877, 0.007436287879943848, 0.007492671966552735, 0.007518655776977539, 0.007452288150787354, 0.0074720001220703125, 0.007695456027984619, 0.00781440019607544, 0.007640736103057861, 0.007624703884124756, 0.007696383953094482, 0.007614719867706299, 0.008049759864807129, 0.007755936145782471, 0.007790495872497558, 0.007641183853149414, 0.007654143810272217, 0.007997536182403564, 0.007552512168884277, 0.0074225602149963375, 0.007518208026885987, 0.007423295974731446, 0.007344096183776855, 0.007502560138702393, 0.007374688148498535, 0.007505184173583985, 0.0073900799751281734, 0.007388671875, 0.00750438404083252, 0.007341216087341309, 0.007418208122253418, 0.007408127784729004, 0.007481344223022461, 0.00735427188873291, 0.007351424217224121, 0.007361504077911377, 0.0074355840682983395, 0.007342463970184327, 0.007358496189117432, 0.008352031707763672, 0.007910431861877442, 0.007371200084686279, 0.007518752098083496, 0.007325856208801269, 0.007399263858795166, 0.007360511779785156, 0.007501823902130127, 0.0074065918922424315, 0.007386112213134765, 0.0073994240760803225, 0.007374879837036133, 0.007352128028869629, 0.007327904224395752, 0.007377024173736572, 0.007360095977783203, 0.007436575889587402, 0.007350272178649903, 0.0073175039291381834, 0.007364607810974121, 0.007409664154052734, 0.0073933758735656736, 0.00746284818649292, 0.008130656242370606, 0.007742591857910156, 0.007399519920349121, 0.007410208225250244, 0.007345536231994629, 0.007371359825134277, 0.007386720180511475, 0.007416255950927734, 0.00740496015548706, 0.007534656047821045, 0.00740393590927124, 0.007341951847076416, 0.007397759914398194, 0.00738918399810791, 0.007417568206787109, 0.007481632232666016, 0.007427135944366455, 0.007389408111572265, 0.007598176002502442, 0.007536640167236328, 0.0077452797889709475, 0.007454976081848145, 0.007445759773254394, 0.007518976211547851, 0.007419871807098389, 0.007391263961791992, 0.007600128173828125, 0.008173215866088867, 0.00762883186340332, 0.007413792133331299, 0.007346464157104492, 0.007408736228942871, 0.00744700813293457, 0.007410111904144287, 0.00742416000366211, 0.007460224151611328, 0.007384928226470947, 0.0073818879127502445, 0.00740121603012085, 0.007530272006988525, 0.007403232097625732, 0.007384960174560547, 0.00741644811630249, 0.007337279796600342, 0.007346879959106445, 0.0075380802154541016, 0.007377024173736572, 0.007439072132110596, 0.007423520088195801, 0.007384575843811035, 0.0073530559539794925, 0.007405344009399414, 0.00749616003036499, 0.007390624046325684, 0.007387008190155029, 0.007395199775695801, 0.00734611177444458, 0.0073787841796875, 0.007469664096832276, 0.007383264064788818, 0.007485439777374267, 0.0075548157691955565, 0.007620863914489746, 0.007566944122314453, 0.007506495952606202, 0.007421792030334473, 0.007428095817565918, 0.007540736198425293, 0.007458655834197998, 0.007494944095611572, 0.007466879844665528, 0.007445504188537597, 0.0074035201072692874, 0.0074496960639953615, 0.007773056030273438, 0.007533984184265137, 0.007484032154083252, 0.007515615940093994, 0.007487071990966797, 0.007483871936798095, 0.0075033278465271, 0.007396224021911621, 0.0073400321006774905, 0.007510079860687256, 0.007366591930389404, 0.007499807834625244, 0.007357920169830322, 0.007374368190765381, 0.007486432075500488, 0.0073359360694885255, 0.0073669438362121585, 0.007485151767730713, 0.007358463764190673, 0.007353856086730957, 0.007330304145812989, 0.007318816184997559, 
0.00735100793838501, 0.007327744007110596, 0.007352320194244385, 0.008028096199035644, 0.00831443214416504, 0.00824556827545166, 0.007524511814117432, 0.008675359725952148, 0.007483104228973389, 0.007596320152282715, 0.0074848318099975586, 0.0074860482215881345, 0.007632895946502686, 0.007377247810363769, 0.00745027208328247, 0.0075980801582336424, 0.007468063831329346, 0.007476191997528076, 0.007491583824157715, 0.007452191829681397, 0.007444096088409424, 0.007392096042633057, 0.007370687961578369, 0.007374207973480224, 0.007370463848114014, 0.007377632141113281, 0.007360735893249512, 0.0074076480865478515, 0.007343552112579346, 0.007375423908233643, 0.0073820481300354, 0.0074618558883666995, 0.00790118408203125, 0.007689568042755127, 0.007903264045715333, 0.008352512359619141, 0.00848031997680664, 0.007665823936462402, 0.00771292781829834, 0.007669760227203369, 0.008058879852294922, 0.00759603214263916, 0.008038399696350097, 0.0074691839218139645, 0.007509888172149658, 0.007639039993286132, 0.007831647872924804, 0.007401823997497559, 0.007602176189422607, 0.007411712169647216, 0.007356416225433349, 0.007532256126403809, 0.007438687801361084, 0.007499584197998047, 0.007447999954223633, 0.007415808200836181, 0.0076397438049316405, 0.007612415790557861, 0.007686143875122071, 0.007766016006469726, 0.007698239803314209, 0.007566976070404053, 0.00758841609954834, 0.007521312236785888, 0.007520480155944824, 0.007494272232055664, 0.007569536209106446, 0.007524352073669433, 0.007497791767120361, 0.00741107177734375, 0.007485983848571777, 0.007364640235900879, 0.007485439777374267, 0.007544928073883057, 0.007505375862121582, 0.007436736106872559, 0.007534912109375, 0.0077775678634643556, 0.007721119880676269, 0.007567264080047607, 0.007673664093017578, 0.007572000026702881, 0.0076754879951477055, 0.0076557121276855464, 0.007579648017883301, 0.007497536182403564, 0.0075021438598632815, 0.00746617603302002, 0.007455552101135254, 0.007384064197540283, 0.0073851518630981446, 0.007437248229980469, 0.007394688129425049, 0.0075797438621521, 0.0076600642204284666, 0.007848159790039062, 0.007951168060302735, 0.007877600193023682, 0.007614463806152344, 0.007515135765075683, 0.007430592060089111, 0.007428991794586182, 0.007447264194488525, 0.00743449592590332, 0.0074711360931396485, 0.007463615894317627, 0.007968160152435302, 0.007540544033050537, 0.0074288959503173825, 0.007456768035888672, 0.007500927925109863, 0.0074592318534851074, 0.0074015998840332034, 0.0074694080352783205, 0.0074170241355896, 0.007380832195281983, 0.007486400127410889, 0.007388544082641601, 0.00744924783706665, 0.00736787223815918, 0.007395264148712158, 0.007465856075286865, 0.007418879985809326, 0.007331967830657959, 0.007310207843780518, 0.007325695991516113, 0.007376480102539063, 0.007389503955841065, 0.007577695846557617, 0.007432479858398438, 0.007442431926727295, 0.007509727954864502, 0.007518208026885987, 0.00742195177078247, 0.007380032062530518, 0.007414080142974853, 0.0073632001876831055, 0.007398911952972412, 0.00739788818359375, 0.007591936111450195, 0.007423999786376953, 0.007380320072174072, 0.007592607975006103, 0.0074271678924560545, 0.0077259840965271, 0.007657792091369629, 0.0075363202095031735, 0.0075673599243164065, 0.00758128023147583, 0.0076698241233825685, 0.007694911956787109, 0.007682015895843506, 0.00772873592376709, 0.00765331220626831, 0.007762207984924317, 0.007741439819335938, 0.007688352108001709, 0.007745376110076904, 0.007731328010559082, 0.007585472106933594, 0.00791318416595459, 0.00763750410079956, 
0.00756499195098877, 0.007571743965148926, 0.007573023796081543, 0.007519775867462158, 0.007520639896392822, 0.007479775905609131, 0.007394495964050293, 0.007494048118591308, 0.007564799785614014, 0.0075642881393432615, 0.007608320236206055]",tokens/s,133.0472819842736,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 706, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.96 GiB. GPU 0 has a total capacity of 14.74 GiB of which 662.12 MiB is free. Process 421917 has 14.09 GiB memory in use. Of the allocated memory 13.97 GiB is allocated by PyTorch, and 6.66 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1068.404736,3734.89664,0.0,3332.374528,3314.861056,s,1,7.33119287109375,7.33119287109375,0.0,7.33119287109375,7.33119287109375,7.33119287109375,7.33119287109375,[7.33119287109375],,kWh,4.1885012167313104e-06,4.5442254090846736e-07,1.8697237179898796e-06,6.512647475629657e-06,,MB,1376.935936,3804.102656,0.0,3391.094784,2593.689088,s,10,0.6897658920288086,0.06897658920288086,0.0018599558890681459,0.06852655792236328,0.06965534286499023,0.0720173038482666,0.0739068726348877,"[0.07437926483154297, 0.06859622192382812, 0.06875750732421874, 0.06834636688232422, 0.06770877075195313, 0.06884307098388671, 0.06913046264648437, 0.06845689392089843, 0.06780973052978516, 0.06773760223388672]",tokens/s,3711.4041584026018,kWh,2.362914187129982e-06,2.6058611066635886e-07,1.5643269683066422e-06,4.1878272661029835e-06,tokens/kWh,61129550.89435264,MB,1389.039616,3804.102656,0.0,3391.094784,2593.691648,s,10,11.127339843749999,1.112733984375,0.03575566341824669,1.1002948608398437,1.1253911499023437,1.1716014587402344,1.208569705810547,"[1.217811767578125, 1.106619140625, 1.0967337646484374, 1.1007166748046875, 1.1151221923828125, 1.0949591064453126, 1.099873046875, 1.089849609375, 1.0958099365234375, 1.1098446044921875]",tokens/s,56.617305559680396,kWh,3.1535750350369236e-05,3.4779537368291892e-06,1.861066139389158e-05,5.362436548109001e-05,tokens/kWh,1174839.0761325874,,s,630,11.124750635147093,0.017658334341503323,0.0047519653098351666,0.01738771152496338,0.0177239294052124,0.01793473110198975,0.01935872444152833,"[0.13618934631347657, 0.01729017639160156, 0.017311391830444337, 0.017292383193969727, 0.017226655960083007, 0.017247583389282225, 0.01737590408325195, 0.017221343994140624, 0.017221920013427733, 0.017201152801513672, 0.017147647857666017, 0.0172458553314209, 0.017414751052856444, 0.017252351760864256, 0.017358047485351562, 0.01729033660888672, 0.01727395248413086, 0.017453632354736327, 0.017657632827758788, 0.017229536056518554, 0.017182559967041017, 0.018108543395996095, 0.01815100860595703, 0.017349599838256836, 0.017343807220458984, 0.017375936508178712, 0.017408000946044923, 0.01728291130065918, 0.01738515281677246, 0.017267040252685547, 0.017221855163574218, 0.017294368743896484, 0.017341312408447267, 0.017271039962768554, 0.017280256271362305, 0.017240575790405274, 0.017293312072753905, 0.017282943725585937, 0.01727926445007324, 0.017184032440185546, 0.0189607048034668, 0.01810047912597656, 0.017321983337402345, 0.017557504653930665, 0.01761248016357422, 0.017673856735229494, 0.01751043128967285, 0.017387775421142577, 0.01763164710998535, 0.017899711608886718, 0.017440576553344727, 0.017321983337402345, 0.017182079315185547, 
0.017293952941894532, 0.01731564712524414, 0.017473056793212892, 0.01753055953979492, 0.017527776718139647, 0.017527936935424804, 0.018029439926147462, 0.017686527252197267, 0.01750160026550293, 0.017606399536132813, 0.017870847702026366, 0.017661439895629884, 0.017418880462646485, 0.017467008590698243, 0.017396095275878907, 0.017446176528930664, 0.01753353691101074, 0.017379104614257814, 0.017593696594238283, 0.017281120300292968, 0.017244960784912108, 0.01742393684387207, 0.01830918312072754, 0.01772377586364746, 0.01830611228942871, 0.01748780822753906, 0.01747648048400879, 0.01737500762939453, 0.017690879821777344, 0.017428575515747072, 0.01746329689025879, 0.017491968154907226, 0.017504512786865236, 0.017477024078369142, 0.01763363265991211, 0.017729536056518554, 0.01764352035522461, 0.017551103591918946, 0.017622751235961916, 0.017794687271118163, 0.018029727935791016, 0.017774335861206053, 0.01738137626647949, 0.01744486427307129, 0.017565696716308594, 0.017438720703125, 0.017451007843017577, 0.017486175537109374, 0.017370399475097657, 0.017850751876831054, 0.017649663925170898, 0.017812959671020506, 0.017697311401367186, 0.017768447875976562, 0.0176312313079834, 0.017462783813476563, 0.017467199325561525, 0.0174005126953125, 0.01739776039123535, 0.017451007843017577, 0.017450784683227537, 0.017356639862060548, 0.017397344589233397, 0.017277727127075194, 0.017272640228271484, 0.017499679565429686, 0.017512672424316405, 0.017330528259277344, 0.01767843246459961, 0.017536352157592774, 0.017644479751586915, 0.017708127975463867, 0.017694719314575197, 0.017831872940063477, 0.017555360794067384, 0.017363103866577148, 0.01733807945251465, 0.017401920318603516, 0.017401248931884765, 0.01729414367675781, 0.017294559478759765, 0.017238271713256835, 0.017219520568847655, 0.01738764762878418, 0.01722819137573242, 0.01731180763244629, 0.017475584030151366, 0.017507328033447265, 0.017452032089233398, 0.017374336242675783, 0.017301664352416993, 0.017441471099853514, 0.017313823699951172, 0.01740185546875, 0.01743667221069336, 0.017560895919799806, 0.01751046371459961, 0.017738367080688478, 0.017655807495117186, 0.017693727493286134, 0.017619935989379883, 0.017446304321289064, 0.017469440460205078, 0.01740764808654785, 0.017304288864135743, 0.01742051124572754, 0.017397695541381836, 0.017382623672485352, 0.017361759185791015, 0.01724345588684082, 0.017259199142456053, 0.017343807220458984, 0.017343231201171875, 0.017346559524536134, 0.017239744186401368, 0.017256704330444336, 0.01722368049621582, 0.01721958351135254, 0.01726915168762207, 0.017341760635375975, 0.017252639770507814, 0.017342464447021484, 0.017259807586669923, 0.017233791351318358, 0.017453344345092773, 0.01732796859741211, 0.01732476806640625, 0.01732150459289551, 0.017457632064819335, 0.01789952087402344, 0.01765376091003418, 0.01761894416809082, 0.017447999954223633, 0.017445823669433595, 0.01749523162841797, 0.01734329605102539, 0.01743404769897461, 0.017369375228881836, 0.017219680786132813, 0.01746553611755371, 0.0172728328704834, 0.017352703094482422, 0.017334432601928712, 0.017592159271240235, 0.017362720489501954, 0.017356128692626954, 0.01763545608520508, 0.017427040100097657, 0.017533088684082033, 0.017404064178466797, 0.017463136672973632, 0.017541120529174805, 0.017543167114257813, 0.0173734073638916, 0.017286272048950196, 0.017281120300292968, 0.017326656341552733, 0.017557504653930665, 0.0175795841217041, 0.01766806411743164, 0.01756822395324707, 0.01760256004333496, 0.01767628860473633, 0.01753251266479492, 
0.017531295776367188, 0.01738956832885742, 0.017317888259887695, 0.017358591079711914, 0.01747158432006836, 0.017344671249389647, 0.017385471343994142, 0.01745305633544922, 0.017495391845703125, 0.01754979133605957, 0.01746963119506836, 0.01745305633544922, 0.017336320877075196, 0.017293312072753905, 0.01724527931213379, 0.01739664077758789, 0.017469440460205078, 0.017582080841064454, 0.01825382423400879, 0.01741209602355957, 0.017373184204101562, 0.017467391967773437, 0.017636959075927734, 0.0174800968170166, 0.01807155227661133, 0.017663263320922853, 0.017652000427246094, 0.017744543075561524, 0.017501983642578125, 0.017364383697509766, 0.01737334442138672, 0.017189632415771483, 0.017280704498291017, 0.017425664901733397, 0.017292032241821288, 0.01813587188720703, 0.017661056518554687, 0.017519487380981445, 0.017391616821289063, 0.017328128814697266, 0.017370431900024415, 0.017349056243896484, 0.01728531265258789, 0.01729542350769043, 0.017469663619995118, 0.017532064437866212, 0.017461408615112306, 0.017471456527709962, 0.017209856033325196, 0.01802444839477539, 0.01754857635498047, 0.01947648048400879, 0.025942111968994142, 0.017756128311157228, 0.017613344192504883, 0.017317344665527343, 0.01749225616455078, 0.017359231948852537, 0.017207040786743164, 0.01718092727661133, 0.017229824066162108, 0.017262592315673828, 0.01735862350463867, 0.017459392547607422, 0.01753910446166992, 0.017434623718261717, 0.017391616821289063, 0.017563648223876953, 0.01782905578613281, 0.017611583709716796, 0.0175797119140625, 0.017713472366333007, 0.018290687561035156, 0.01735798454284668, 0.017535839080810547, 0.01741004753112793, 0.017443904876708983, 0.017486783981323244, 0.017382783889770506, 0.017371007919311524, 0.01770719909667969, 0.018213600158691407, 0.017808319091796875, 0.01770745658874512, 0.01768819236755371, 0.01775811195373535, 0.017712064743041992, 0.01760665512084961, 0.017494016647338868, 0.017326080322265625, 0.01730143928527832, 0.017464544296264647, 0.01804579162597656, 0.01765171241760254, 0.017631103515625, 0.017299583435058594, 0.01722777557373047, 0.017558719635009764, 0.01793497657775879, 0.017707008361816406, 0.017405216217041015, 0.017394399642944335, 0.01720524787902832, 0.017321983337402345, 0.01737491226196289, 0.019431711196899414, 0.017463327407836914, 0.017633567810058592, 0.017790624618530274, 0.01769273567199707, 0.017749664306640624, 0.01752422332763672, 0.017351520538330077, 0.017262592315673828, 0.017366464614868165, 0.017257024765014648, 0.01804083251953125, 0.01740777587890625, 0.01723619270324707, 0.017131519317626954, 0.017254400253295898, 0.017423839569091798, 0.017361440658569337, 0.01740608024597168, 0.018023359298706056, 0.017492927551269532, 0.017505664825439454, 0.01738159942626953, 0.01747920036315918, 0.017213472366333006, 0.017258527755737305, 0.017099584579467773, 0.01718988800048828, 0.017153024673461914, 0.01721500778198242, 0.017236064910888672, 0.01721401596069336, 0.017147552490234374, 0.01730361557006836, 0.017115232467651367, 0.017332223892211913, 0.017147903442382813, 0.01775116729736328, 0.01717695999145508, 0.017295135498046874, 0.017205984115600585, 0.017293312072753905, 0.017125471115112305, 0.01722153663635254, 0.017157503128051758, 0.01720809555053711, 0.017190303802490235, 0.0173920955657959, 0.017078208923339843, 0.01717977523803711, 0.017050207138061522, 0.017342784881591796, 0.017048576354980468, 0.017209856033325196, 0.017013280868530274, 0.017160160064697266, 0.017260704040527344, 0.01825916862487793, 0.017382015228271486, 
0.017544927597045897, 0.017518815994262697, 0.017255712509155273, 0.017191423416137695, 0.017338880538940428, 0.017540895462036132, 0.017565248489379882, 0.017622623443603515, 0.0175882568359375, 0.017524864196777342, 0.017240480422973634, 0.017408287048339844, 0.017378976821899414, 0.017319391250610352, 0.017584831237792968, 0.017405344009399415, 0.017361696243286134, 0.017506303787231444, 0.017610687255859375, 0.017521024703979492, 0.017319103240966797, 0.01726924705505371, 0.017310911178588868, 0.017908384323120117, 0.017301023483276366, 0.01729747200012207, 0.017328607559204103, 0.017320032119750976, 0.017557504653930665, 0.0176343994140625, 0.017597343444824217, 0.017399808883666993, 0.01724006462097168, 0.017358848571777344, 0.0174736328125, 0.01882512092590332, 0.017702079772949218, 0.01763315200805664, 0.017488832473754882, 0.017451007843017577, 0.017305728912353515, 0.01739116859436035, 0.017297727584838867, 0.017242111206054688, 0.017196544647216795, 0.01723417663574219, 0.01729523277282715, 0.01737104034423828, 0.017406431198120118, 0.017373184204101562, 0.017389440536499025, 0.017400991439819335, 0.01749500846862793, 0.01723513603210449, 0.01722038459777832, 0.017475168228149415, 0.017349056243896484, 0.017379007339477538, 0.017626911163330077, 0.017579872131347655, 0.01732905578613281, 0.01738947105407715, 0.017426752090454103, 0.017436447143554686, 0.017543167114257813, 0.017475584030151366, 0.017283071517944337, 0.017276927947998046, 0.017242111206054688, 0.017201152801513672, 0.01721548843383789, 0.017334272384643554, 0.017278976440429687, 0.01849900817871094, 0.017363519668579103, 0.017326080322265625, 0.017313631057739257, 0.017409727096557616, 0.01721126365661621, 0.017248863220214843, 0.017204288482666016, 0.017208255767822266, 0.017261695861816407, 0.017281919479370116, 0.017169567108154298, 0.01728006362915039, 0.017200799942016603, 0.017262048721313476, 0.017144479751586915, 0.01724006462097168, 0.017231008529663087, 0.01725935935974121, 0.01721958351135254, 0.017319936752319336, 0.01723299217224121, 0.017321983337402345, 0.017187744140625, 0.017219839096069337, 0.017173280715942384, 0.017267679214477537, 0.017161535263061523, 0.017306304931640624, 0.01719500732421875, 0.01728019142150879, 0.01718150329589844, 0.017276927947998046, 0.017237407684326172, 0.01720729637145996, 0.017103456497192384, 0.017530879974365234, 0.01722502326965332, 0.017219615936279298, 0.01724687957763672, 0.017305023193359376, 0.01748624038696289, 0.01740611267089844, 0.01717043113708496, 0.017315839767456053, 0.01726201629638672, 0.017223295211791993, 0.0171529598236084, 0.01714352035522461, 0.01749772834777832, 0.017643680572509767, 0.017918304443359376, 0.01761257553100586, 0.017440992355346678, 0.017304927825927734, 0.017156768798828125, 0.017151264190673827, 0.017257183074951173, 0.017309696197509765, 0.017143327713012694, 0.017161727905273438, 0.017174911499023438, 0.01711724853515625, 0.017156639099121095, 0.01706777572631836, 0.017157440185546876, 0.017159103393554687, 0.017217536926269532, 0.01725644874572754, 0.017252351760864256, 0.01719305610656738, 0.01737513542175293, 0.017100799560546876, 0.017156095504760743, 0.01719910430908203, 0.017176576614379883, 0.017123327255249024, 0.017101055145263673, 0.01714348793029785, 0.017147359848022462, 0.017236000061035157, 0.017303712844848634, 0.017232288360595704, 0.01718409538269043, 0.017105567932128907, 0.017100799560546876, 0.01725472068786621, 0.01707084846496582, 0.0171079044342041, 0.017081472396850587, 0.01763827133178711, 
0.01767206382751465, 0.017501535415649413, 0.0180284481048584, 0.01726908874511719, 0.017408544540405274, 0.017723392486572266, 0.017860511779785156, 0.01782793617248535, 0.01768550491333008, 0.017562751770019532, 0.017444736480712892, 0.017358367919921874, 0.017510271072387694, 0.017250207901000975, 0.017256927490234376, 0.017656288146972655, 0.017194751739501954, 0.017686527252197267, 0.020324352264404297, 0.01739347267150879, 0.017479871749877928, 0.017712831497192383, 0.017641471862792968, 0.01755340766906738, 0.017366880416870116, 0.017410207748413085, 0.017260543823242186, 0.017370176315307618, 0.017178592681884767, 0.017132511138916017, 0.017158016204833985, 0.01720742416381836, 0.01804038429260254, 0.01918003273010254, 0.017884096145629882, 0.017738752365112305, 0.017675680160522463, 0.017752672195434572, 0.017317472457885744, 0.017619359970092775, 0.017311744689941407, 0.017325504302978516, 0.017934431076049806, 0.017185184478759767, 0.017203264236450196, 0.017192480087280273, 0.017133344650268556, 0.01714246368408203, 0.01750614356994629, 0.017297216415405273, 0.017398111343383788, 0.01738313674926758, 0.017413631439208984, 0.017227872848510743, 0.017527488708496092, 0.017249792098999024, 0.017257984161376954, 0.01735577583312988, 0.01727267265319824, 0.017270944595336915, 0.0174531192779541, 0.017477567672729493, 0.017511808395385742, 0.017477344512939454, 0.01740483283996582, 0.017587711334228515, 0.01753548812866211, 0.017444255828857422, 0.01780940818786621, 0.017867359161376953, 0.017690624237060547, 0.01833337593078613, 0.022105567932128905, 0.019688287734985353, 0.017430303573608398, 0.017326143264770506, 0.017430688858032225, 0.017725311279296874, 0.017528640747070313, 0.017538944244384767, 0.017562047958374023, 0.017741823196411134, 0.017709056854248048, 0.017565696716308594, 0.01755072021484375]",tokens/s,56.63048284512581,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,787.103744,14136.77056,0.0,13734.248448,13728.777216,s,1,7.513701171875,7.513701171875,0.0,7.513701171875,7.513701171875,7.513701171875,7.513701171875,[7.513701171875],,kWh,6.653668845910942e-06,7.262489984862029e-07,3.624169566007751e-06,1.1004087410404896e-05,,MB,1232.289792,14151.450624,0.0,13736.3456,13487.53408,s,10,1.628662567138672,0.1628662567138672,0.0027252420706094633,0.16340350341796875,0.16432027282714845,0.16510616607666015,0.16573488067626954,"[0.15506346130371093, 0.16589205932617188, 0.1641456298828125, 0.1634385986328125, 0.16312876892089845, 0.16327967834472656, 0.16283779907226562, 0.1633958740234375, 0.16406956481933593, 
0.1634111328125]",tokens/s,1571.841860710015,kWh,4.777222318750895e-06,5.268408463845354e-07,3.1798322929676274e-06,8.483895458103058e-06,tokens/kWh,30174817.837422986,MB,1264.828416,14151.450624,0.0,13736.3456,13661.262848,s,10,37.93551904296876,3.7935519042968755,0.004901287590880917,3.7917740478515625,3.7993248779296875,3.800223229980469,3.8009419116210936,"[3.7910361328125, 3.7977041015625, 3.798703857421875, 3.80112158203125, 3.799125244140625, 3.786149658203125, 3.7914287109375, 3.789326904296875, 3.788803466796875, 3.792119384765625]",tokens/s,16.60712745979335,kWh,0.00011061108589707514,1.2200634627129695e-05,7.342028454263357e-05,0.00019623200506683838,tokens/kWh,321048.54648221954,,s,630,37.931929466247524,0.060209411851186605,0.00027012619093296164,0.06018145561218262,0.0604509880065918,0.06057724781036377,0.06145498264312744,"[0.06148543930053711, 0.06066527938842774, 0.06001968002319336, 0.059942272186279295, 0.059953792572021485, 0.06012435150146484, 0.059918750762939454, 0.06021366500854492, 0.05993881607055664, 0.060006591796875, 0.059915199279785156, 0.05998476791381836, 0.059840511322021485, 0.0600186882019043, 0.059889663696289064, 0.05994870376586914, 0.06017206573486328, 0.060238399505615235, 0.06035865783691406, 0.060252159118652344, 0.06013951873779297, 0.060159423828125, 0.06007222366333008, 0.05998409652709961, 0.06008575820922851, 0.060088672637939454, 0.059965663909912106, 0.060087806701660154, 0.06002329635620117, 0.060063743591308595, 0.06011638259887695, 0.06029068756103516, 0.060258369445800784, 0.06039971160888672, 0.06026121520996094, 0.06046844863891602, 0.060250049591064454, 0.060244800567626954, 0.060030975341796876, 0.06011011123657226, 0.06011497497558594, 0.06022991943359375, 0.060106590270996095, 0.06011119842529297, 0.06012131118774414, 0.06015385437011719, 0.06025827026367187, 0.060128543853759764, 0.06013587188720703, 0.06031801605224609, 0.06066995239257812, 0.06039756774902344, 0.060036865234375, 0.06016640090942383, 0.0601879997253418, 0.060117855072021484, 0.060118110656738284, 0.060176929473876956, 0.06018876647949219, 0.06027478408813477, 0.060181758880615235, 0.060324607849121095, 0.06015795135498047, 0.061494400024414066, 0.06069436645507813, 0.060014591217041016, 0.05998796844482422, 0.060022270202636716, 0.06003968048095703, 0.0598548469543457, 0.06005350494384765, 0.06010639953613281, 0.0600241584777832, 0.06015283203125, 0.06004022216796875, 0.060109790802001954, 0.06009446334838867, 0.06002687835693359, 0.0599736328125, 0.06016204833984375, 0.060434432983398435, 0.06033919906616211, 0.060283905029296876, 0.06008838272094726, 0.060200897216796875, 0.06034841537475586, 0.05996953582763672, 0.06000230407714844, 0.06004256057739258, 0.059972286224365234, 0.06016320037841797, 0.06023468780517578, 0.0602534065246582, 0.060326847076416015, 0.060151359558105466, 0.06022304153442383, 0.060326400756835936, 0.06036703872680664, 0.060335521697998044, 0.06023209762573242, 0.06025804901123047, 0.060659904479980466, 0.060243743896484375, 0.06014803314208984, 0.06029267120361328, 0.06014777755737305, 0.060226047515869144, 0.06033817672729492, 0.06054707336425781, 0.06023168182373047, 0.06031564712524414, 0.060429439544677735, 0.060429183959960935, 0.06046665573120117, 0.06055321502685547, 0.06035468673706055, 0.060407711029052735, 0.06040854263305664, 0.06032156753540039, 0.060655521392822265, 0.06043862533569336, 0.06021884918212891, 0.06036636734008789, 0.060363201141357424, 0.06060655975341797, 0.06077487945556641, 0.06145609664916992, 
0.06063129425048828, 0.06003302383422852, 0.05987641525268555, 0.05979641723632813, 0.05990822219848633, 0.059870784759521484, 0.06004499053955078, 0.06026428985595703, 0.06016694259643555, 0.0600055046081543, 0.05993356704711914, 0.06011904144287109, 0.0599736328125, 0.05999379348754883, 0.06000876617431641, 0.060290176391601565, 0.06032876968383789, 0.060174400329589844, 0.06100585556030273, 0.06025539016723633, 0.06015264129638672, 0.06019276809692383, 0.06012102508544922, 0.06018886566162109, 0.06026995086669922, 0.06010726547241211, 0.06027264022827149, 0.060391422271728515, 0.06037088012695312, 0.060182239532470705, 0.06026268768310547, 0.06048758316040039, 0.06040387344360352, 0.06039267349243164, 0.060485790252685544, 0.060383872985839845, 0.06028630447387695, 0.060338848114013674, 0.060186622619628906, 0.06022348785400391, 0.060313663482666015, 0.060413887023925784, 0.060260353088378904, 0.06019071960449219, 0.06028905487060547, 0.06040364837646484, 0.06035990524291992, 0.060340030670166016, 0.060371967315673826, 0.06050944137573242, 0.06046188735961914, 0.060526302337646484, 0.060641632080078126, 0.06033542251586914, 0.06038995361328125, 0.0603911361694336, 0.060422431945800784, 0.06033926391601562, 0.06046815872192383, 0.06028204727172851, 0.06034515380859375, 0.06040496063232422, 0.061851425170898436, 0.06074544143676758, 0.060269023895263674, 0.060141376495361325, 0.060100799560546876, 0.06006502532958984, 0.05999411010742187, 0.06013209533691406, 0.05998332977294922, 0.060160545349121096, 0.060155776977539065, 0.06026639938354492, 0.05996771240234375, 0.06019276809692383, 0.06013264083862305, 0.06024854278564453, 0.06032940673828125, 0.06048441696166992, 0.060374591827392576, 0.06033203125, 0.06035811233520508, 0.06043862533569336, 0.06013174438476562, 0.06017804718017578, 0.06018115234375, 0.06015740966796875, 0.0602611198425293, 0.060379104614257814, 0.060395359039306644, 0.060311424255371095, 0.060321887969970706, 0.06036857604980469, 0.06044927978515625, 0.06039078521728516, 0.060354175567626955, 0.060450942993164065, 0.06044147109985352, 0.060555038452148435, 0.06033347320556641, 0.06030937576293945, 0.0603043212890625, 0.060388607025146486, 0.06044339370727539, 0.060391422271728515, 0.06038713455200195, 0.06038547134399414, 0.060396575927734376, 0.060298206329345704, 0.06029935836791992, 0.060439743041992185, 0.060451553344726565, 0.0603238410949707, 0.0603504638671875, 0.0603804817199707, 0.06053548812866211, 0.060235774993896485, 0.060563007354736326, 0.06029091262817383, 0.0602341423034668, 0.06024236679077148, 0.06017753601074219, 0.06023632049560547, 0.0602850227355957, 0.06148486328125, 0.06053267288208008, 0.05994688034057617, 0.06001113510131836, 0.05989126586914063, 0.060076446533203126, 0.05996355056762695, 0.059967361450195315, 0.05995532989501953, 0.060065502166748046, 0.06034457778930664, 0.060061729431152344, 0.05999740982055664, 0.06016080093383789, 0.06011036682128906, 0.060131168365478514, 0.0602375373840332, 0.06048886489868164, 0.0603359375, 0.06028076934814453, 0.06008963012695313, 0.0600412483215332, 0.06002758407592773, 0.05995731353759766, 0.060010433197021484, 0.06043356704711914, 0.06004787063598633, 0.06002115249633789, 0.06012716674804688, 0.06007926559448242, 0.06078556823730469, 0.06030227279663086, 0.060303806304931644, 0.060367359161376956, 0.060368030548095704, 0.0607938232421875, 0.06025414276123047, 0.06036684799194336, 0.06021052932739258, 0.060297183990478516, 0.06030201721191406, 0.06052864074707031, 0.06058313751220703, 
0.06045161437988281, 0.06021865463256836, 0.06029385757446289, 0.06026649475097656, 0.06051241683959961, 0.060405601501464845, 0.060393470764160156, 0.0603504638671875, 0.060432384490966794, 0.06043971252441406, 0.060451393127441404, 0.0608873291015625, 0.06037667083740234, 0.060341953277587894, 0.06042489624023437, 0.06039254379272461, 0.060408767700195314, 0.06043033599853516, 0.0605263671875, 0.06037926483154297, 0.06167695999145508, 0.06049260711669922, 0.06006556701660156, 0.06000409698486328, 0.05987491226196289, 0.05995734405517578, 0.059990814208984375, 0.05983852767944336, 0.059779006958007815, 0.05986918258666992, 0.059842559814453126, 0.059989151000976564, 0.05979436874389649, 0.05997878265380859, 0.06013993453979492, 0.05990777587890625, 0.06020985412597656, 0.06032137680053711, 0.06004377746582031, 0.05990195083618164, 0.05981388854980469, 0.05992038345336914, 0.05987091064453125, 0.0599043197631836, 0.059799552917480465, 0.05994425582885742, 0.05996166229248047, 0.059928958892822265, 0.05983846282958984, 0.05984364700317383, 0.05988800048828125, 0.05995167922973633, 0.060055553436279295, 0.060225662231445314, 0.0603196792602539, 0.06016390228271484, 0.06016134262084961, 0.060162750244140625, 0.06015369415283203, 0.06006771087646484, 0.060049823760986325, 0.060203006744384766, 0.06005132675170898, 0.06006796646118164, 0.06025593566894531, 0.06016032028198242, 0.060129280090332034, 0.06014156723022461, 0.06028287887573242, 0.06019606399536133, 0.06032831954956055, 0.06045711898803711, 0.0602237434387207, 0.06020710372924805, 0.06015593719482422, 0.060227615356445316, 0.060135456085205076, 0.06028278350830078, 0.06018048095703125, 0.06009801483154297, 0.06005199813842774, 0.060129280090332034, 0.060147071838378904, 0.0613232307434082, 0.0603504638671875, 0.05998995208740234, 0.05997731018066406, 0.05984092712402344, 0.05993084716796875, 0.05985468673706055, 0.05995110321044922, 0.059983806610107424, 0.059966751098632816, 0.059908416748046874, 0.060082015991210935, 0.059961982727050785, 0.0599552001953125, 0.05990607833862305, 0.060418014526367185, 0.060096511840820314, 0.060278785705566405, 0.06028857421875, 0.060570049285888675, 0.06020223999023438, 0.060066558837890624, 0.06007187271118164, 0.060008510589599606, 0.060128448486328125, 0.06021171188354492, 0.05997343826293945, 0.06003507232666016, 0.05998649597167969, 0.06009423828125, 0.06022159957885742, 0.06012067031860351, 0.06033235168457031, 0.0602154541015625, 0.060362686157226564, 0.06017228698730469, 0.060096511840820314, 0.06044675064086914, 0.06008803176879883, 0.06003123092651367, 0.060017791748046875, 0.060083072662353514, 0.06021529769897461, 0.06011673736572266, 0.060088577270507815, 0.060217342376708984, 0.06010879898071289, 0.06013315200805664, 0.060190689086914065, 0.06027289581298828, 0.06019184112548828, 0.060205982208251956, 0.06023974227905273, 0.06030720138549805, 0.06039795303344726, 0.060353729248046876, 0.06024889755249024, 0.060217342376708984, 0.06029097747802734, 0.06058550262451172, 0.06046368026733399, 0.06030720138549805, 0.06029344177246094, 0.06133833694458008, 0.06035456085205078, 0.059840511322021485, 0.059875038146972655, 0.059818080902099606, 0.05989766311645508, 0.05994089508056641, 0.05983676910400391, 0.059858943939208986, 0.05998150253295898, 0.06006355285644531, 0.060663745880126956, 0.06027724838256836, 0.059888961791992185, 0.05993535995483398, 0.059940128326416015, 0.06039788818359375, 0.060227584838867185, 0.060144161224365236, 0.060098560333251956, 0.059924480438232425, 
0.05995110321044922, 0.06003507232666016, 0.06054092788696289, 0.060055553436279295, 0.059977279663085935, 0.06007033538818359, 0.06008627319335937, 0.05995503997802734, 0.06001395034790039, 0.06004550552368164, 0.06026710510253906, 0.06017814254760742, 0.06022582244873047, 0.06040790557861328, 0.06023676681518555, 0.06019372940063476, 0.06012108612060547, 0.060047359466552735, 0.060080127716064455, 0.06015926361083984, 0.060015071868896486, 0.05993446350097656, 0.06014003372192383, 0.060087711334228515, 0.060217281341552735, 0.06007260894775391, 0.060217342376708984, 0.060069438934326175, 0.06027222442626953, 0.06027536010742188, 0.06021343994140625, 0.06019664001464844, 0.06009683227539062, 0.06017424011230469, 0.06018019104003906, 0.060224864959716795, 0.06033299255371094, 0.06016207885742188, 0.06024771118164062, 0.060491649627685544, 0.060219135284423825, 0.06019747161865235, 0.06131539154052734, 0.06032134246826172, 0.05992265701293945, 0.05991241455078125, 0.05983027267456055, 0.05985718536376953, 0.05983935928344727, 0.05992534255981445, 0.0599183349609375, 0.059963390350341796, 0.06000230407714844, 0.05983417510986328, 0.06012716674804688, 0.060002239227294925, 0.05979296112060547, 0.05995974349975586, 0.060313919067382815, 0.06030547332763672, 0.06015769577026367, 0.06008230209350586, 0.059969600677490235, 0.06036479949951172, 0.0599246711730957, 0.05984979248046875, 0.06010860824584961, 0.059941825866699217, 0.06015536117553711, 0.060085857391357425, 0.06015385437011719, 0.06003705596923828, 0.05995008087158203, 0.06027040100097656, 0.06022982406616211, 0.06018255996704101, 0.06022883224487305, 0.060168960571289065, 0.06016819381713867, 0.06018617630004883, 0.06006796646118164, 0.060192447662353515, 0.06001932907104492, 0.059996097564697266, 0.05997555160522461, 0.06019705581665039, 0.060112895965576174, 0.06012313461303711, 0.0604139518737793, 0.060112895965576174, 0.060170238494873046, 0.06020505523681641, 0.06036636734008789, 0.06019120025634766, 0.06011286544799805, 0.0601416015625, 0.06015359878540039, 0.060354305267333985, 0.06033459091186524, 0.06021529769897461, 0.06021878433227539, 0.06027632141113281, 0.06066483306884766, 0.060329601287841796, 0.06019929504394531, 0.06145225524902344, 0.0606802864074707, 0.060004222869873045, 0.059821727752685544, 0.05981894302368164, 0.05990991973876953, 0.05973987197875977, 0.05975273513793945, 0.05970035171508789, 0.05990694427490234, 0.05991120147705078, 0.059902942657470704, 0.05993471908569336, 0.059835777282714844, 0.05986576080322266, 0.060002113342285154, 0.06035065460205078, 0.06045833587646485, 0.06027872085571289, 0.060058303833007816, 0.060007678985595704, 0.059985950469970704, 0.05997654342651367, 0.05997964859008789, 0.05989990234375, 0.05999993515014648, 0.060038944244384766, 0.06002947235107422, 0.06036070251464844, 0.06072227096557617, 0.06025718307495117, 0.06014976119995117, 0.060351646423339844, 0.060418304443359376, 0.06071289443969727, 0.06035932922363281, 0.060130462646484376, 0.060178688049316406, 0.06009302520751953, 0.060034656524658205, 0.060002239227294925, 0.06002908706665039, 0.05992483139038086, 0.06002070236206054, 0.06008182525634766, 0.06043024063110351, 0.060488128662109376, 0.06053887939453125, 0.06028902435302735, 0.06030339050292969, 0.06037091064453125, 0.06036896133422852, 0.06027798461914063, 0.06032252883911133, 0.06023987197875977, 0.06013337707519531, 0.06016409683227539, 0.06014976119995117, 0.06013078308105469, 0.06204265594482422, 0.06009014511108399, 0.060131263732910153, 
0.06018268966674805]",tokens/s,16.608699026517595,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,786.071552,1140.719616,0.0,738.197504,715.772928,s,1,7.593796875,7.593796875,0.0,7.593796875,7.593796875,7.593796875,7.593796875,[7.593796875],,kWh,3.1621531374639743e-06,3.4158290651561233e-07,9.463896459838139e-07,4.4501256899634e-06,,MB,1234.092032,1182.662656,0.0,767.557632,723.637248,s,10,0.178069185256958,0.0178069185256958,0.0001945073962952612,0.01783595275878906,0.018021560859680177,0.018050332164764402,0.018073349208831784,"[0.01788198471069336, 0.017902048110961914, 0.017780704498291014, 0.01768934440612793, 0.01807910346984863, 0.018015167236328126, 0.017941951751708984, 0.01758915138244629, 0.017399808883666993, 0.017789920806884765]",tokens/s,14376.434621778384,kWh,5.386425637376754e-07,5.9400405973095904e-08,3.5698573044278445e-07,9.55028700153556e-07,tokens/kWh,268054771.50460356,MB,1266.962432,1222.508544,0.0,807.40352,735.775744,s,10,10.06535400390625,1.006535400390625,0.009834493509237058,1.010860168457031,1.0131887451171875,1.0158673217773437,1.0180101831054686,"[1.012593505859375, 1.0086698608398437, 0.9952569580078126, 1.001731689453125, 1.0119219970703126, 1.011552734375, 1.0101676025390625, 0.9833295288085937, 1.011584228515625, 1.0185458984375]",tokens/s,62.590943125845754,kWh,2.9723969349596404e-05,3.2780563630907763e-06,1.2881219304558831e-05,4.588324501724603e-05,tokens/kWh,1373050.2272958318,,s,630,10.058648376464843,0.01596610853407118,0.00043604432991394633,0.015969232082366946,0.016144662094116212,0.016245702171325684,0.016963520431518556,"[0.01600908851623535, 0.016021503448486327, 0.01603561592102051, 0.01612838363647461, 0.01590019226074219, 0.0158253755569458, 0.015783935546875, 0.015928992271423338, 0.015931103706359865, 0.015903488159179687, 0.015886207580566407, 0.01587814426422119, 0.016080896377563478, 0.015960063934326172, 0.016060319900512696, 0.016144479751586914, 0.015978495597839357, 0.015976287841796874, 0.01598684787750244, 0.018937824249267578, 0.016807968139648438, 0.016942367553710938, 0.015960351943969726, 0.01602604866027832, 0.0159552001953125, 0.016179967880249023, 0.015994879722595216, 0.015925248146057128, 0.01593087959289551, 0.01628323173522949, 0.016023487091064454, 0.016005695343017576, 0.01600111961364746, 0.0160382080078125, 0.01610918426513672, 0.01591497611999512, 0.01589292812347412, 0.015902688026428222, 0.015859711647033693, 0.015966527938842772, 0.015908255577087402, 0.015836511611938477, 0.01586892795562744, 0.0158853759765625, 0.015844127655029298, 0.015912799835205077, 0.015907072067260743, 0.01588633632659912, 0.015953375816345216, 0.015979040145874025, 0.016053375244140626, 0.015989631652832032, 0.015914591789245605, 0.015995295524597167, 0.01607084846496582, 
0.016123392105102538, 0.015909184455871583, 0.016223712921142577, 0.016083488464355467, 0.01620582389831543, 0.016151935577392577, 0.01606902313232422, 0.016046432495117186, 0.015959327697753906, 0.015975135803222656, 0.01601126480102539, 0.01596950435638428, 0.01602230453491211, 0.016031999588012696, 0.01600819206237793, 0.01605407905578613, 0.01599775981903076, 0.01602118492126465, 0.016152544021606444, 0.015927488327026368, 0.01607094383239746, 0.016057695388793945, 0.016058271408081054, 0.016153215408325195, 0.016064895629882812, 0.015904224395751954, 0.016394079208374022, 0.016060863494873047, 0.016024864196777344, 0.016023584365844726, 0.016024255752563478, 0.01623859214782715, 0.016115711212158202, 0.016451583862304688, 0.016029695510864257, 0.015956255912780763, 0.01602761650085449, 0.01622604751586914, 0.01616486358642578, 0.01624662399291992, 0.01608323287963867, 0.016051679611206054, 0.01605878448486328, 0.016092384338378906, 0.015960160255432128, 0.01592390441894531, 0.01600307273864746, 0.015894495964050293, 0.01602729606628418, 0.016299711227416993, 0.01596895980834961, 0.015864928245544432, 0.015849663734436036, 0.015832096099853515, 0.015803135871887208, 0.015930303573608397, 0.015878496170043947, 0.016129375457763672, 0.015904319763183593, 0.015744832038879393, 0.015768128395080565, 0.015878527641296387, 0.01587814426422119, 0.01597987174987793, 0.016009567260742187, 0.01574124813079834, 0.015863807678222656, 0.015830623626708985, 0.015728511810302735, 0.015814944267272948, 0.01581449604034424, 0.01572454357147217, 0.01582630443572998, 0.01572928047180176, 0.015804287910461425, 0.015847552299499513, 0.015989760398864745, 0.016030719757080078, 0.016022592544555663, 0.01585657596588135, 0.016127391815185545, 0.015966815948486326, 0.015765503883361818, 0.015756735801696776, 0.015921728134155273, 0.01574841594696045, 0.0158024320602417, 0.015822848320007323, 0.015975040435791017, 0.015859904289245604, 0.01581446361541748, 0.01584332847595215, 0.01607219123840332, 0.01610393524169922, 0.016054208755493165, 0.01591308784484863, 0.015869407653808593, 0.015806943893432616, 0.01580681610107422, 0.015783552169799805, 0.015748767852783202, 0.01579456043243408, 0.015908864021301268, 0.016532928466796874, 0.015774271965026856, 0.015874048233032227, 0.015936863899230956, 0.01612486457824707, 0.015793888092041016, 0.015831071853637694, 0.01575119972229004, 0.01594156837463379, 0.015777376174926756, 0.01571827220916748, 0.015743519783020018, 0.01566268825531006, 0.015745471954345704, 0.01567740821838379, 0.015622143745422363, 0.015519743919372558, 0.015566847801208495, 0.01545644760131836, 0.015457375526428223, 0.015528096199035644, 0.015455840110778808, 0.015645055770874022, 0.015553119659423829, 0.015539679527282715, 0.015416000366210937, 0.01566703987121582, 0.01547871971130371, 0.015528256416320801, 0.015480064392089843, 0.01575068759918213, 0.015814528465270997, 0.01568566417694092, 0.015687840461730957, 0.015609536170959473, 0.015597536087036132, 0.015605792045593262, 0.01557094383239746, 0.015523263931274414, 0.01569375991821289, 0.015618687629699706, 0.015640735626220703, 0.015775615692138673, 0.01568355178833008, 0.015726592063903807, 0.015667200088500977, 0.015650815963745117, 0.016037887573242187, 0.015855135917663573, 0.0157903356552124, 0.015825119972229004, 0.01584537601470947, 0.015890496253967285, 0.015890496253967285, 0.01594371223449707, 0.016143583297729493, 0.015859871864318847, 0.015853983879089355, 0.015808671951293946, 0.01573801612854004, 0.015819135665893554, 
0.015829567909240724, 0.015927103996276854, 0.015824447631835936, 0.015934207916259765, 0.016020896911621094, 0.017147167205810547, 0.016092159271240233, 0.016068544387817383, 0.01594374370574951, 0.016029407501220703, 0.01596649646759033, 0.01587507152557373, 0.015981087684631346, 0.01590115165710449, 0.015845215797424317, 0.016076063156127928, 0.015911904335021973, 0.01603775978088379, 0.015880224227905273, 0.01608121681213379, 0.015986528396606445, 0.015954015731811523, 0.015902463912963866, 0.0160501766204834, 0.016074752807617186, 0.01602739143371582, 0.015915264129638673, 0.015917056083679198, 0.01593855953216553, 0.015952896118164063, 0.016008640289306642, 0.01605894470214844, 0.01601535987854004, 0.01600067138671875, 0.01599728012084961, 0.01597804832458496, 0.01602979278564453, 0.015908864021301268, 0.015921152114868165, 0.016099327087402342, 0.015968255996704102, 0.015941311836242675, 0.016083263397216798, 0.016265216827392577, 0.016152576446533205, 0.016027872085571288, 0.01598646354675293, 0.016121408462524415, 0.015958047866821288, 0.015972288131713867, 0.016233055114746094, 0.0160316162109375, 0.015990336418151854, 0.01648841667175293, 0.016085472106933594, 0.01605583953857422, 0.01598847961425781, 0.015946463584899904, 0.01599078369140625, 0.015927295684814453, 0.015964159965515135, 0.016058368682861326, 0.015968576431274414, 0.015986080169677733, 0.016023263931274415, 0.015983039855957032, 0.01625481605529785, 0.0161342716217041, 0.015992735862731932, 0.016041919708251952, 0.016068511962890625, 0.01588700771331787, 0.01595571231842041, 0.016051967620849608, 0.016165119171142578, 0.01601740837097168, 0.0160248966217041, 0.01599510383605957, 0.016083423614501952, 0.015931136131286622, 0.01620748710632324, 0.01598038387298584, 0.015976544380187988, 0.01598028755187988, 0.01598969554901123, 0.015937536239624024, 0.016107519149780272, 0.016379423141479492, 0.016032352447509765, 0.01598633575439453, 0.015934816360473635, 0.015950559616088867, 0.015992992401123046, 0.01602560043334961, 0.016029472351074218, 0.016972160339355467, 0.01584832000732422, 0.016046079635620117, 0.01614044761657715, 0.01608073616027832, 0.016074623107910156, 0.016224384307861328, 0.015904767990112305, 0.016125951766967773, 0.016316160202026368, 0.01597222423553467, 0.015968768119812012, 0.01601424026489258, 0.015977439880371095, 0.016064544677734376, 0.01596003246307373, 0.01598204803466797, 0.016013248443603516, 0.01600111961364746, 0.01608915138244629, 0.01599449634552002, 0.016071327209472658, 0.016007232666015624, 0.01658412742614746, 0.016222911834716795, 0.016097471237182616, 0.016107295989990233, 0.015972352027893065, 0.016254335403442382, 0.01591155242919922, 0.016012767791748046, 0.016088960647583007, 0.015963007926940918, 0.01595567989349365, 0.016059551239013672, 0.015982624053955077, 0.01603468894958496, 0.016205951690673827, 0.016146303176879883, 0.016058368682861326, 0.01616636848449707, 0.016111295700073244, 0.01595689582824707, 0.015972288131713867, 0.015976479530334473, 0.015953248023986816, 0.0158372802734375, 0.01597632026672363, 0.016098175048828125, 0.01603152084350586, 0.01596649646759033, 0.01611731147766113, 0.015958175659179688, 0.016021087646484376, 0.016036256790161133, 0.01600624084472656, 0.016132320404052734, 0.015989439964294434, 0.01608064079284668, 0.016000736236572267, 0.016004959106445314, 0.015964863777160644, 0.01599929618835449, 0.01597558403015137, 0.016015552520751954, 0.016132160186767577, 0.016000991821289064, 0.0161627197265625, 0.0159617919921875, 
0.016078559875488282, 0.016080768585205078, 0.01615135955810547, 0.016015104293823242, 0.015927807807922363, 0.015885472297668457, 0.016127840042114257, 0.01593008041381836, 0.01638534355163574, 0.016016063690185548, 0.015882240295410157, 0.016287424087524413, 0.01595961570739746, 0.016232927322387694, 0.0160596809387207, 0.016034528732299803, 0.01600716781616211, 0.015922816276550292, 0.016068832397460937, 0.016238399505615234, 0.01609382438659668, 0.016072576522827148, 0.016054399490356447, 0.015994879722595216, 0.01593958377838135, 0.015975520133972167, 0.01604867172241211, 0.01596454429626465, 0.016007360458374024, 0.016009023666381836, 0.016090496063232422, 0.01612883186340332, 0.016109376907348632, 0.016043071746826173, 0.016034751892089843, 0.015978272438049317, 0.0159552640914917, 0.015909152030944826, 0.01586246395111084, 0.01589241600036621, 0.015990336418151854, 0.015903167724609375, 0.015818143844604494, 0.015805312156677246, 0.016115423202514648, 0.015882240295410157, 0.015945728302001954, 0.01595296001434326, 0.015993791580200194, 0.015943519592285155, 0.015845439910888673, 0.01590665626525879, 0.015933664321899414, 0.016033824920654298, 0.016078847885131836, 0.015980544090270995, 0.016737279891967775, 0.015919903755187988, 0.01583907222747803, 0.01593350410461426, 0.015853983879089355, 0.015810367584228515, 0.015775615692138673, 0.016295936584472655, 0.01575267219543457, 0.015668959617614747, 0.01552467155456543, 0.015847359657287598, 0.015638591766357422, 0.01557913589477539, 0.015625439643859863, 0.015481280326843263, 0.015728992462158205, 0.015617280006408692, 0.015706303596496583, 0.015665727615356444, 0.015688032150268556, 0.01566476821899414, 0.015595552444458007, 0.015542112350463867, 0.01562230396270752, 0.015832863807678222, 0.015646944046020506, 0.015748895645141602, 0.015540448188781739, 0.01579529571533203, 0.015648927688598633, 0.015843711853027343, 0.015772031784057616, 0.015672384262084962, 0.015466943740844726, 0.01538038444519043, 0.015659647941589354, 0.015507264137268066, 0.015568991661071778, 0.015921024322509764, 0.015908543586730956, 0.01578649616241455, 0.01569382381439209, 0.015762847900390627, 0.01573539161682129, 0.015611231803894043, 0.015468992233276368, 0.015446240425109864, 0.015233023643493653, 0.015311936378479003, 0.01541158390045166, 0.015448160171508789, 0.015366623878479004, 0.015384575843811036, 0.015224831581115723, 0.01544313621520996, 0.015202367782592773, 0.015209055900573731, 0.015354016304016112, 0.015357664108276367, 0.01553388786315918, 0.015475168228149414, 0.015402943611145019, 0.015205856323242187, 0.015322879791259765, 0.015472928047180175, 0.015693984031677245, 0.015304415702819825, 0.015263199806213379, 0.015233599662780761, 0.015381792068481445, 0.01528320026397705, 0.01517734432220459, 0.015328703880310058, 0.01540214443206787, 0.015501088142395019, 0.015812640190124513, 0.01621731185913086, 0.015837439537048338, 0.015721247673034667, 0.01608265686035156, 0.015693984031677245, 0.015726304054260253, 0.015721728324890136, 0.015786080360412597, 0.015950143814086912, 0.015880160331726074, 0.015739168167114258, 0.016105056762695313, 0.015913599967956542, 0.020230144500732423, 0.022624416351318358, 0.016852287292480467, 0.016026144027709962, 0.01587548828125, 0.01597721576690674, 0.01589404773712158, 0.01592288017272949, 0.016000991821289064, 0.015903167724609375, 0.01591932773590088, 0.016119583129882813, 0.016021472930908203, 0.015937727928161623, 0.016042047500610352, 0.01597417640686035, 0.01602582359313965, 
0.015931488037109375, 0.016060096740722656, 0.0159518404006958, 0.016062719345092773, 0.01603923225402832, 0.01599567985534668, 0.015988096237182618, 0.01618534469604492, 0.01599955177307129, 0.016006559371948243, 0.0160753288269043, 0.01599846363067627, 0.015929856300354005, 0.015972352027893065, 0.016232255935668946, 0.0159684476852417, 0.015871487617492675, 0.015916768074035643, 0.016029727935791015, 0.016029727935791015, 0.016018144607543944, 0.01593388843536377, 0.01598464012145996, 0.015927295684814453, 0.016340032577514648, 0.016036800384521484, 0.016883712768554687, 0.01846998405456543, 0.016157600402832033, 0.016084192276000976, 0.016038911819458008, 0.016097055435180665, 0.015935487747192383, 0.015969599723815918, 0.015978400230407714, 0.01592140769958496, 0.01596070384979248, 0.016066463470458984, 0.01595571231842041, 0.016020799636840822, 0.016206783294677736, 0.016273248672485353, 0.016059808731079102, 0.01609782409667969, 0.016085216522216797, 0.016166912078857423, 0.01675289535522461, 0.01842367935180664, 0.016052064895629884, 0.01609440040588379, 0.016085855484008788, 0.016072576522827148, 0.01595132827758789, 0.015905695915222166, 0.015890432357788087, 0.015941632270812987, 0.016051071166992188, 0.016165088653564454, 0.016093568801879884, 0.016132640838623046, 0.016119232177734377, 0.01624457550048828, 0.016005599975585937, 0.015939359664916993, 0.015931615829467772, 0.016088895797729492, 0.016134559631347658, 0.016039968490600586, 0.01606425666809082, 0.01592319965362549, 0.015951552391052247, 0.01601363182067871, 0.01604595184326172, 0.016152448654174804, 0.016017663955688478, 0.01601923179626465, 0.015955967903137205, 0.016070751190185546, 0.01602799987792969, 0.01600476837158203, 0.016067935943603517, 0.016106271743774415, 0.016504831314086914, 0.016134239196777343]",tokens/s,62.63266956165499,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 705, in __init__ self.model = XGLMModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 517, in __init__ self.layers = nn.ModuleList([XGLMDecoderLayer(config) for _ in range(config.num_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 517, in self.layers = nn.ModuleList([XGLMDecoderLayer(config) for _ in range(config.num_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 371, in __init__ self.fc1 = nn.Linear(self.embed_dim, config.ffn_dim) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 128.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 78.12 MiB is free. Process 414666 has 14.66 GiB memory in use. Of the allocated memory 14.54 GiB is allocated by PyTorch, and 12.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpzaccmzal/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 373, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 252, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 115, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 404780 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 
1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp733kqmbw/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,788.619264,11664.228352,0.0,11261.70624,11255.391232,s,1,7.699521484375,7.699521484375,0.0,7.699521484375,7.699521484375,7.699521484375,7.699521484375,[7.699521484375],,kWh,5.135730045791813e-06,5.590329252506261e-07,2.3983352520073886e-06,8.093098223049826e-06,,MB,1168.171008,11674.714112,0.0,11261.70624,10971.009024,s,10,3.5324797363281255,0.3532479736328126,0.005488434883393947,0.3544241790771484,0.35849992980957035,0.3585626480102539,0.3586128225708008,"[0.34007058715820315, 0.35417059326171874, 0.35467776489257813, 0.35175088500976565, 0.3530396728515625, 0.35848599243164064, 0.34735467529296876, 0.3586253662109375, 0.3577989807128906, 0.35650521850585937]",tokens/s,724.7033786699141,kWh,1.0289454887070265e-05,1.134344471862195e-06,6.821327296139191e-06,1.8245126655071653e-05,tokens/kWh,14031144.033129467,MB,1173.712896,11676.811264,0.0,11263.803392,11168.310272,s,10,32.3587314453125,3.23587314453125,0.002421926978261035,3.235115234375,3.2400518554687503,3.2403258544921876,3.2405450537109375,"[3.23422119140625, 3.233904052734375, 3.2365, 3.234213623046875, 3.240599853515625, 3.2347783203125, 3.2354521484375, 3.239990966796875, 3.233010498046875, 3.236060791015625]",tokens/s,19.469242824451392,kWh,9.450608709751391e-05,1.0424656552769856e-05,6.277520060325956e-05,0.00016770594425354334,tokens/kWh,375657.52532155055,,s,630,32.354787281036366,0.05135680520799425,0.00029171386051263053,0.05134310531616211,0.05167521667480469,0.05174189758300781,0.052295170707702635,"[0.052272926330566405, 0.05127648162841797, 0.05105180740356445, 0.05104611206054688, 0.050975200653076175, 0.05084953689575195, 0.05115574264526367, 0.051073024749755856, 0.051014720916748045, 0.05094268798828125, 0.05095446395874023, 0.05101772689819336, 0.05102182388305664, 0.05127372741699219, 0.05114038467407227, 0.05102214431762695, 0.05118873596191406, 0.051089889526367185, 0.05139295959472656, 0.05133270263671875, 0.051251617431640625, 0.05122662353515625, 0.05124710464477539, 0.051095550537109374, 0.051051998138427736, 0.0511165771484375, 0.051256446838378905, 0.05111084747314453, 0.05106406402587891, 0.05112396621704102, 0.05113564682006836, 0.051035934448242185, 0.05115264129638672, 0.05121459197998047, 0.05124300765991211, 0.05123481750488281, 0.05133692932128906, 0.05118806457519531, 0.05159091186523437, 0.051361568450927736, 0.05125081634521484, 0.05139945602416992, 0.05138431930541992, 0.05129951858520508, 0.053308223724365236, 0.051527519226074216, 0.05151145553588867, 0.05134713745117187, 0.051402847290039064, 0.05159708786010742, 0.05143782424926758, 0.0516135368347168, 0.051466304779052736, 0.0515830078125, 0.05176486587524414, 0.05146918487548828, 0.05151103973388672, 0.05154816055297851, 0.05168742370605469, 0.05162393569946289, 0.05171200180053711, 
0.05166806411743164, 0.05159209442138672, 0.05220153427124023, 0.05122880172729492, 0.051016254425048826, 0.05110784149169922, 0.05091123199462891, 0.051014720916748045, 0.05084636688232422, 0.05097292709350586, 0.051189247131347655, 0.051108383178710935, 0.05088390350341797, 0.05099961471557617, 0.0510222396850586, 0.050947776794433595, 0.051077278137207034, 0.05109158325195313, 0.05102592086791992, 0.05118975830078125, 0.051574783325195314, 0.051302528381347655, 0.051203742980957034, 0.051271903991699216, 0.05131647872924805, 0.05114886474609375, 0.05103353500366211, 0.051103649139404295, 0.05115542221069336, 0.05129587173461914, 0.05112704086303711, 0.051221534729003905, 0.051065921783447266, 0.05118761444091797, 0.05126553726196289, 0.051318782806396485, 0.05128937530517578, 0.05148767852783203, 0.05157164764404297, 0.0514158706665039, 0.051499038696289065, 0.051591201782226564, 0.05144707107543945, 0.05141779327392578, 0.05155209732055664, 0.05131683349609375, 0.051390529632568356, 0.05157590484619141, 0.05151414489746094, 0.0513554573059082, 0.05137228775024414, 0.051460224151611327, 0.05139174270629883, 0.051313343048095705, 0.0515109748840332, 0.0515928955078125, 0.051520030975341795, 0.05164656066894531, 0.051591167449951174, 0.051607551574707033, 0.05161564636230469, 0.05223001480102539, 0.05178556823730469, 0.05148915100097656, 0.05154611206054688, 0.05234128189086914, 0.05134150314331055, 0.05117491149902344, 0.050979198455810545, 0.05094617462158203, 0.05096384048461914, 0.05111872100830078, 0.05100163269042969, 0.0509703369140625, 0.051435073852539065, 0.051274177551269534, 0.05140390396118164, 0.051, 0.05110393524169922, 0.051144222259521484, 0.05129264068603516, 0.05108531188964844, 0.051361793518066405, 0.05147260665893555, 0.05154732894897461, 0.05140457534790039, 0.051196735382080076, 0.051294208526611325, 0.05124505615234375, 0.051058399200439454, 0.05125334548950195, 0.051362239837646484, 0.05114444732666015, 0.05116924667358398, 0.051180831909179686, 0.05132803344726562, 0.05112803268432617, 0.051100799560546875, 0.05132582473754883, 0.05134159851074219, 0.051429088592529294, 0.05142323303222656, 0.051585025787353515, 0.051450977325439455, 0.0515511360168457, 0.05139046478271484, 0.05148057556152344, 0.05148483276367188, 0.0515247688293457, 0.05141891098022461, 0.05151017761230469, 0.05171007919311523, 0.05153779220581055, 0.051290081024169924, 0.051509281158447266, 0.05149491119384766, 0.05134131240844726, 0.05139865493774414, 0.051529312133789064, 0.051689247131347656, 0.05160953521728515, 0.05153804779052734, 0.05171011352539062, 0.05157238388061523, 0.05171231842041016, 0.05155001449584961, 0.0516135368347168, 0.05156857681274414, 0.05242889785766602, 0.051308448791503904, 0.05102163314819336, 0.05101091384887695, 0.050864990234375, 0.050881950378417966, 0.051130977630615235, 0.050958335876464846, 0.0510115852355957, 0.0510832633972168, 0.05097449493408203, 0.05117974472045898, 0.05123891067504883, 0.051049503326416015, 0.05106524658203125, 0.05097878265380859, 0.05109206390380859, 0.051275745391845706, 0.051351585388183595, 0.05152767944335938, 0.05129379272460938, 0.05130281448364258, 0.05116928100585937, 0.05112736129760742, 0.05115334320068359, 0.05114316940307617, 0.05134774398803711, 0.05114608001708985, 0.05115667343139649, 0.05114479827880859, 0.05107932662963867, 0.051085121154785154, 0.05126591873168945, 0.051254558563232425, 0.05140304183959961, 0.051343360900878904, 0.05140550231933594, 0.05157625579833985, 0.051526206970214844, 0.0514703369140625, 
0.051416641235351564, 0.05151395034790039, 0.05148451232910156, 0.05142240142822266, 0.05136368179321289, 0.05155120086669922, 0.05152767944335938, 0.051353790283203124, 0.05143747329711914, 0.0513936653137207, 0.05138307189941406, 0.05137027359008789, 0.05143267059326172, 0.05149884796142578, 0.05158153533935547, 0.05160358428955078, 0.0516668815612793, 0.05168041610717773, 0.051722625732421874, 0.051673473358154295, 0.05172172927856445, 0.05158972930908203, 0.05158198547363281, 0.05256592178344727, 0.05135897445678711, 0.05117219161987305, 0.05123481750488281, 0.05093974304199219, 0.05101737594604492, 0.05093571090698242, 0.051130401611328126, 0.05099577713012695, 0.05106454467773437, 0.05103849411010742, 0.05104995346069336, 0.05118211364746094, 0.05109145736694336, 0.05122048187255859, 0.05114006423950195, 0.051190303802490233, 0.05130444717407227, 0.05134272003173828, 0.05156313705444336, 0.051356895446777344, 0.051338016510009764, 0.05125321578979492, 0.051240318298339846, 0.05115897750854492, 0.05124521636962891, 0.051348129272460935, 0.051472286224365234, 0.05123833465576172, 0.051280319213867186, 0.05121158218383789, 0.051329822540283204, 0.05124051284790039, 0.05151177597045899, 0.05139046478271484, 0.05158620834350586, 0.051434337615966795, 0.051386367797851565, 0.05132268905639648, 0.05149631881713867, 0.051417343139648436, 0.051427902221679686, 0.05143891143798828, 0.05143212890625, 0.05164851379394531, 0.05175641632080078, 0.051565185546875, 0.0515728645324707, 0.052182079315185544, 0.052638526916503905, 0.05168742370605469, 0.05168329620361328, 0.051507232666015625, 0.051492862701416016, 0.05154336166381836, 0.05149766540527344, 0.05222134399414063, 0.051640670776367185, 0.05166723251342773, 0.05169372940063476, 0.051744575500488284, 0.05167257690429688, 0.05168569564819336, 0.05230425643920898, 0.05123721694946289, 0.05095577621459961, 0.05095884704589844, 0.05107651138305664, 0.050856544494628904, 0.05098448181152344, 0.05119836807250976, 0.05097833633422852, 0.050969375610351565, 0.05098428726196289, 0.05121475219726562, 0.051337215423583986, 0.05115497589111328, 0.051238880157470704, 0.05109766387939453, 0.05102342224121094, 0.05139059066772461, 0.051366142272949215, 0.051324382781982425, 0.05122307205200195, 0.05148262405395508, 0.051312606811523435, 0.05129532623291016, 0.05117433547973633, 0.05120819091796875, 0.05115526580810547, 0.0512344970703125, 0.05116070556640625, 0.0511962890625, 0.051095199584960935, 0.05121814346313477, 0.05120678329467773, 0.05140825653076172, 0.05124991989135742, 0.051553569793701175, 0.05137059020996094, 0.05149203109741211, 0.05172329711914062, 0.05147750473022461, 0.05137830352783203, 0.05139056015014649, 0.05142335891723633, 0.05154246520996094, 0.05144780731201172, 0.05152739334106445, 0.05139484786987305, 0.05137923049926758, 0.05154095840454102, 0.051522945404052736, 0.051385982513427735, 0.05149593734741211, 0.05132287979125977, 0.05161929702758789, 0.05141763305664063, 0.051607551574707033, 0.05157273483276367, 0.05160140609741211, 0.051609600067138675, 0.05162393569946289, 0.05153900909423828, 0.05163513565063477, 0.05152899169921875, 0.052584640502929686, 0.05120796966552734, 0.05095647811889648, 0.05093484878540039, 0.05097856140136719, 0.050812896728515626, 0.05102182388305664, 0.05100873565673828, 0.05113126373291016, 0.05111798477172851, 0.050995582580566405, 0.051038078308105465, 0.051263137817382814, 0.05105263900756836, 0.051144481658935544, 0.0511695671081543, 0.05104838562011719, 0.05137974548339844, 
0.05115337753295898, 0.05139251327514648, 0.051253246307373046, 0.051286014556884765, 0.051431232452392575, 0.05121033477783203, 0.05109360122680664, 0.051296257019042966, 0.05118566513061523, 0.05106294250488281, 0.05117712020874023, 0.051224769592285155, 0.05115039825439453, 0.05131504058837891, 0.051301567077636716, 0.05149753570556641, 0.05135599899291992, 0.051312736511230465, 0.05161155319213867, 0.05141708755493164, 0.051447425842285156, 0.051489185333251954, 0.05149039840698242, 0.051394432067871094, 0.05151414489746094, 0.051445472717285154, 0.05142508697509766, 0.05151363372802734, 0.051311679840087894, 0.05136633682250977, 0.0513458251953125, 0.05148057556152344, 0.05145542526245117, 0.05212575912475586, 0.051507198333740234, 0.05160508728027344, 0.05144464111328125, 0.05156985473632812, 0.05154694366455078, 0.05173452758789063, 0.051885185241699217, 0.051616481781005856, 0.05158649444580078, 0.05162598419189453, 0.051573600769042965, 0.052217632293701174, 0.05150128173828125, 0.05122867202758789, 0.051041793823242185, 0.05100185775756836, 0.050898944854736325, 0.05102592086791992, 0.05105574417114258, 0.05100019073486328, 0.05101964950561524, 0.05094390487670898, 0.05110393524169922, 0.051400737762451174, 0.051199806213378905, 0.0510868148803711, 0.05112700653076172, 0.05107247924804687, 0.0512367057800293, 0.05128262329101563, 0.051574783325195314, 0.05130035018920898, 0.051533824920654295, 0.05130649566650391, 0.05123481750488281, 0.05112742233276367, 0.0511794548034668, 0.05116019058227539, 0.051093311309814454, 0.051138656616210934, 0.051500961303710936, 0.05143452835083008, 0.051260383605957034, 0.05124915313720703, 0.05133926391601563, 0.05192416000366211, 0.05140943908691406, 0.05173379135131836, 0.051695777893066404, 0.05140566253662109, 0.05157888031005859, 0.051512832641601565, 0.0517239990234375, 0.051530529022216796, 0.05166080093383789, 0.05149491119384766, 0.051496768951416014, 0.05149100875854492, 0.051568897247314456, 0.051356704711914065, 0.05192800140380859, 0.0516278076171875, 0.05167513656616211, 0.051722240447998044, 0.0517020149230957, 0.05155209732055664, 0.05167628860473633, 0.05167593765258789, 0.05160918426513672, 0.05164608001708984, 0.051874942779541015, 0.05189187240600586, 0.05184486389160156, 0.05170569610595703, 0.052254688262939455, 0.05109526443481445, 0.051203872680664064, 0.05099078369140625, 0.05096940612792969, 0.050904895782470705, 0.05098310470581055, 0.050950111389160155, 0.05119587326049805, 0.05134723281860352, 0.05103968048095703, 0.05107004928588867, 0.0510513916015625, 0.05117792129516602, 0.05099769592285156, 0.05108127975463867, 0.05111391830444336, 0.05126383972167969, 0.05122627258300781, 0.05140390396118164, 0.05122348785400391, 0.05130022430419922, 0.051175487518310546, 0.051187713623046874, 0.051187358856201175, 0.05111638259887695, 0.05121152114868164, 0.0511286735534668, 0.05111180877685547, 0.05129833602905273, 0.05112063980102539, 0.05117542266845703, 0.05127372741699219, 0.051312641143798826, 0.0512957763671875, 0.051442142486572265, 0.05135772705078125, 0.05141091156005859, 0.05142300796508789, 0.0516343994140625, 0.05142473602294922, 0.05147020721435547, 0.05158364868164062, 0.05138409423828125, 0.05132502365112305, 0.051576961517333986, 0.05143961715698242, 0.05140070343017578, 0.05134284973144531, 0.05146691131591797, 0.05143267059326172, 0.05145459365844727, 0.0514600944519043, 0.05135718536376953, 0.05135977554321289, 0.05155420684814453, 0.05144224166870117, 0.051558399200439455, 0.05144985580444336, 
0.05164851379394531, 0.05157068634033203, 0.051566238403320315, 0.05164656066894531, 0.052203392028808596, 0.051250526428222656, 0.050944671630859376, 0.0509277458190918, 0.05085590362548828, 0.05084588623046875, 0.05089059066772461, 0.05082099151611328, 0.05087551879882812, 0.050926719665527344, 0.05088153457641602, 0.05092441558837891, 0.050872127532958986, 0.05094623947143555, 0.05085919952392578, 0.05089363098144531, 0.05091328048706055, 0.050986335754394534, 0.05112080001831055, 0.05191065597534179, 0.05130403137207031, 0.05138678359985351, 0.05118956756591797, 0.05117327880859375, 0.05115251159667969, 0.05125801467895508, 0.05113651275634765, 0.05113446426391602, 0.05126732635498047, 0.05109171295166016, 0.05122867202758789, 0.051320831298828126, 0.05131817626953125, 0.0513004150390625, 0.051280319213867186, 0.05141718292236328, 0.05150515365600586, 0.05152534484863281, 0.051566272735595706, 0.05148041534423828, 0.05171481704711914, 0.05164441680908203, 0.051625633239746095, 0.051937633514404294, 0.05165465545654297, 0.05172582244873047, 0.05164083099365235, 0.05169311904907226, 0.05150384140014649, 0.051561279296875, 0.051708961486816404, 0.051479679107666015, 0.05158905410766602, 0.051769760131835936, 0.051802528381347655, 0.0517259521484375, 0.05163478469848633, 0.05168105697631836, 0.051738624572753904, 0.051697662353515625, 0.051754432678222655, 0.051816097259521486, 0.05162448120117188]",tokens/s,19.47161619477722,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp5qek8jwg/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,1036.673024,10982.653952,0.0,10580.13184,10162.029568,s,1,7.70310986328125,7.70310986328125,0.0,7.70310986328125,7.70310986328125,7.70310986328125,7.70310986328125,[7.70310986328125],,kWh,7.21334057502645e-06,7.812861083311969e-07,4.9358372820051954e-06,1.2930463965362843e-05,,MB,1459.879936,11104.288768,0.0,10689.183744,9358.065152,s,10,1.1642667160034181,0.11642667160034181,0.004076506266006475,0.11774860763549805,0.12000066146850585,0.12046557884216309,0.12083751274108888,"[0.10593302154541015, 0.118912353515625, 0.1176674575805664, 0.11378352355957032, 0.11545164489746093, 0.11860399627685547, 0.11525711822509765, 0.11782975769042969, 0.11989734649658203, 0.12093049621582032]",tokens/s,2198.808885293672,kWh,3.393360044491639e-06,3.742063207057856e-07,2.2493728084368844e-06,6.016939173634308e-06,tokens/kWh,42546549.43526257,MB,1492.811776,11104.288768,0.0,10689.183744,9397.6704,s,10,31.250157958984374,3.1250157958984373,0.006129973101519931,3.125523681640625,3.13277783203125,3.1333392333984373,3.1337883544921876,"[3.133900634765625, 3.132653076171875, 3.12618798828125, 3.12027587890625, 3.116221923828125, 3.124982177734375, 3.12362109375, 3.11515234375, 3.126065185546875, 3.13109765625]",tokens/s,20.159898098015084,kWh,9.121316818383938e-05,1.0058701525185193e-05,6.05864274601617e-05,0.00016185829716918628,tokens/kWh,389229.35124016367,,s,630,31.246278419494615,0.049597267332531156,0.0004190776828819243,0.04953897666931152,0.04992173309326172,0.05019712505340576,0.05152120834350586,"[0.05131516647338867, 0.05004816055297852, 0.04964815902709961, 0.04941651153564453, 0.05049545669555664, 0.04945718383789063, 0.049850528717041015, 0.04956553649902344, 0.049584129333496096, 0.049512161254882815, 0.04979663848876953, 0.05041635131835937, 0.049645599365234376, 0.049534687042236326, 
0.049530529022216795, 0.05017385482788086, 0.05026070404052734, 0.04951852798461914, 0.049511550903320316, 0.04989228820800781, 0.049799072265625, 0.049952865600585934, 0.04994355010986328, 0.04959113693237305, 0.049858718872070315, 0.05009196853637695, 0.049799232482910155, 0.04950835037231445, 0.04936294555664063, 0.049549312591552735, 0.04948582458496094, 0.04986880111694336, 0.0495184326171875, 0.04951990509033203, 0.0493147201538086, 0.04949129486083984, 0.049414783477783206, 0.049514785766601566, 0.049426143646240234, 0.049681697845458984, 0.049697376251220705, 0.049735198974609374, 0.04989807891845703, 0.04972323226928711, 0.049524478912353516, 0.049729312896728516, 0.04977318572998047, 0.049468673706054685, 0.04944582366943359, 0.049638816833496094, 0.049812992095947264, 0.04968979263305664, 0.04980092620849609, 0.049805313110351565, 0.049731327056884767, 0.04968582534790039, 0.04975711822509766, 0.04970204925537109, 0.049679328918457034, 0.04970892715454102, 0.04984988784790039, 0.04988963317871094, 0.049868831634521486, 0.05210595321655274, 0.05036851119995117, 0.049669696807861326, 0.049770145416259765, 0.049789505004882814, 0.04982601547241211, 0.049686527252197264, 0.04956118392944336, 0.04944118499755859, 0.04950979232788086, 0.04948438262939453, 0.04959628677368164, 0.04937270355224609, 0.04937993621826172, 0.049408000946044923, 0.049716480255126955, 0.0493507194519043, 0.04953977584838867, 0.049255775451660155, 0.049522689819335934, 0.04961347198486328, 0.04986601638793945, 0.04970512008666992, 0.04971372985839844, 0.04971737670898437, 0.049911678314208986, 0.049604606628417966, 0.04975177764892578, 0.04982812881469727, 0.049840320587158204, 0.04960438537597656, 0.04949961471557617, 0.04945772933959961, 0.049656959533691404, 0.04950944137573242, 0.049903617858886716, 0.049495872497558595, 0.04975807952880859, 0.04958022308349609, 0.049782718658447266, 0.04980348968505859, 0.05013033676147461, 0.049668033599853514, 0.04976275253295898, 0.049438526153564456, 0.04971526336669922, 0.049831905364990235, 0.049753406524658206, 0.04967510223388672, 0.04969046401977539, 0.049643680572509764, 0.04961193466186523, 0.04964220809936523, 0.04961676788330078, 0.04953523254394531, 0.05059721755981445, 0.04955612945556641, 0.049571006774902344, 0.04982457733154297, 0.049825790405273435, 0.049831905364990235, 0.0497413444519043, 0.04966665649414063, 0.05213814544677734, 0.05014716720581055, 0.04956768035888672, 0.049405982971191406, 0.04914179229736328, 0.04931584167480469, 0.04957388687133789, 0.0493996467590332, 0.04932988739013672, 0.04924665451049805, 0.049225727081298826, 0.049186817169189455, 0.04924620819091797, 0.04914790344238281, 0.049063201904296874, 0.04919705581665039, 0.04927936172485352, 0.049462814331054684, 0.04955424118041992, 0.049620990753173826, 0.04955855941772461, 0.049807903289794925, 0.0495247688293457, 0.04952681732177734, 0.04941657638549805, 0.04924985504150391, 0.04925478363037109, 0.049326080322265625, 0.049207359313964846, 0.04970086288452148, 0.049698783874511716, 0.04945030212402344, 0.04938620758056641, 0.04961471939086914, 0.04938687896728516, 0.049435550689697266, 0.04994153594970703, 0.04956639862060547, 0.04965184020996094, 0.04974796676635742, 0.04993180847167969, 0.049799137115478516, 0.04951705551147461, 0.04977769470214844, 0.04966831970214844, 0.04958438491821289, 0.04957606506347656, 0.04997500610351562, 0.049635326385498044, 0.04979983901977539, 0.049659103393554685, 0.049816318511962894, 0.04969193649291992, 0.049644287109375, 
0.049465343475341796, 0.04967401504516602, 0.04972671890258789, 0.05087126541137695, 0.0497520637512207, 0.04999161529541016, 0.049960830688476565, 0.05009183883666992, 0.049531265258789064, 0.05150716781616211, 0.050170719146728514, 0.04940185546875, 0.04918278503417969, 0.04911916732788086, 0.04926464080810547, 0.04928307342529297, 0.04918272018432617, 0.049216831207275394, 0.05068777465820312, 0.04930416107177735, 0.0491605110168457, 0.049387104034423826, 0.04931951904296875, 0.049185726165771486, 0.04914691162109375, 0.05089497756958008, 0.049229663848876955, 0.04926655960083008, 0.04964659118652344, 0.049524223327636716, 0.04960099029541016, 0.0496148796081543, 0.04932099151611328, 0.04941308975219726, 0.049294368743896484, 0.049193439483642576, 0.04931619262695312, 0.04930985641479492, 0.049530303955078125, 0.05142723083496094, 0.049451679229736326, 0.049688575744628906, 0.049605823516845705, 0.04947027206420898, 0.04996860885620117, 0.04984003067016601, 0.049568382263183594, 0.04947868728637695, 0.049604862213134766, 0.04948767852783203, 0.04959638214111328, 0.049589183807373045, 0.049358753204345705, 0.049358943939208984, 0.049571521759033205, 0.049353023529052735, 0.0493298225402832, 0.04925811386108398, 0.04924403381347656, 0.04921177673339844, 0.049419776916503906, 0.04924646377563477, 0.049256832122802734, 0.04926499176025391, 0.04925439834594727, 0.049426433563232425, 0.04930559921264648, 0.04966195297241211, 0.049391616821289064, 0.04944668960571289, 0.049602783203125, 0.04949401473999023, 0.050761695861816405, 0.04958755111694336, 0.04915433502197265, 0.04911347198486328, 0.04889120101928711, 0.04886739349365234, 0.04911577606201172, 0.04903107070922851, 0.049001953125, 0.04908502578735351, 0.04912239837646484, 0.04935161590576172, 0.04914176177978516, 0.049201057434082034, 0.04903724670410156, 0.04915830230712891, 0.0492308464050293, 0.049361919403076174, 0.04917452621459961, 0.049590240478515624, 0.049629215240478516, 0.049530879974365234, 0.049588222503662106, 0.04950630569458008, 0.049152000427246094, 0.049192958831787106, 0.04946236801147461, 0.04924467086791992, 0.04926230239868164, 0.0495511360168457, 0.049436607360839845, 0.04958307266235352, 0.0492927360534668, 0.049568321228027346, 0.049465343475341796, 0.04939929580688476, 0.04927721786499024, 0.04936316680908203, 0.04956927871704102, 0.04974774551391602, 0.04964220809936523, 0.049754112243652344, 0.0493383674621582, 0.049719295501708984, 0.049598464965820314, 0.049584095001220706, 0.04939139175415039, 0.049514751434326175, 0.049456928253173826, 0.049631454467773437, 0.049651329040527346, 0.04960496139526367, 0.04968262481689453, 0.049409889221191404, 0.04968447875976562, 0.049430526733398435, 0.04928716659545898, 0.04935619354248047, 0.05004137420654297, 0.049817535400390626, 0.04973347091674805, 0.050698528289794924, 0.049917343139648435, 0.05206809616088867, 0.05062886428833008, 0.04959008026123047, 0.04947372817993164, 0.049579071044921874, 0.04950447845458984, 0.04929193496704101, 0.04948003387451172, 0.04945676803588867, 0.049649505615234374, 0.04945331192016601, 0.049598464965820314, 0.04987820816040039, 0.04983868789672852, 0.049401439666748044, 0.049758846282958985, 0.049579326629638674, 0.049403839111328125, 0.049552127838134764, 0.04970086288452148, 0.04976844787597656, 0.04994662475585938, 0.049719295501708984, 0.04971247863769531, 0.0495643196105957, 0.04960172653198242, 0.05011948776245117, 0.04973344039916992, 0.04962527847290039, 0.04950748825073242, 0.04942486572265625, 0.04963471984863281, 
0.04938236618041992, 0.04961798477172852, 0.049329055786132815, 0.04953615951538086, 0.04931059265136719, 0.04952051162719726, 0.049301025390625, 0.049454784393310545, 0.0492982063293457, 0.04975628662109375, 0.05005516815185547, 0.04979507064819336, 0.04941209411621094, 0.04929526519775391, 0.049481281280517576, 0.04979254531860351, 0.04965305709838867, 0.04966368103027344, 0.04929644775390625, 0.04920620727539062, 0.04916604614257813, 0.0492503662109375, 0.049124671936035154, 0.049130401611328124, 0.04910899353027344, 0.04945500946044922, 0.04996515274047852, 0.05011001586914063, 0.049394111633300784, 0.049291263580322264, 0.04926259231567383, 0.051474113464355466, 0.04991424179077148, 0.049219585418701174, 0.04917862319946289, 0.04892671966552734, 0.048952415466308595, 0.048863361358642575, 0.049047870635986326, 0.04917295837402344, 0.0490332145690918, 0.04961856079101563, 0.04928755187988281, 0.04927897644042969, 0.049216960906982424, 0.04959414291381836, 0.05066422271728516, 0.049414142608642575, 0.04945510482788086, 0.04930559921264648, 0.049532894134521485, 0.0496333122253418, 0.04964147186279297, 0.051043582916259767, 0.049446720123291016, 0.04934751892089844, 0.049375232696533204, 0.04929740905761719, 0.04941823959350586, 0.0493568000793457, 0.049484928131103514, 0.04948262405395508, 0.04931731033325195, 0.049250080108642576, 0.049208065032958985, 0.04916841506958008, 0.049562976837158206, 0.05220367813110351, 0.04977305603027344, 0.04944486236572266, 0.049827838897705076, 0.04962259292602539, 0.049821182250976564, 0.04980627059936524, 0.04954291152954102, 0.04948608016967773, 0.04960220718383789, 0.04920902252197266, 0.0492591667175293, 0.049678112030029295, 0.04956086349487305, 0.04956665420532227, 0.04959561538696289, 0.04944771194458008, 0.049430816650390626, 0.049491233825683593, 0.049530303955078125, 0.04964825439453125, 0.0501366081237793, 0.04970751953125, 0.049684192657470705, 0.04971811294555664, 0.049729183197021486, 0.04957212829589844, 0.05152694320678711, 0.05012508773803711, 0.04948774337768555, 0.04920912170410156, 0.04947411346435547, 0.04927849578857422, 0.04917113494873047, 0.0492309455871582, 0.04930038452148437, 0.04921753692626953, 0.04925030517578125, 0.04939571380615235, 0.0495206413269043, 0.0494073600769043, 0.04921366500854492, 0.049672607421875, 0.04912323379516602, 0.049164222717285155, 0.049164352416992185, 0.04971324920654297, 0.04992147064208984, 0.04971782302856445, 0.04969267272949219, 0.04951039886474609, 0.049288799285888675, 0.04917900848388672, 0.049220863342285155, 0.04931459045410156, 0.049169567108154295, 0.04923683166503906, 0.04938956832885742, 0.04917814254760742, 0.04959280014038086, 0.04941619110107422, 0.049377281188964846, 0.049471038818359375, 0.04940972900390625, 0.04939033508300781, 0.049530879974365234, 0.04928307342529297, 0.04937113571166992, 0.0494257583618164, 0.04918544006347656, 0.04926668930053711, 0.0491899528503418, 0.04928979110717773, 0.049234302520751956, 0.049522689819335934, 0.049336193084716796, 0.04973756790161133, 0.049506431579589845, 0.04948112106323242, 0.04933417510986328, 0.04951043319702148, 0.04920556640625, 0.04925247955322266, 0.049533313751220706, 0.04938956832885742, 0.04957183837890625, 0.04938726425170899, 0.049508609771728516, 0.04979897689819336, 0.04973577499389648, 0.051611968994140625, 0.05017804718017578, 0.05045183944702149, 0.04931382369995117, 0.049021537780761716, 0.04934860610961914, 0.04938751983642578, 0.049503841400146485, 0.049584545135498044, 0.04944303894042969, 0.0496343994140625, 
0.049351360321044924, 0.049292446136474606, 0.0496525764465332, 0.04953273773193359, 0.04946963119506836, 0.049620990753173826, 0.04955750274658203, 0.04959411239624024, 0.049770751953125, 0.049700286865234374, 0.04970348739624023, 0.04947353744506836, 0.04924415969848633, 0.049235969543457034, 0.049713153839111325, 0.04949615859985351, 0.04965923309326172, 0.049713119506835934, 0.04961955261230469, 0.04986003112792969, 0.04959494400024414, 0.04958367919921875, 0.04960073471069336, 0.04958390426635742, 0.049451454162597656, 0.049481727600097655, 0.049469120025634764, 0.049292926788330076, 0.0494453125, 0.04992435073852539, 0.04965078353881836, 0.04938774490356445, 0.049353248596191404, 0.049406112670898436, 0.049340671539306644, 0.049218849182128904, 0.04933475112915039, 0.05007769775390625, 0.04987667083740235, 0.049823486328125, 0.049549697875976566, 0.04982742309570312, 0.04969507217407226, 0.05006073760986328, 0.049615680694580076, 0.049751487731933594, 0.04963590240478516, 0.049598464965820314, 0.04962303924560547, 0.04970467376708984, 0.04947132873535156, 0.049551807403564456, 0.05192499160766602, 0.05030284881591797, 0.04948582458496094, 0.04980134582519531, 0.04930303955078125, 0.04930201721191406, 0.04928716659545898, 0.049672191619873046, 0.04928704071044922, 0.04920064163208008, 0.04950006484985352, 0.049550048828125, 0.049360897064208986, 0.04953247833251953, 0.04934086227416992, 0.04935235214233399, 0.049205535888671874, 0.049162303924560544, 0.048991935729980465, 0.04928339385986328, 0.0494837760925293, 0.050304096221923826, 0.04954819107055664, 0.049341598510742185, 0.04939247894287109, 0.04957593536376953, 0.049489662170410155, 0.050894432067871094, 0.049430561065673825, 0.04938950347900391, 0.04932883071899414, 0.04953817749023438, 0.04962575912475586, 0.04935222244262695, 0.050434688568115234, 0.04949817657470703, 0.04943254470825195, 0.04956367874145508, 0.0495588493347168, 0.04967903900146484, 0.049649246215820314, 0.05031676864624023, 0.049992286682128906, 0.05021273422241211, 0.0495590705871582, 0.049724353790283206, 0.049692607879638674, 0.049866817474365235, 0.04976230239868164, 0.04964966583251953, 0.04968243026733398, 0.049821697235107425, 0.04991104125976562, 0.04985222244262695, 0.050226337432861326, 0.04976617431640625, 0.0499159049987793, 0.0499010238647461, 0.04993283081054688, 0.04980524826049805, 0.04996211242675781, 0.04978169631958008, 0.04992409515380859]",tokens/s,20.162401151970197,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,786.35008,6169.690112,0.0,5767.168,5561.701376,s,1,7.47811083984375,7.47811083984375,0.0,7.47811083984375,7.47811083984375,7.47811083984375,7.47811083984375,[7.47811083984375],,kWh,4.409054241674919e-06,4.790730313337135e-07,2.9769468259943643e-06,7.865074099002996e-06,,MB,1262.276608,6182.273024,0.0,5767.168,5440.258048,s,10,0.6482793617248535,0.06482793617248535,0.0028759029963433976,0.06546942520141602,0.06742452850341797,0.06830229492187499,0.06900450805664061,"[0.06427021026611328, 0.06918006134033203, 0.06565872192382813, 0.05773977661132813, 0.06603437042236328, 0.06528012847900391, 0.06595785522460937, 0.06323929595947266, 0.0672294692993164, 0.06368947219848632]",tokens/s,3948.9148523696645,kWh,2.047039567453991e-06,2.2575183376776038e-07,1.3677788720000939e-06,3.640570273221845e-06,tokens/kWh,70318653.61396916,MB,1295.417344,6184.370176,0.0,5769.265152,5523.463168,s,10,19.031518798828124,1.9031518798828124,0.0026828927735153634,1.9029502563476561,1.9061058227539063,1.9070198303222656,1.9077510363769532,"[1.90441162109375, 1.9040919189453125, 1.9033114013671875, 1.902324462890625, 1.9016485595703125, 1.902029296875, 1.902589111328125, 1.907933837890625, 1.89727587890625, 1.9059027099609376]",tokens/s,33.102980726834716,kWh,5.5416001822959636e-05,6.112115327122983e-06,3.670200158380158e-05,9.823011873388419e-05,tokens/kWh,641351.1539233061,,s,630,19.028627933502197,0.03020417132301936,0.0005158398761402053,0.030103168487548827,0.030563635063171387,0.030814941692352295,0.032837681198120125,"[0.03131955146789551, 0.030960128784179686, 0.03032806396484375, 0.03038489532470703, 0.030142175674438478, 0.030363872528076173, 0.030206016540527344, 0.030065792083740234, 0.02997260856628418, 0.03018364715576172, 0.030195295333862306, 0.030308511734008788, 0.02997532844543457, 0.03021196746826172, 0.030132352828979494, 0.030007295608520508, 0.03012179183959961, 0.0304334716796875, 0.03015475273132324, 0.03057379150390625, 0.029997856140136718, 0.030390272140502928, 0.03022643280029297, 0.03019366455078125, 0.030117887496948242, 0.03045743942260742, 0.030066175460815428, 0.03046816062927246, 0.030002016067504883, 0.030421279907226564, 0.0300295352935791, 0.030019584655761718, 0.030162239074707033, 0.03061337661743164, 0.03016329574584961, 0.030460384368896483, 0.029961631774902343, 0.030420799255371094, 0.029954208374023437, 0.029981311798095704, 0.030040063858032227, 0.030514720916748048, 0.029970912933349608, 0.030365695953369142, 0.03043231964111328, 0.030124319076538085, 0.029843584060668945, 0.02982761573791504, 0.029813823699951173, 0.030440383911132813, 0.030007295608520508, 0.03054332733154297, 0.0300098876953125, 0.030580415725708007, 0.03015065574645996, 0.030014816284179686, 0.03000214385986328, 0.030535680770874023, 0.030107648849487304, 0.030385791778564455, 0.02986432075500488, 0.030362815856933595, 0.030036735534667968, 0.033402305603027344, 0.03220540618896484, 0.030865375518798827, 0.030459135055541993, 0.03016985511779785, 0.030476287841796876, 0.030099456787109374, 0.03023164749145508, 0.02998067283630371, 0.030200735092163086, 0.02978102493286133, 0.030188512802124024, 0.030038143157958986, 0.030166175842285155, 0.02995382308959961, 0.029780128479003905, 0.02992630386352539, 0.030262847900390626, 0.029956415176391603, 0.030405792236328125, 0.030054336547851564, 0.030262176513671874, 0.029863679885864258, 0.030009599685668947, 0.029857343673706054, 0.030287744522094727, 0.03011631965637207, 
0.030457952499389648, 0.0299233283996582, 0.030209247589111327, 0.029784223556518555, 0.030060224533081055, 0.03004921531677246, 0.030517248153686522, 0.030066719055175783, 0.030539743423461913, 0.029886463165283202, 0.030267168045043945, 0.029849151611328124, 0.029820831298828124, 0.029688512802124024, 0.03014249610900879, 0.02970822334289551, 0.030247007369995117, 0.029749248504638674, 0.030275520324707032, 0.02978339195251465, 0.029815519332885742, 0.0305664005279541, 0.030776800155639647, 0.03020240020751953, 0.03042918395996094, 0.029845504760742186, 0.030385440826416015, 0.02979311943054199, 0.029761407852172853, 0.029724672317504884, 0.03030531120300293, 0.029709280014038084, 0.031100095748901366, 0.030612287521362306, 0.03081020736694336, 0.029960096359252928, 0.03311360168457031, 0.03185824012756348, 0.032459327697753906, 0.031906143188476566, 0.03015475273132324, 0.0301711368560791, 0.029831167221069335, 0.030082719802856445, 0.029764991760253906, 0.030257343292236328, 0.02974390411376953, 0.030060672760009767, 0.02979622459411621, 0.030257152557373046, 0.029718751907348632, 0.02975926399230957, 0.030117599487304688, 0.03111350440979004, 0.029861888885498046, 0.03039232063293457, 0.029792255401611328, 0.030017759323120115, 0.029701120376586915, 0.029651968002319336, 0.029632287979125975, 0.030128128051757814, 0.029655040740966795, 0.030148767471313478, 0.029882080078125, 0.0303242244720459, 0.03003251266479492, 0.029825023651123047, 0.030251007080078125, 0.030616863250732422, 0.029942495346069336, 0.030361055374145508, 0.02999760055541992, 0.030443519592285157, 0.030027776718139648, 0.02996019172668457, 0.03000284767150879, 0.030257503509521486, 0.029924896240234374, 0.030328351974487303, 0.029954368591308594, 0.030489055633544922, 0.029949440002441406, 0.029639328002929687, 0.029628416061401368, 0.030242399215698244, 0.029694368362426758, 0.030427135467529298, 0.02995167922973633, 0.030504959106445313, 0.029978559494018554, 0.030042495727539063, 0.029976480484008788, 0.030380064010620118, 0.029751359939575197, 0.030473535537719726, 0.03001350402832031, 0.030618240356445312, 0.029978624343872072, 0.03178512001037598, 0.03117068862915039, 0.030981760025024414, 0.030748992919921874, 0.03027756881713867, 0.030320287704467774, 0.030097375869750975, 0.030167423248291015, 0.03015065574645996, 0.03035955238342285, 0.030070783615112305, 0.03056768035888672, 0.0302108154296875, 0.030246368408203127, 0.030026111602783203, 0.029998495101928712, 0.02989132881164551, 0.03016703987121582, 0.02983065605163574, 0.030245216369628906, 0.02989427185058594, 0.030382463455200195, 0.029763328552246095, 0.02969363212585449, 0.029874431610107423, 0.030425567626953125, 0.030134271621704102, 0.030324735641479493, 0.03003968048095703, 0.03047420883178711, 0.029796768188476562, 0.030031871795654298, 0.029997055053710937, 0.030357376098632812, 0.03014463996887207, 0.030344959259033202, 0.02989286422729492, 0.030341119766235353, 0.030049791336059572, 0.029804288864135744, 0.02982374382019043, 0.030512224197387694, 0.029832096099853517, 0.03036774444580078, 0.02972230339050293, 0.030588672637939452, 0.029860416412353517, 0.02995814323425293, 0.029959423065185547, 0.030409215927124023, 0.029956287384033203, 0.03046723175048828, 0.029827167510986328, 0.03077952003479004, 0.029937408447265626, 0.02982803153991699, 0.02979020881652832, 0.030369855880737304, 0.029823999404907226, 0.030483488082885743, 0.029953088760375977, 0.030644287109375, 0.030020383834838866, 0.032696544647216795, 0.03162665557861328, 
0.03081881523132324, 0.03057891273498535, 0.030265344619750976, 0.03010767936706543, 0.03001955223083496, 0.029786016464233397, 0.029845504760742186, 0.030072927474975586, 0.029900800704956054, 0.030115840911865234, 0.030081024169921877, 0.03046976089477539, 0.0300088005065918, 0.03008358383178711, 0.030116159439086913, 0.030336896896362306, 0.030236255645751952, 0.030331520080566405, 0.030134111404418944, 0.030269632339477538, 0.030158784866333006, 0.03007695960998535, 0.029869216918945313, 0.030284639358520507, 0.02971548843383789, 0.030138816833496094, 0.029669919967651368, 0.030287872314453124, 0.02986716842651367, 0.029932384490966798, 0.030013439178466796, 0.030480384826660156, 0.030066688537597655, 0.030502656936645507, 0.030109952926635743, 0.0304005126953125, 0.02999091148376465, 0.029857791900634766, 0.02979840087890625, 0.030312448501586913, 0.030091264724731445, 0.030286016464233397, 0.029855007171630858, 0.03023516845703125, 0.030009248733520507, 0.030048191070556642, 0.029980831146240235, 0.030398399353027343, 0.02990496063232422, 0.030496608734130858, 0.03005446434020996, 0.03042108726501465, 0.02993152046203613, 0.029848575592041016, 0.02975846481323242, 0.03026355171203613, 0.02985513687133789, 0.03044588851928711, 0.02979638481140137, 0.03037798309326172, 0.029843456268310548, 0.032895328521728516, 0.031838144302368164, 0.03067513656616211, 0.03055615997314453, 0.030132383346557618, 0.03030409622192383, 0.030082719802856445, 0.030085472106933592, 0.029889759063720704, 0.030155263900756835, 0.029787519454956054, 0.02997340774536133, 0.02981670379638672, 0.030029951095581056, 0.029691104888916017, 0.029936416625976563, 0.029855743408203125, 0.03043868827819824, 0.029815647125244142, 0.030279487609863282, 0.029911104202270507, 0.03018547248840332, 0.029726879119873047, 0.029942848205566405, 0.029872159957885742, 0.030212160110473632, 0.029901504516601562, 0.030401567459106445, 0.029756383895874025, 0.030109695434570313, 0.029703359603881836, 0.030347871780395507, 0.02976585578918457, 0.030267391204833984, 0.029924448013305665, 0.030201919555664064, 0.02992598342895508, 0.03044166374206543, 0.030101568222045898, 0.029962560653686524, 0.030006975173950196, 0.03061305618286133, 0.029956159591674806, 0.03051884841918945, 0.03013248062133789, 0.030558015823364256, 0.029913536071777345, 0.029931999206542968, 0.030099008560180666, 0.03132649612426758, 0.029954048156738283, 0.03060041618347168, 0.029872287750244142, 0.0304400634765625, 0.029914976119995117, 0.02986614418029785, 0.029906463623046876, 0.030484960556030272, 0.02999635124206543, 0.030427839279174803, 0.029904895782470704, 0.03043328094482422, 0.029962240219116212, 0.033086944580078125, 0.03184079933166504, 0.030854591369628905, 0.030486848831176756, 0.0301363525390625, 0.03019785690307617, 0.02992086410522461, 0.02987676811218262, 0.029926687240600585, 0.030178016662597656, 0.030066368103027343, 0.030539104461669922, 0.03018422317504883, 0.03042099189758301, 0.029991104125976564, 0.030053407669067382, 0.029948352813720703, 0.03043996810913086, 0.03002572822570801, 0.03027257537841797, 0.0298374080657959, 0.03013894462585449, 0.0297260799407959, 0.029721664428710937, 0.030082176208496094, 0.030604000091552733, 0.029771711349487303, 0.030232704162597657, 0.03004204750061035, 0.03041689682006836, 0.029868032455444334, 0.030040063858032227, 0.030038015365600586, 0.03061299133300781, 0.029954336166381837, 0.030492895126342772, 0.02998681640625, 0.030481504440307616, 0.030012319564819336, 0.02989846420288086, 
0.02992937660217285, 0.030430751800537108, 0.030145376205444337, 0.030353151321411132, 0.02999475288391113, 0.030390272140502928, 0.029701791763305663, 0.02968390464782715, 0.02982969665527344, 0.03043548774719238, 0.03002566337585449, 0.030373888015747072, 0.029925119400024413, 0.030429439544677736, 0.030434335708618164, 0.029883359909057616, 0.02978201675415039, 0.030228479385375977, 0.029779104232788085, 0.03021295928955078, 0.02975948715209961, 0.030315776824951172, 0.02987468719482422, 0.03340591812133789, 0.03161091232299805, 0.03061347198486328, 0.030519296646118164, 0.03015884780883789, 0.030283775329589844, 0.030205503463745117, 0.030133855819702147, 0.02999177551269531, 0.03057254409790039, 0.033594879150390625, 0.030212032318115235, 0.029975072860717773, 0.03034217643737793, 0.030045280456542967, 0.029796255111694335, 0.029755392074584962, 0.030098655700683593, 0.03001024055480957, 0.030257055282592774, 0.029898080825805664, 0.030127872467041017, 0.030598047256469727, 0.03017523193359375, 0.029985151290893554, 0.030786624908447267, 0.029781984329223632, 0.030179807662963867, 0.02992959976196289, 0.03031449508666992, 0.02992515182495117, 0.029804319381713868, 0.030073280334472655, 0.030496671676635743, 0.029884511947631837, 0.030426271438598634, 0.030065696716308595, 0.030357088088989258, 0.0299849910736084, 0.02999407958984375, 0.03041279983520508, 0.030413728713989258, 0.02983526420593262, 0.030418943405151368, 0.029900800704956054, 0.030434879302978515, 0.030236576080322267, 0.03000912094116211, 0.029907712936401366, 0.03035251235961914, 0.029817728042602538, 0.030283775329589844, 0.0301527042388916, 0.030576608657836915, 0.029999135971069336, 0.02991923141479492, 0.029989919662475585, 0.03051430320739746, 0.0299385929107666, 0.030336320877075194, 0.029789440155029295, 0.03025660705566406, 0.029790784835815428, 0.032632766723632814, 0.0314931526184082, 0.030563327789306642, 0.030510976791381837, 0.030283008575439453, 0.030296607971191405, 0.030034271240234375, 0.029814783096313476, 0.02987788772583008, 0.0301759033203125, 0.029779680252075197, 0.03012540817260742, 0.029755807876586913, 0.030056703567504884, 0.02974220848083496, 0.02977187156677246, 0.02983225631713867, 0.03013350486755371, 0.029839744567871095, 0.030267551422119142, 0.030013376235961915, 0.030346656799316408, 0.0299136962890625, 0.02978201675415039, 0.029775871276855468, 0.03015212821960449, 0.02982969665527344, 0.030054399490356445, 0.029690143585205078, 0.030168064117431642, 0.029673408508300782, 0.02970419120788574, 0.029772415161132812, 0.030437536239624023, 0.029871967315673827, 0.03031046485900879, 0.030042207717895508, 0.030273504257202148, 0.02974006462097168, 0.02972159957885742, 0.029727872848510743, 0.03037887954711914, 0.02971238327026367, 0.030359424591064454, 0.029876352310180664, 0.030398080825805664, 0.02971072006225586, 0.029853696823120116, 0.02981888008117676, 0.0304005126953125, 0.02993078422546387, 0.030573280334472656, 0.03023423957824707, 0.03071526336669922, 0.030219263076782226, 0.029874368667602538, 0.02991904067993164, 0.030353599548339844, 0.02992006492614746, 0.030544160842895508, 0.029965024948120117, 0.03039641571044922, 0.02985539245605469, 0.0332630729675293, 0.03204182434082031, 0.030851072311401367, 0.030656511306762696, 0.030311744689941408, 0.030316959381103514, 0.03009974479675293, 0.030104768753051757, 0.029991039276123045, 0.030247840881347656, 0.02999273681640625, 0.03016908836364746, 0.0297256965637207, 0.03008755111694336, 0.030060863494873045, 0.030049983978271484, 
0.030206464767456056, 0.03019699287414551, 0.029905792236328124, 0.030537120819091795, 0.030091615676879884, 0.030246463775634766, 0.030210912704467772, 0.029910879135131838, 0.029796064376831053, 0.030334720611572264, 0.029954559326171876, 0.030337055206298827, 0.029721727371215822, 0.030151552200317382, 0.029661279678344726, 0.029681568145751954, 0.029839359283447265, 0.030463712692260742, 0.03025334358215332, 0.03058211135864258, 0.030034591674804687, 0.03038115119934082, 0.029813663482666015, 0.029949951171875, 0.02987353515625, 0.03030847930908203, 0.029790176391601562, 0.030143007278442383, 0.029845247268676756, 0.0307325439453125, 0.032245216369628904, 0.029804704666137695, 0.029948287963867188, 0.030381887435913087, 0.02988412857055664, 0.030527360916137697, 0.03011222457885742, 0.030449792861938475, 0.030127967834472656, 0.03011369514465332, 0.030200063705444338, 0.03060531234741211, 0.029878047943115233, 0.03037129592895508, 0.029856063842773437, 0.030425535202026368, 0.029760799407958984]",tokens/s,33.10800979459002,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,785.956864,748.552192,0.0,346.03008,335.0016,s,1,7.78228515625,7.78228515625,0.0,7.78228515625,7.78228515625,7.78228515625,7.78228515625,[7.78228515625],,kWh,2.4116776832973604e-06,2.588428176069435e-07,8.961118279637859e-07,3.5666323288680896e-06,,MB,1235.755008,773.718016,0.0,358.612992,302.626816,s,18,0.18764316749572751,0.010424620416429308,0.00033859918789404546,0.010366896152496339,0.01052123498916626,0.010716388607025144,0.011520660381317136,"[0.010472800254821777, 0.010232352256774902, 0.010387647628784179, 0.010372960090637207, 0.010349727630615235, 0.010417216300964356, 0.010513631820678711, 0.011721728324890136, 0.010449503898620606, 0.010322431564331054, 0.01036083221435547, 0.01013424015045166, 0.010342111587524414, 0.010333536148071289, 0.010538975715637207, 0.010285152435302734, 0.010404159545898437, 0.010004159927368163]",tokens/s,24557.24906746162,kWh,3.06497986243879e-07,3.380117864642802e-08,1.8097761910504759e-07,5.212767839953546e-07,tokens/kWh,491101863.4627729,MB,1268.895744,788.39808,0.0,373.293056,302.629376,s,18,10.006541809082034,0.5559189893934461,0.008176475300774503,0.5575623779296874,0.5641414916992187,0.5670306732177735,0.5681636053466798,"[0.5463646850585937, 0.543773681640625, 0.5628875122070313, 0.5607579345703125, 0.5630103759765624, 0.5684468383789063, 0.56678076171875, 0.5604676513671875, 0.555332763671875, 0.555412841796875, 0.5513992919921875, 0.5573483276367187, 0.55756884765625, 0.557555908203125, 0.5590987548828125, 0.558986328125, 0.5447942504882812, 
0.5365550537109375]",tokens/s,113.32586438311498,kWh,1.5989046464452625e-05,1.7633127201711717e-06,6.464027696893704e-06,2.4216386881517504e-05,tokens/kWh,2601544.1654544687,,s,1134,9.995785954475398,0.008814626062147621,0.000262234709760821,0.008815968036651613,0.009007359600067138,0.009077089500427247,0.009492299928665167,"[0.008887616157531739, 0.008868127822875977, 0.008694399833679199, 0.008705920219421386, 0.008753439903259278, 0.009267040252685547, 0.008750656127929687, 0.00865120029449463, 0.008630304336547852, 0.008726495742797852, 0.0085764799118042, 0.008731424331665039, 0.008603263854980468, 0.00859171199798584, 0.00868329620361328, 0.008703200340270997, 0.008594079971313477, 0.008565279960632324, 0.008649632453918457, 0.008634623527526855, 0.008581119537353516, 0.009095423698425293, 0.009050399780273437, 0.008760383605957031, 0.008676383972167969, 0.008619199752807618, 0.008698495864868164, 0.0086179838180542, 0.00858515167236328, 0.009003071784973145, 0.008681535720825195, 0.008598784446716308, 0.008827584266662598, 0.008861696243286133, 0.008615936279296875, 0.008600640296936034, 0.008525983810424805, 0.008513376235961915, 0.008561311721801758, 0.008523776054382324, 0.008530207633972168, 0.008507391929626466, 0.0086048002243042, 0.00862831974029541, 0.00860649585723877, 0.008606847763061523, 0.008629119873046875, 0.008611455917358398, 0.008572480201721192, 0.00871020793914795, 0.008472736358642579, 0.008464991569519043, 0.008547840118408203, 0.008444735527038574, 0.008410816192626952, 0.008431615829467774, 0.008737119674682617, 0.008468128204345704, 0.00858521556854248, 0.008754176139831543, 0.00875823974609375, 0.008777759552001952, 0.008679455757141113, 0.008421376228332519, 0.008622079849243165, 0.00859334373474121, 0.008788031578063965, 0.008720383644104004, 0.008664095878601074, 0.008684831619262695, 0.008780575752258301, 0.00872755241394043, 0.008633600234985352, 0.00872105598449707, 0.00859500789642334, 0.00870854377746582, 0.008632320404052735, 0.008547967910766601, 0.008683903694152832, 0.008647968292236328, 0.008553183555603028, 0.008540160179138183, 0.00855196762084961, 0.008973952293395995, 0.008541024208068848, 0.008511487960815429, 0.008643648147583009, 0.0086561279296875, 0.008514847755432129, 0.008497856140136718, 0.00851529598236084, 0.008582592010498047, 0.008485535621643066, 0.008519647598266601, 0.00847049617767334, 0.008574015617370605, 0.0084650239944458, 0.0084749116897583, 0.008578368186950684, 0.008606240272521973, 0.008798111915588379, 0.00877302360534668, 0.008751008033752441, 0.008811455726623535, 0.008693920135498048, 0.008640095710754395, 0.00851583957672119, 0.008552448272705078, 0.008570879936218261, 0.008586655616760254, 0.008553183555603028, 0.008571904182434082, 0.00862070369720459, 0.008761759757995605, 0.008721280097961425, 0.00874182415008545, 0.008686944007873535, 0.008607999801635742, 0.008696512222290039, 0.008554207801818847, 0.008568832397460938, 0.008562687873840333, 0.008566783905029298, 0.008536064147949218, 0.0086527681350708, 0.00867670440673828, 0.008285920143127441, 0.008477215766906738, 0.008511487960815429, 0.008542271614074707, 0.008614975929260254, 0.008849696159362793, 0.010844767570495606, 0.012369919776916503, 0.011897120475769043, 0.009007935523986816, 0.00876425552368164, 0.008822367668151856, 0.008830975532531739, 0.00894819164276123, 0.008917152404785156, 0.008822527885437012, 0.009078816413879394, 0.009140128135681153, 0.008671392440795898, 0.009029151916503907, 0.008763232231140138, 0.008724543571472168, 
0.008734975814819336, 0.008659263610839844, 0.008615936279296875, 0.008822784423828126, 0.008977824211120606, 0.009146400451660156, 0.008651071548461915, 0.008696063995361328, 0.008715871810913087, 0.008613439559936523, 0.00877222442626953, 0.008714112281799316, 0.008699935913085937, 0.008646816253662109, 0.008738240242004395, 0.008665823936462402, 0.008654720306396484, 0.008756735801696777, 0.008702591896057129, 0.008677472114562988, 0.009267104148864747, 0.008747008323669434, 0.008731776237487793, 0.00868342399597168, 0.008671392440795898, 0.00885372829437256, 0.008704544067382813, 0.008830080032348633, 0.008745535850524903, 0.008732095718383789, 0.008874943733215332, 0.00885910415649414, 0.008815135955810547, 0.008971776008605957, 0.009054719924926758, 0.009016384124755859, 0.008915904045104981, 0.008798463821411133, 0.008824576377868652, 0.008828672409057617, 0.008786175727844238, 0.00882588768005371, 0.00893712043762207, 0.008969759941101075, 0.0089933443069458, 0.008978431701660156, 0.008942848205566406, 0.008825471878051758, 0.008812735557556153, 0.009056192398071289, 0.008906911849975586, 0.008869952201843261, 0.008921952247619628, 0.009098176002502442, 0.00901734447479248, 0.008941727638244629, 0.008959839820861817, 0.009076831817626953, 0.008970144271850587, 0.009011327743530273, 0.008910719871520995, 0.008970239639282226, 0.008904704093933105, 0.008862784385681152, 0.008879039764404297, 0.008711647987365723, 0.008882719993591309, 0.008887743949890137, 0.008825440406799317, 0.008814559936523438, 0.008871935844421386, 0.008858624458312989, 0.00880947208404541, 0.00876467227935791, 0.008861568450927735, 0.008996831893920898, 0.008831904411315919, 0.00886518383026123, 0.008767487525939942, 0.008798527717590331, 0.008960415840148925, 0.00904793643951416, 0.008939519882202148, 0.008839008331298828, 0.008812255859375, 0.008880576133728028, 0.009033984184265136, 0.00910035228729248, 0.00883782386779785, 0.008806624412536622, 0.00878115177154541, 0.008798399925231933, 0.008826208114624024, 0.008819616317749024, 0.008790016174316406, 0.00875119972229004, 0.008730527877807617, 0.008816320419311524, 0.008734368324279785, 0.009010111808776856, 0.0090928316116333, 0.008884096145629882, 0.008851584434509277, 0.008820320129394531, 0.009141023635864259, 0.008884287834167481, 0.008949631690979004, 0.009265055656433105, 0.009019647598266602, 0.008875136375427245, 0.008890624046325684, 0.008964832305908204, 0.009039456367492676, 0.008852928161621094, 0.008985664367675781, 0.008939359664916992, 0.008924960136413574, 0.008959903717041015, 0.008861023902893067, 0.008966079711914062, 0.009085951805114746, 0.008875904083251953, 0.008886367797851562, 0.008926848411560058, 0.009011199951171875, 0.009045503616333007, 0.008958847999572753, 0.009060352325439454, 0.009138175964355469, 0.008914943695068359, 0.009045856475830078, 0.008859135627746583, 0.008823519706726074, 0.008849696159362793, 0.008822400093078614, 0.008855584144592286, 0.008881183624267578, 0.00886678409576416, 0.008986271858215333, 0.008894816398620605, 0.009065855979919433, 0.009009792327880859, 0.008894207954406739, 0.008945440292358399, 0.00898300838470459, 0.008892704010009765, 0.008906559944152832, 0.008908096313476563, 0.008907360076904297, 0.008863295555114746, 0.008956352233886718, 0.008873984336853028, 0.008902655601501466, 0.00888003158569336, 0.008908896446228028, 0.008837120056152344, 0.008808223724365235, 0.008828672409057617, 0.008909343719482422, 0.008818623542785644, 0.009027584075927735, 0.008876128196716309, 
0.00884931182861328, 0.008835200309753419, 0.008834943771362305, 0.008755200386047364, 0.008797344207763671, 0.008763392448425293, 0.008837408065795898, 0.00875449562072754, 0.008845343589782714, 0.008853887557983399, 0.008884223937988281, 0.008875136375427245, 0.008856448173522949, 0.008847359657287598, 0.008847040176391601, 0.009026080131530763, 0.009113344192504882, 0.009453408241271973, 0.01106163215637207, 0.00887945556640625, 0.008953824043273925, 0.008927743911743164, 0.008801504135131836, 0.008970815658569336, 0.008792287826538086, 0.008880127906799316, 0.00889241600036621, 0.008904704093933105, 0.008822303771972656, 0.008814240455627441, 0.008812512397766113, 0.008782688140869141, 0.008812543869018554, 0.008888319969177246, 0.009099424362182617, 0.008924351692199708, 0.00895564842224121, 0.008815423965454101, 0.00875324821472168, 0.008759296417236329, 0.008833312034606933, 0.009019328117370605, 0.008959775924682617, 0.008841024398803712, 0.008935135841369629, 0.008972640037536622, 0.008845439910888672, 0.008853504180908203, 0.009134079933166504, 0.009068544387817384, 0.008890624046325684, 0.008860639572143555, 0.009511455535888672, 0.010399807929992675, 0.009720000267028808, 0.009119615554809571, 0.009520288467407226, 0.009003520011901855, 0.0090098876953125, 0.009039615631103516, 0.008982368469238281, 0.009053536415100098, 0.00907756805419922, 0.00923151969909668, 0.008952256202697753, 0.008874655723571778, 0.008879008293151856, 0.008965248107910157, 0.00881049633026123, 0.008882176399230958, 0.008868927955627442, 0.009080960273742675, 0.008985407829284668, 0.008879360198974609, 0.008860416412353515, 0.008974240303039551, 0.009008352279663085, 0.008934464454650879, 0.008855104446411133, 0.008908479690551759, 0.008821151733398437, 0.008840991973876952, 0.008935839653015136, 0.008869279861450195, 0.008892064094543456, 0.008837632179260254, 0.008858016014099121, 0.00906668758392334, 0.010798111915588379, 0.01099888038635254, 0.008987520217895508, 0.008886400222778321, 0.009063103675842284, 0.009005087852478028, 0.008931584358215332, 0.009383008003234864, 0.008927871704101562, 0.008914943695068359, 0.008929183959960937, 0.008966239929199218, 0.008974559783935547, 0.008902432441711426, 0.008849535942077637, 0.008895359992980956, 0.00885043239593506, 0.008800479888916015, 0.008777055740356445, 0.00890272045135498, 0.008966272354125976, 0.008891807556152344, 0.008847423553466797, 0.008849920272827149, 0.008850848197937012, 0.008979583740234375, 0.008975456237792969, 0.009078847885131835, 0.008876640319824219, 0.008830047607421876, 0.008862624168395996, 0.008836447715759277, 0.008850079536437988, 0.008843263626098634, 0.008888319969177246, 0.00885865592956543, 0.008846431732177735, 0.008820608139038085, 0.00877945613861084, 0.009409152030944824, 0.009270976066589356, 0.009010272026062012, 0.008827808380126954, 0.008810976028442382, 0.008898528099060058, 0.008767616271972657, 0.008783103942871094, 0.008837759971618652, 0.008965727806091308, 0.009222463607788086, 0.008927488327026367, 0.008871295928955077, 0.00890124797821045, 0.009062496185302735, 0.008933279991149902, 0.009135775566101074, 0.009042271614074707, 0.008908255577087403, 0.008884767532348632, 0.008960160255432129, 0.00880835247039795, 0.00883017635345459, 0.008790752410888673, 0.008797439575195312, 0.008844032287597656, 0.008853376388549805, 0.008931455612182617, 0.00875276756286621, 0.008790399551391602, 0.008785920143127441, 0.009157695770263673, 0.009108192443847656, 0.008841440200805665, 0.00925596809387207, 
0.008891360282897949, 0.008848896026611328, 0.008784255981445312, 0.009117823600769043, 0.009033727645874023, 0.00893126392364502, 0.008893535614013673, 0.008819552421569824, 0.008771424293518066, 0.008871871948242188, 0.008865376472473145, 0.008831168174743652, 0.008813119888305665, 0.008734720230102539, 0.00889408016204834, 0.008917375564575196, 0.008781824111938476, 0.008884223937988281, 0.008848608016967774, 0.008856351852416993, 0.00879148769378662, 0.008672063827514648, 0.008701696395874023, 0.008734720230102539, 0.008769439697265626, 0.008757184028625489, 0.008720576286315918, 0.008773119926452636, 0.008981247901916505, 0.009094880104064942, 0.009011199951171875, 0.008986623764038085, 0.00854911994934082, 0.008759519577026368, 0.008777631759643554, 0.009008223533630372, 0.008985183715820312, 0.009137920379638672, 0.008878080368041993, 0.008761280059814453, 0.008704511642456055, 0.00872873592376709, 0.00884716796875, 0.008845120429992677, 0.008824159622192383, 0.008681344032287599, 0.008662015914916991, 0.008837120056152344, 0.008753151893615722, 0.008712448120117188, 0.00865238380432129, 0.008612000465393066, 0.008726016044616699, 0.008733087539672852, 0.008736576080322266, 0.008880255699157714, 0.008831199645996093, 0.008945823669433594, 0.008787391662597656, 0.008845664024353028, 0.008983839988708496, 0.00883356761932373, 0.008906944274902344, 0.00917404842376709, 0.008893407821655274, 0.008844287872314453, 0.008920063972473144, 0.008824831962585449, 0.008851167678833007, 0.00881603240966797, 0.008841216087341308, 0.008696703910827637, 0.008775679588317872, 0.008812543869018554, 0.009564160346984863, 0.008742176055908202, 0.008737504005432129, 0.008690879821777344, 0.008835007667541504, 0.008674495697021485, 0.008632255554199218, 0.00875875186920166, 0.008637920379638672, 0.008676159858703613, 0.008607647895812988, 0.008801759719848633, 0.008868512153625488, 0.008680864334106446, 0.008683679580688476, 0.008747424125671387, 0.00871014404296875, 0.008779775619506837, 0.008767359733581543, 0.00896457576751709, 0.008811200141906738, 0.008412320137023926, 0.008626784324645996, 0.008564512252807617, 0.00853660774230957, 0.008734272003173829, 0.008857248306274414, 0.008868127822875977, 0.008753151893615722, 0.008883808135986328, 0.008853376388549805, 0.008675647735595702, 0.00870479965209961, 0.008727423667907715, 0.00898742389678955, 0.008912960052490234, 0.008815903663635254, 0.008739487648010254, 0.008747008323669434, 0.008652799606323243, 0.009008416175842285, 0.008934111595153808, 0.008775679588317872, 0.00859552001953125, 0.008980416297912597, 0.008560640335083008, 0.008590463638305665, 0.008636927604675293, 0.008764863967895508, 0.008656864166259765, 0.008651455879211426, 0.008781824111938476, 0.009072928428649903, 0.008862015724182128, 0.008828351974487305, 0.008748991966247559, 0.008933695793151855, 0.008923135757446288, 0.009109503746032714, 0.008859359741210937, 0.008757184028625489, 0.008786272048950196, 0.008779775619506837, 0.008945055961608887, 0.008793888092041015, 0.00875391960144043, 0.008747072219848632, 0.008831104278564454, 0.008840576171875, 0.008845408439636231, 0.008923551559448243, 0.008876031875610351, 0.008835071563720704, 0.009033535957336426, 0.008898143768310546, 0.008772383689880371, 0.008824000358581543, 0.008835712432861327, 0.008987872123718262, 0.008972512245178223, 0.008796031951904298, 0.009025535583496093, 0.008810848236083984, 0.00879036808013916, 0.0085413761138916, 0.00873964786529541, 0.008720735549926757, 0.008678432464599609, 
0.00885209560394287, 0.008843263626098634, 0.008914943695068359, 0.008874176025390625, 0.009041440010070801, 0.00881056022644043, 0.008790240287780762, 0.00897663974761963, 0.008785663604736328, 0.00881049633026123, 0.008779552459716797, 0.008712608337402344, 0.008967967987060546, 0.00877292823791504, 0.00869983959197998, 0.008579872131347656, 0.008607744216918945, 0.008755200386047364, 0.008764800071716308, 0.008944255828857422, 0.008832768440246582, 0.008724224090576172, 0.008765952110290527, 0.008824831962585449, 0.008857184410095215, 0.008751040458679199, 0.008794591903686524, 0.008736096382141113, 0.00868825626373291, 0.008658975601196288, 0.008687616348266602, 0.008707967758178711, 0.008777695655822754, 0.008763551712036133, 0.008660448074340821, 0.008722623825073243, 0.008863136291503907, 0.008756159782409668, 0.008676575660705567, 0.00862070369720459, 0.008671168327331544, 0.008701279640197753, 0.008751680374145508, 0.008554880142211913, 0.008553983688354493, 0.008615743637084962, 0.008551008224487304, 0.00853600025177002, 0.008504575729370117, 0.008499456405639648, 0.008757792472839355, 0.00875932788848877, 0.008809887886047363, 0.008866687774658204, 0.008763263702392578, 0.009061599731445312, 0.008747072219848632, 0.008667200088500977, 0.008618464469909668, 0.00873692798614502, 0.008955904006958008, 0.008683520317077637, 0.008689215660095215, 0.008705856323242188, 0.009173407554626464, 0.008794336318969727, 0.008767487525939942, 0.00869705581665039, 0.00881439971923828, 0.008817184448242187, 0.008849856376647949, 0.008715999603271485, 0.008763903617858887, 0.008883999824523927, 0.008964096069335938, 0.009066495895385742, 0.009211872100830078, 0.00892899227142334, 0.008974656105041504, 0.008826399803161621, 0.008802016258239747, 0.008812543869018554, 0.00885209560394287, 0.00889254379272461, 0.008826815605163574, 0.008751168251037597, 0.00886291217803955, 0.008935744285583497, 0.008886560440063477, 0.008842495918273926, 0.00880684757232666, 0.0087926082611084, 0.0086976318359375, 0.008856927871704102, 0.008856608390808105, 0.008869728088378907, 0.009023455619812012, 0.009013279914855957, 0.008968031883239747, 0.008843392372131347, 0.008779808044433594, 0.008798080444335938, 0.00878604793548584, 0.008790240287780762, 0.008778719902038575, 0.008850144386291505, 0.008748895645141601, 0.008807904243469238, 0.008760095596313477, 0.008852959632873535, 0.008775424003601075, 0.008786720275878906, 0.008844608306884766, 0.008706015586853027, 0.008753888130187988, 0.008723711967468262, 0.008880895614624023, 0.008880224227905274, 0.008732576370239258, 0.008761024475097657, 0.008884736061096191, 0.008834336280822754, 0.00863862419128418, 0.008839391708374023, 0.009058079719543456, 0.008804896354675292, 0.00880620765686035, 0.008809823989868164, 0.008888992309570312, 0.008818016052246094, 0.008911520004272462, 0.008921088218688965, 0.00884233570098877, 0.008846240043640137, 0.008836928367614747, 0.008863936424255371, 0.008845600128173828, 0.008834783554077148, 0.008820735931396484, 0.008849408149719238, 0.008785920143127441, 0.00880025577545166, 0.009002240180969238, 0.008907648086547851, 0.00900601577758789, 0.009148447990417481, 0.008998016357421874, 0.008832799911499023, 0.00880025577545166, 0.008861696243286133, 0.008836319923400879, 0.008991071701049805, 0.008901056289672851, 0.00902348804473877, 0.008873151779174806, 0.00884819221496582, 0.008764479637145996, 0.008889280319213867, 0.008814016342163085, 0.008823360443115234, 0.008812543869018554, 0.008732992172241211, 0.008735487937927246, 
0.008823007583618163, 0.008836064338684082, 0.008789312362670899, 0.008694208145141602, 0.008742367744445801, 0.008645248413085938, 0.008663999557495116, 0.008716352462768555, 0.008763423919677734, 0.008749183654785156, 0.008731391906738281, 0.008732159614562989, 0.00874342441558838, 0.009235520362854004, 0.008747776031494141, 0.008734911918640136, 0.00871776008605957, 0.00887286376953125, 0.008865440368652344, 0.008939519882202148, 0.008837120056152344, 0.008890368461608887, 0.008663040161132812, 0.008958720207214355, 0.008673088073730469, 0.008732864379882813, 0.008757311820983887, 0.008773823738098145, 0.008771424293518066, 0.0087673921585083, 0.008840991973876952, 0.008894847869873048, 0.008754719734191894, 0.008737088203430176, 0.008720416069030762, 0.008681632041931153, 0.008664159774780274, 0.00882089614868164, 0.008923711776733399, 0.009086976051330567, 0.008939807891845702, 0.008976096153259278, 0.00892092800140381, 0.008986207962036133, 0.008969984054565429, 0.008944448471069335, 0.008857600212097168, 0.008820384025573731, 0.008909152030944825, 0.009033599853515624, 0.008988736152648925, 0.009014816284179687, 0.009033760070800782, 0.009026047706604003, 0.008984288215637208, 0.008909088134765625, 0.00901859188079834, 0.00893827247619629, 0.009376031875610352, 0.008890080451965331, 0.008861696243286133, 0.009027327537536621, 0.008860992431640626, 0.008714528083801269, 0.008937536239624023, 0.008772192001342773, 0.008742015838623047, 0.008706944465637207, 0.008771488189697266, 0.00884335994720459, 0.008820063591003418, 0.008675680160522461, 0.00894598388671875, 0.00878774356842041, 0.00882915210723877, 0.008744735717773438, 0.008673503875732421, 0.008700991630554199, 0.008739775657653808, 0.008668800354003907, 0.008714112281799316, 0.008618495941162109, 0.008673279762268067, 0.008666848182678223, 0.008669471740722657, 0.008656895637512207, 0.008777728080749512, 0.008783871650695801, 0.008855551719665527, 0.008839167594909669, 0.008855551719665527, 0.008833344459533692, 0.008818400382995606, 0.008854623794555663, 0.008841183662414551, 0.009630592346191406, 0.008927264213562013, 0.00888371181488037, 0.008843615531921386, 0.008917152404785156, 0.008953344345092774, 0.008870400428771973, 0.008707327842712402, 0.008751872062683106, 0.008835359573364258, 0.008728096008300781, 0.008756959915161133, 0.008718463897705078, 0.008716640472412109, 0.008781311988830566, 0.008900799751281739, 0.008820768356323241, 0.00881817626953125, 0.00870851230621338, 0.008679424285888672, 0.008838848114013672, 0.00896070384979248, 0.008832256317138671, 0.008818719863891602, 0.009026016235351562, 0.008941344261169434, 0.008962528228759765, 0.008869888305664063, 0.00885251235961914, 0.009118687629699708, 0.008900416374206544, 0.008919232368469239, 0.008847392082214356, 0.00894553565979004, 0.008961664199829102, 0.008894559860229492, 0.008909088134765625, 0.008880543708801269, 0.008869888305664063, 0.008918720245361328, 0.009033727645874023, 0.008802304267883301, 0.008836383819580079, 0.00880303955078125, 0.008824831962585449, 0.00881446361541748, 0.00880454444885254, 0.008826815605163574, 0.008977984428405762, 0.00885372829437256, 0.0088189115524292, 0.008965248107910157, 0.008801152229309082, 0.00867807960510254, 0.008896512031555176, 0.008851743698120118, 0.008865504264831544, 0.008873087882995605, 0.00881161594390869, 0.008969311714172363, 0.00884224033355713, 0.008859583854675293, 0.008867168426513672, 0.008796575546264649, 0.00889241600036621, 0.009000864028930664, 0.008974176406860351, 
0.009174400329589844, 0.009200544357299804, 0.009039104461669922, 0.008987648010253906, 0.008951519966125488, 0.0088853759765625, 0.008891263961791991, 0.008902655601501466, 0.008943615913391113, 0.00912720012664795, 0.008807135581970214, 0.008826208114624024, 0.008991168022155762, 0.00883664035797119, 0.008883135795593262, 0.008822527885437012, 0.008920512199401855, 0.009026111602783203, 0.00897433567047119, 0.008857855796813965, 0.008890111923217774, 0.008972288131713867, 0.008848544120788573, 0.00879702377319336, 0.008816639900207519, 0.008928319931030274, 0.008936544418334962, 0.008781696319580078, 0.00883017635345459, 0.008835519790649414, 0.008743200302124023, 0.008798239707946777, 0.008706048011779785, 0.008695008277893067, 0.008714655876159667, 0.008790111541748047, 0.008761887550354003, 0.008733440399169921, 0.00870691204071045, 0.008771200180053711, 0.008857248306274414, 0.008754048347473144, 0.008831071853637695, 0.008848287582397462, 0.00878326416015625, 0.008797280311584473, 0.008691424369812012, 0.008856032371520996, 0.008706368446350098, 0.008727519989013673, 0.00878764820098877, 0.008708415985107422, 0.008741215705871582, 0.008756319999694824, 0.008838784217834473, 0.008960415840148925, 0.008837568283081054, 0.008917087554931641, 0.008884256362915038, 0.008943584442138672, 0.00884883213043213, 0.00886614418029785, 0.0088242244720459, 0.008761311531066894, 0.008774815559387207, 0.008705727577209473, 0.008619808197021484, 0.008751328468322755, 0.008641599655151367, 0.008557503700256347, 0.008611583709716797, 0.008712767601013183, 0.008728256225585937, 0.008734720230102539, 0.008721823692321778, 0.00874351978302002, 0.0086725435256958, 0.00864463996887207, 0.008700192451477051, 0.008699711799621581, 0.008694368362426758, 0.008613599777221679, 0.008584511756896973, 0.008566944122314454, 0.008682463645935059, 0.008555551528930665, 0.00866915225982666, 0.008553312301635743, 0.008529791831970215, 0.008554368019104005, 0.008589568138122559, 0.008584704399108887, 0.008573439598083496, 0.008531744003295899, 0.008601823806762695, 0.008535103797912598, 0.008627167701721191, 0.008494175910949708, 0.008435903549194336, 0.008538911819458008, 0.00847043228149414, 0.008437760353088379, 0.00848025608062744, 0.008468992233276367, 0.008445952415466309, 0.00850534439086914, 0.008425472259521484, 0.008421664237976073, 0.00856777572631836, 0.008331616401672364, 0.008339872360229492, 0.008441856384277344, 0.008156959533691405, 0.008348544120788575, 0.008316543579101563, 0.008323200225830078, 0.008374176025390624, 0.008517824172973632, 0.00859596824645996, 0.008658656120300293, 0.008662816047668456, 0.008515263557434082, 0.008608223915100098, 0.00865891170501709, 0.008630656242370605, 0.008738271713256836, 0.008538496017456055, 0.008599007606506347, 0.008601951599121093, 0.008540448188781738, 0.008512351989746094, 0.008498047828674317, 0.008533503532409668, 0.008493280410766602, 0.008554592132568359, 0.008466976165771484, 0.008453791618347167, 0.008447423934936523, 0.008370783805847168, 0.0084071683883667, 0.008463871955871583, 0.008399264335632324, 0.008480704307556153, 0.008453632354736328, 0.008628735542297363, 0.008531519889831543, 0.008457856178283691, 0.008403712272644043, 0.008417344093322754, 0.008425663948059083, 0.008406720161437987, 0.008405119895935058, 0.008414463996887207, 0.008452863693237304, 0.008523103713989257, 0.008499903678894043, 0.00841708755493164, 0.008450207710266113, 0.00841113567352295, 0.00881443214416504, 0.008681632041931153, 0.00861580753326416, 0.008657024383544923, 
0.0086364164352417, 0.008589632034301757, 0.008553440093994141, 0.0084999361038208, 0.008519680023193359, 0.008515744209289551, 0.008435744285583496, 0.008456000328063965, 0.008554495811462403, 0.00843785572052002, 0.008619456291198731, 0.008655263900756835]",tokens/s,113.44780742251443,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 382, in __init__ self.fc1 = nn.Linear(self.embed_dim, config.ffn_dim, bias=config.enable_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 72.12 MiB is free. Process 391715 has 14.67 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 2.19 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 705, in __init__ self.model = XGLMModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 517, in __init__ self.layers = nn.ModuleList([XGLMDecoderLayer(config) for _ in range(config.num_layers)]) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 517, in self.layers = nn.ModuleList([XGLMDecoderLayer(config) for _ in range(config.num_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 351, in __init__ self.self_attn = XGLMAttention( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 219, in __init__ self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 56.12 MiB is free. Process 421538 has 14.68 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 4.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 373, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 252, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 116, in __init__ self.q_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 408206 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1011, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 777, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 418, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 342, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 706, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.96 GiB. GPU 0 has a total capacity of 14.74 GiB of which 662.12 MiB is free. Process 422309 has 14.09 GiB memory in use. Of the allocated memory 13.97 GiB is allocated by PyTorch, and 6.66 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,787.050496,14136.77056,0.0,13734.248448,13728.777216,s,1,7.47038916015625,7.47038916015625,0.0,7.47038916015625,7.47038916015625,7.47038916015625,7.47038916015625,[7.47038916015625],,kWh,6.913818537471646e-06,7.551447945368221e-07,3.822780835993633e-06,1.1491744168002101e-05,,MB,1118.04416,14149.353472,0.0,13736.3456,13487.53408,s,10,12.292834716796875,1.2292834716796874,0.003434884028246975,1.2290482788085937,1.2335140014648438,1.2337478454589845,1.2339349206542969,"[1.2214801025390625, 1.2274324951171875, 1.2271624755859376, 1.228467041015625, 1.22891357421875, 1.231551025390625, 1.2291829833984376, 1.2334620361328126, 1.2312012939453125, 1.233981689453125]",tokens/s,208.25139676709617,kWh,3.587702285707867e-05,3.9567405855050255e-06,2.3799630150797557e-05,6.363339359338125e-05,tokens/kWh,4023044.9068274666,MB,1143.087104,14149.353472,0.0,13736.3456,13661.262848,s,10,37.59324096679688,3.7593240966796877,0.0019999530943198155,3.7591628417968748,3.761039404296875,3.7623450195312502,3.76338951171875,"[3.76012939453125, 3.758396240234375, 3.763650634765625, 3.756097900390625, 
3.756988525390625, 3.758545166015625, 3.760749267578125, 3.758891357421875, 3.759434326171875, 3.760358154296875]",tokens/s,16.758331652129407,kWh,0.00010986977552833652,1.211945716989281e-05,7.307514179340502e-05,0.00019506437449163437,tokens/kWh,322970.3023126955,,s,630,37.5897363967896,0.05966624824887231,0.0001974103021507679,0.0596657772064209,0.05988564682006836,0.05995559101104736,0.06034710460662842,"[0.06011884689331055, 0.05948211288452149, 0.059355136871337894, 0.05934080123901367, 0.05944934463500977, 0.05941203308105469, 0.05932620620727539, 0.05952377700805664, 0.059504161834716796, 0.0596929931640625, 0.05944784164428711, 0.059344894409179685, 0.059584415435791016, 0.05957344055175781, 0.05950147247314453, 0.059463680267333986, 0.05951081466674805, 0.059714656829833984, 0.05951168060302734, 0.059850753784179686, 0.059529216766357425, 0.059529216766357425, 0.05945702362060547, 0.05953792190551758, 0.05955583953857422, 0.06014771270751953, 0.05958390426635742, 0.059641441345214846, 0.05956630325317383, 0.059761440277099606, 0.05961523056030273, 0.05980364990234375, 0.059676673889160155, 0.059711456298828125, 0.05970537567138672, 0.059630943298339845, 0.059799774169921875, 0.059918785095214845, 0.05971353530883789, 0.05967174530029297, 0.05963654327392578, 0.05978486251831055, 0.05980604934692383, 0.05974156951904297, 0.059665023803710936, 0.0596434555053711, 0.0595747184753418, 0.05977907180786133, 0.059709121704101566, 0.05987929534912109, 0.059751937866210934, 0.059813919067382815, 0.05973904037475586, 0.05975244903564453, 0.05962956619262695, 0.05975584030151367, 0.060639934539794924, 0.05989091110229492, 0.05979747009277344, 0.05999289703369141, 0.05993452835083008, 0.05987529754638672, 0.05975471878051758, 0.0600241584777832, 0.05959852981567383, 0.059339710235595707, 0.0593551025390625, 0.05933660888671875, 0.05939353561401367, 0.05935782241821289, 0.05937910461425781, 0.059378398895263675, 0.059537281036376954, 0.05939104080200195, 0.05944121551513672, 0.0594334716796875, 0.059636096954345706, 0.05962956619262695, 0.059577983856201173, 0.05979171371459961, 0.05984873580932617, 0.05969488143920899, 0.05963158416748047, 0.059547008514404295, 0.05977791976928711, 0.059606014251708986, 0.059663360595703124, 0.059526687622070314, 0.059592609405517576, 0.05964857482910156, 0.05954927825927735, 0.05954396820068359, 0.0596049919128418, 0.059606494903564455, 0.05977328109741211, 0.05974035263061524, 0.05981184005737305, 0.05978291320800781, 0.059746559143066404, 0.05961910247802735, 0.05954111862182617, 0.05961568069458008, 0.059555038452148434, 0.05962438583374023, 0.05961040115356445, 0.05964195251464844, 0.059771518707275394, 0.059624542236328126, 0.059663265228271485, 0.05971334457397461, 0.05972393417358399, 0.05985283279418945, 0.059858943939208986, 0.059731201171875, 0.05983718490600586, 0.0598853759765625, 0.05980179214477539, 0.05974761581420898, 0.05986703872680664, 0.05976876831054687, 0.05972835159301758, 0.05976425552368164, 0.059730815887451175, 0.05971744155883789, 0.05988780975341797, 0.05985263824462891, 0.06001750564575195, 0.0595333137512207, 0.05935030364990234, 0.0593438720703125, 0.05938521575927734, 0.0595316162109375, 0.05952102279663086, 0.059594753265380856, 0.059676673889160155, 0.05960688018798828, 0.059506591796875, 0.059709697723388674, 0.059547454833984374, 0.05972592163085937, 0.059570270538330077, 0.059568126678466796, 0.05967257690429688, 0.05972921752929688, 0.0596220817565918, 0.05973606491088867, 0.05996748733520508, 0.05968070220947266, 
0.05964774322509766, 0.05959302520751953, 0.05960051345825195, 0.060158241271972654, 0.05975664138793945, 0.0597391357421875, 0.05969776153564453, 0.05970070266723633, 0.060136383056640624, 0.05964799880981445, 0.059678207397460936, 0.05974649429321289, 0.059703617095947265, 0.059756542205810545, 0.059889663696289064, 0.059960800170898436, 0.05976073455810547, 0.05964025497436523, 0.059675712585449216, 0.059792320251464845, 0.05977088165283203, 0.05973606491088867, 0.05976473617553711, 0.05985612869262695, 0.059902721405029294, 0.05975244903564453, 0.05978883361816406, 0.05993724822998047, 0.05990399932861328, 0.05977017593383789, 0.05970550537109375, 0.059682464599609374, 0.05967731094360352, 0.05971900939941406, 0.059653022766113284, 0.06044672012329102, 0.059842559814453126, 0.05994905471801758, 0.05981798553466797, 0.05988351821899414, 0.05985209655761719, 0.05989507293701172, 0.05958889770507812, 0.059340896606445315, 0.05929996871948242, 0.05925904083251953, 0.059349056243896484, 0.05949564743041992, 0.059456287384033205, 0.05946569442749024, 0.059451423645019534, 0.05940825653076172, 0.059590782165527344, 0.059345951080322264, 0.059448001861572265, 0.05951897430419922, 0.05958176040649414, 0.05954848098754883, 0.05957238388061523, 0.059496448516845706, 0.059611137390136716, 0.059312126159667966, 0.059328510284423826, 0.05943046569824219, 0.05948393630981445, 0.05946537780761719, 0.05953023910522461, 0.0595266227722168, 0.05954601669311523, 0.059627647399902346, 0.05958041763305664, 0.05969523239135742, 0.05956390380859375, 0.05974016189575195, 0.05973606491088867, 0.05961638259887695, 0.05965299224853516, 0.05969673538208008, 0.05963203048706055, 0.05965366363525391, 0.05965817642211914, 0.05976102447509766, 0.0597751350402832, 0.05977840042114258, 0.05984076690673828, 0.059678592681884766, 0.059937313079833986, 0.059603073120117187, 0.059811233520507816, 0.05968300628662109, 0.05968515014648437, 0.05964992141723633, 0.05972351837158203, 0.05967500686645508, 0.059883136749267575, 0.0596995849609375, 0.05981184005737305, 0.05973606491088867, 0.059770751953125, 0.059650177001953124, 0.059703296661376956, 0.059764640808105465, 0.05978121566772461, 0.060122177124023436, 0.05998223876953125, 0.05967628860473633, 0.05929817581176758, 0.059289600372314455, 0.05917475128173828, 0.05954291152954101, 0.059293632507324216, 0.059302143096923825, 0.05933935928344727, 0.05963980865478516, 0.0594595832824707, 0.0594595832824707, 0.059428768157958986, 0.05946582412719727, 0.059477184295654295, 0.059590721130371095, 0.059585281372070316, 0.05959881591796875, 0.059506591796875, 0.05950681686401367, 0.060319454193115234, 0.05949059295654297, 0.059394046783447264, 0.0594183349609375, 0.05937385559082031, 0.059687934875488284, 0.05992118453979492, 0.05958268737792969, 0.05967462539672851, 0.05964700698852539, 0.059692001342773436, 0.059650047302246094, 0.05966438293457031, 0.05971475219726562, 0.059574241638183596, 0.05960585784912109, 0.059584510803222655, 0.06012895965576172, 0.059760959625244144, 0.059774143218994144, 0.05958067321777344, 0.05961072158813477, 0.059560928344726566, 0.059632831573486325, 0.05954348754882813, 0.05961763381958008, 0.059596511840820314, 0.05975120162963867, 0.05970057678222656, 0.05975315093994141, 0.059797439575195316, 0.059670238494873046, 0.05974828720092774, 0.05970937728881836, 0.05964028930664062, 0.05955788803100586, 0.05962659072875977, 0.05986191940307617, 0.06026825714111328, 0.05980815887451172, 0.05977180862426758, 0.05982102584838867, 0.059757759094238284, 
0.06001059341430664, 0.059533599853515626, 0.05932032012939453, 0.05936703872680664, 0.05931455993652344, 0.059276767730712894, 0.05934543991088867, 0.05928956985473633, 0.05926710510253906, 0.05937356948852539, 0.059243614196777344, 0.05940316772460937, 0.05957632064819336, 0.05954969787597656, 0.05947817611694336, 0.05944918441772461, 0.059594753265380856, 0.059756481170654296, 0.0596286735534668, 0.059534271240234374, 0.0595387191772461, 0.059576480865478516, 0.05954003143310547, 0.05943475341796875, 0.05957043075561524, 0.05947715377807617, 0.05951513671875, 0.05947356796264648, 0.05950467300415039, 0.05966531372070313, 0.060233184814453125, 0.05984505462646485, 0.05969110488891602, 0.0596478385925293, 0.05976278305053711, 0.05981600189208985, 0.059641502380371095, 0.059677024841308594, 0.05985007858276367, 0.06000032043457031, 0.05966704177856445, 0.05958860778808594, 0.05968896102905273, 0.05968003082275391, 0.05962211227416992, 0.05992652893066406, 0.059912353515625, 0.059754207611083986, 0.060062911987304686, 0.05994591903686523, 0.05977254486083984, 0.059762176513671876, 0.059915103912353516, 0.05989788818359375, 0.059852031707763674, 0.05993139266967774, 0.05981184005737305, 0.05967462539672851, 0.05970671844482422, 0.059794078826904296, 0.059719680786132816, 0.0599183349609375, 0.05978636932373047, 0.05999158477783203, 0.05947878265380859, 0.059698528289794925, 0.059388256072998045, 0.059459487915039064, 0.05956240081787109, 0.05948940658569336, 0.059509567260742184, 0.059490367889404296, 0.05955491256713867, 0.05951580810546875, 0.05946691131591797, 0.05933142471313477, 0.05938380813598633, 0.059440673828125, 0.059671009063720706, 0.05968896102905273, 0.05965983963012695, 0.05962591934204101, 0.05951638412475586, 0.059713729858398436, 0.05980144119262695, 0.059490814208984374, 0.05944319915771484, 0.05950831985473633, 0.05956390380859375, 0.0603583984375, 0.05968345642089844, 0.059609249114990236, 0.059619327545166016, 0.059568126678466796, 0.059668033599853516, 0.059679168701171875, 0.05966873550415039, 0.059632831573486325, 0.05964851379394531, 0.05961119842529297, 0.060093791961669925, 0.05989033508300781, 0.059795135498046874, 0.05961520004272461, 0.059805248260498045, 0.05954147338867188, 0.05981705474853516, 0.05959036636352539, 0.059643905639648435, 0.05968467330932617, 0.05992777633666992, 0.05982035064697266, 0.05990079879760742, 0.05969488143920899, 0.059710590362548825, 0.05977791976928711, 0.05995110321044922, 0.059666431427001954, 0.05975449752807617, 0.05986304092407226, 0.059858943939208986, 0.059885406494140626, 0.060653728485107423, 0.05976784133911133, 0.05976163101196289, 0.05973299026489258, 0.06003507232666016, 0.05952412796020508, 0.05943801498413086, 0.05946540832519531, 0.05926448059082031, 0.059411296844482424, 0.05931008148193359, 0.059496448516845706, 0.059668479919433595, 0.05938560104370117, 0.0594332160949707, 0.05932646560668945, 0.05950668716430664, 0.059660289764404295, 0.059496192932128905, 0.05976444625854492, 0.05966697692871094, 0.059676673889160155, 0.059652095794677736, 0.05967462539672851, 0.05960704040527344, 0.05952505493164063, 0.05960505676269531, 0.05953945541381836, 0.05950668716430664, 0.0596049919128418, 0.05973561477661133, 0.05960518264770508, 0.05942217636108398, 0.05957097625732422, 0.059647422790527344, 0.059805374145507816, 0.05970428848266601, 0.05971548843383789, 0.05971468734741211, 0.059687232971191405, 0.05967628860473633, 0.059678943634033206, 0.05966921615600586, 0.05960396957397461, 0.05958124923706055, 
0.059821311950683596, 0.05965027236938476, 0.059732608795166016, 0.05975628662109375, 0.059867359161376955, 0.05974028778076172, 0.059690113067626956, 0.059865345001220704, 0.05980838394165039, 0.059854209899902346, 0.05980838394165039, 0.05989376068115235, 0.05981388854980469, 0.059686912536621096, 0.05982342529296875, 0.05971014404296875, 0.059650047302246094, 0.05991535949707031, 0.05988240051269531, 0.059772865295410156, 0.059942977905273434, 0.05979471969604492, 0.06015235137939453, 0.059545600891113284, 0.05942998504638672, 0.0594535026550293, 0.05934985733032227, 0.05939199829101562, 0.059666431427001954, 0.05949385452270508, 0.05949017715454102, 0.05957494354248047, 0.05944934463500977, 0.05947536087036133, 0.059454048156738284, 0.05971532821655273, 0.059508991241455075, 0.059703296661376956, 0.05961308670043945, 0.059635231018066406, 0.05954617691040039, 0.059666240692138675, 0.05944543838500976, 0.059840286254882816, 0.05949788665771484, 0.05950751876831055, 0.05947596740722656, 0.05947187042236328, 0.059998207092285157, 0.059578369140625, 0.05961484909057617, 0.05946716690063476, 0.0595629768371582, 0.05964323043823242, 0.060486305236816404, 0.05976780700683594, 0.059635841369628906, 0.05954444885253906, 0.0595599365234375, 0.0595333137512207, 0.05977907180786133, 0.05976063919067383, 0.05978316879272461, 0.05958230209350586, 0.05950070571899414, 0.05958041763305664, 0.05954079818725586, 0.059580257415771484, 0.05967279815673828, 0.05975270462036133, 0.05986751937866211, 0.05982009506225586, 0.05991417694091797, 0.05993695831298828, 0.05978275299072266, 0.0597212142944336, 0.05957910537719727, 0.05960294342041016, 0.05971558380126953, 0.05974630355834961, 0.05991219329833984, 0.05983411026000977, 0.06056985473632812, 0.059865345001220704, 0.05975833511352539, 0.06039532852172851, 0.05977926254272461, 0.0595494384765625, 0.05944313430786133, 0.05932867050170899, 0.059579872131347654, 0.05943926239013672, 0.05935187149047851, 0.05941196823120117, 0.059338497161865233, 0.0595296630859375, 0.05950656127929688, 0.059428417205810546, 0.059536224365234376, 0.05939817428588867, 0.05970943832397461, 0.059772830963134765, 0.059721759796142575, 0.059709312438964844, 0.05959231948852539, 0.059611713409423825, 0.05954502487182617, 0.05960259246826172, 0.05974249649047852, 0.05961996841430664, 0.05958860778808594, 0.05962057495117187, 0.059592830657958985, 0.0595536003112793, 0.059699935913085936, 0.059664543151855466, 0.059703262329101565, 0.05991408157348633, 0.05987094497680664, 0.059714111328125, 0.059760513305664065, 0.059701248168945314, 0.05976268768310547, 0.05958041763305664, 0.059649120330810546, 0.05973699188232422, 0.059786495208740235, 0.059625438690185543, 0.059758369445800784, 0.059641887664794925, 0.05992515182495117, 0.05973961639404297, 0.059824993133544925, 0.05985468673706055, 0.05995926284790039, 0.059994304656982425, 0.059840511322021485, 0.05973526382446289, 0.05979964828491211, 0.059676929473876955, 0.05975699234008789, 0.059815937042236325, 0.059667648315429686, 0.05985772705078125, 0.05973606491088867, 0.05973606491088867, 0.05976201629638672, 0.059738399505615235]",tokens/s,16.759894066557134,, 
float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1556, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1647, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpyfybhaej/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 382, in 
__init__ self.fc1 = nn.Linear(self.embed_dim, config.ffn_dim, bias=config.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 348.12 MiB is free. Process 399477 has 14.40 GiB memory in use. Of the allocated memory 14.28 GiB is allocated by PyTorch, and 3.01 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, 
in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 373, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 115, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 403278 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 373, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 252, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 115, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 405151 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 890, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 822, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 605, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 373, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 116, in __init__ self.q_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 406282 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,784.879616,1140.719616,0.0,738.197504,715.772928,s,1,7.73265576171875,7.73265576171875,0.0,7.73265576171875,7.73265576171875,7.73265576171875,7.73265576171875,[7.73265576171875],,kWh,3.092105858377181e-06,3.3370031823780907e-07,1.0833342000138302e-06,4.509140376628821e-06,,MB,1120.186368,1180.565504,0.0,767.557632,723.637248,s,11,0.6765427513122559,0.06150388648293236,0.0012493268987857967,0.061223518371582034,0.061680416107177734,0.06349779319763184,0.06495169486999512,"[0.06531517028808594, 0.061223518371582034, 0.06139065551757812, 0.06097983932495117, 0.06044812774658203, 0.061394271850585935, 0.06104025650024414, 0.06098400115966797, 0.06136700820922852, 0.060719486236572265, 0.061680416107177734]",tokens/s,4162.3385876767525,kWh,1.963821231879236e-06,2.1657109046627956e-07,1.2965390685502348e-06,3.47693139089575e-06,tokens/kWh,73628142.52542602,MB,1145.167872,1220.411392,0.0,807.40352,735.775744,s,11,10.160821411132812,0.9237110373757101,0.00817215186033644,0.9253666381835938,0.9339949951171875,0.9350167236328125,0.9358341064453125,"[0.9191524047851563, 0.9279426879882813, 0.9295740966796875, 0.9279035034179688, 0.9339949951171875, 0.9360384521484375, 0.9253666381835938, 0.9087274780273438, 0.915688720703125, 0.914076904296875, 0.9223555297851562]",tokens/s,68.20314735978995,kWh,2.6535802076454166e-05,2.9264544704510633e-06,1.1928016965631658e-05,4.139027351253688e-05,tokens/kWh,1522096.7307915387,,s,693,10.152974267959598,0.014650756519422214,0.0004857946698515739,0.014599231719970704,0.014818899536132812,0.015012652397155761,0.01716679168701172,"[0.014310111999511719, 0.0144650239944458, 0.014630144119262695, 0.014571328163146973, 0.014516415596008301, 0.014387455940246582, 0.01478656005859375, 0.014598655700683593, 
0.014553279876708984, 0.014455743789672851, 0.014494400024414062, 0.014563520431518555, 0.0147806396484375, 0.014536479949951171, 0.01456281566619873, 0.014543295860290528, 0.014464927673339843, 0.014598496437072754, 0.014845791816711426, 0.01444438362121582, 0.01445263957977295, 0.014483424186706543, 0.014512384414672851, 0.01455519962310791, 0.014387231826782226, 0.014682016372680663, 0.014680159568786622, 0.014470911979675294, 0.014563488006591798, 0.014499103546142578, 0.014488287925720215, 0.014503935813903808, 0.014526399612426757, 0.014606528282165528, 0.014532608032226562, 0.014540448188781738, 0.014768320083618164, 0.014698783874511718, 0.014503680229187011, 0.014542495727539063, 0.01446127986907959, 0.014483455657958985, 0.01454419231414795, 0.014547648429870606, 0.014624768257141114, 0.014608320236206054, 0.014489983558654785, 0.014584704399108887, 0.014512672424316406, 0.014731552124023438, 0.01466982364654541, 0.01460409641265869, 0.014552319526672363, 0.01450489616394043, 0.014593759536743163, 0.014510175704956055, 0.014466336250305176, 0.014549920082092285, 0.014702879905700683, 0.015550175666809081, 0.01477222442626953, 0.014605664253234863, 0.014651328086853028, 0.014455007553100587, 0.017244096755981445, 0.01720515251159668, 0.01468819236755371, 0.01463548755645752, 0.0147774076461792, 0.014626751899719239, 0.014712512016296387, 0.014605567932128907, 0.014513216018676757, 0.014621376037597656, 0.01466329574584961, 0.014458527565002441, 0.01480777645111084, 0.014671872138977051, 0.014632991790771484, 0.014616543769836426, 0.014638303756713867, 0.014607135772705077, 0.014850048065185547, 0.014597567558288574, 0.015444543838500977, 0.014651391983032227, 0.014671872138977051, 0.014724448204040527, 0.014647583961486816, 0.01474931240081787, 0.014826208114624024, 0.01452239990234375, 0.014679615974426269, 0.014660032272338867, 0.014644703865051269, 0.014627327919006347, 0.014542816162109375, 0.014639167785644532, 0.014623071670532227, 0.014742815971374511, 0.014482111930847167, 0.01453219223022461, 0.014461024284362793, 0.014626815795898437, 0.01481113624572754, 0.014555232048034668, 0.014597184181213379, 0.014590815544128418, 0.014532608032226562, 0.01455513572692871, 0.014608384132385254, 0.014528384208679199, 0.01453990364074707, 0.014564607620239258, 0.014569215774536133, 0.014566975593566895, 0.014560000419616699, 0.014586879730224609, 0.014607040405273438, 0.014641471862792969, 0.014583616256713868, 0.014496831893920899, 0.014751935958862305, 0.01461235237121582, 0.01461292839050293, 0.014603903770446777, 0.014305407524108887, 0.01463548755645752, 0.014605952262878417, 0.014796832084655761, 0.018518592834472655, 0.017163455963134764, 0.014739328384399415, 0.014658271789550781, 0.014636287689208985, 0.014573760032653808, 0.01463967990875244, 0.014630911827087402, 0.014596416473388671, 0.014666975975036621, 0.014526944160461425, 0.014569472312927247, 0.014534496307373046, 0.01466988754272461, 0.01470473575592041, 0.014610431671142577, 0.01481062412261963, 0.01453718376159668, 0.01481116771697998, 0.014532608032226562, 0.014542176246643066, 0.014666751861572265, 0.014542207717895508, 0.014702688217163086, 0.014538399696350098, 0.01459008026123047, 0.014531167984008789, 0.014565407752990723, 0.014933792114257813, 0.014727168083190918, 0.014583807945251465, 0.01458790397644043, 0.014788607597351074, 0.015205599784851075, 0.014653247833251953, 0.015242143630981446, 0.014704704284667969, 0.014632800102233886, 0.014571999549865723, 0.014687935829162597, 0.014501728057861329, 
0.014612640380859375, 0.01459340763092041, 0.014477952003479004, 0.014710559844970703, 0.014608832359313964, 0.014605183601379395, 0.014498880386352539, 0.0145633602142334, 0.0145283203125, 0.014531647682189942, 0.015125439643859864, 0.014659392356872558, 0.014469311714172364, 0.014551199913024903, 0.014593888282775878, 0.014539936065673827, 0.014430975914001465, 0.014798944473266601, 0.014090239524841308, 0.014458880424499512, 0.014329567909240722, 0.01442144012451172, 0.014359392166137696, 0.01451417636871338, 0.014810720443725585, 0.014528160095214843, 0.019440383911132814, 0.014612480163574219, 0.014544447898864747, 0.014429632186889649, 0.014532959938049317, 0.014529184341430664, 0.014411904335021973, 0.014487423896789552, 0.01449779224395752, 0.014406815528869629, 0.014346112251281738, 0.014643648147583007, 0.014449055671691895, 0.015527423858642577, 0.014499903678894043, 0.014786815643310547, 0.014460351943969727, 0.014485568046569823, 0.014433088302612305, 0.014340096473693848, 0.014426112174987793, 0.014395392417907715, 0.01439948844909668, 0.014522527694702149, 0.01445628833770752, 0.014749600410461425, 0.014836031913757325, 0.01448307228088379, 0.014670463562011718, 0.014750720024108887, 0.014348992347717285, 0.014384384155273438, 0.014653823852539062, 0.014520223617553712, 0.014781344413757324, 0.014453727722167969, 0.014454784393310547, 0.01444048023223877, 0.01457596778869629, 0.01484825611114502, 0.014510272026062011, 0.017814720153808594, 0.016986944198608397, 0.01456287956237793, 0.014533056259155273, 0.014542847633361817, 0.014700127601623534, 0.014330559730529786, 0.014511839866638183, 0.014524415969848633, 0.014780415534973144, 0.014521599769592285, 0.014772992134094238, 0.014694399833679199, 0.014817184448242187, 0.014282912254333495, 0.014640416145324707, 0.014612544059753417, 0.014471839904785157, 0.014511839866638183, 0.014508000373840332, 0.01462713623046875, 0.014599231719970704, 0.014351008415222168, 0.014673312187194825, 0.014739616394042969, 0.019632863998413085, 0.01464249610900879, 0.01464367961883545, 0.014641632080078125, 0.015179871559143066, 0.016338720321655273, 0.01559119987487793, 0.014561375617980958, 0.014551039695739745, 0.014819328308105468, 0.01462502384185791, 0.014947936058044434, 0.014588064193725585, 0.014543168067932129, 0.014546848297119141, 0.014620448112487793, 0.014723072052001953, 0.014626815795898437, 0.014680064201354981, 0.014637056350708008, 0.014702176094055176, 0.014550944328308106, 0.014752127647399903, 0.014911616325378418, 0.014528512001037597, 0.014617983818054198, 0.01515174388885498, 0.016058368682861326, 0.016470016479492186, 0.014698495864868164, 0.014694239616394043, 0.014619839668273927, 0.014609151840209961, 0.0145282564163208, 0.014678496360778809, 0.014505375862121582, 0.014651552200317382, 0.014429792404174805, 0.015565407752990723, 0.01457817554473877, 0.01460201644897461, 0.014620863914489746, 0.014536128044128419, 0.014690655708312989, 0.014452735900878906, 0.014346400260925292, 0.014516096115112304, 0.014706624031066895, 0.014536288261413574, 0.014565823554992675, 0.01461888027191162, 0.014625984191894531, 0.014401535987854003, 0.014970879554748535, 0.014639103889465332, 0.014481568336486816, 0.014519295692443847, 0.014512991905212403, 0.014460927963256836, 0.014710783958435059, 0.014618592262268066, 0.014524224281311036, 0.015026399612426758, 0.014647295951843262, 0.014832703590393067, 0.01505891227722168, 0.01907811164855957, 0.015073535919189453, 0.015867648124694825, 0.01614860725402832, 0.015332799911499023, 
0.01569593620300293, 0.014822815895080567, 0.014622783660888672, 0.01494713592529297, 0.015197504043579101, 0.014680319786071778, 0.014630751609802246, 0.014656191825866699, 0.014866047859191895, 0.014602304458618164, 0.0147042236328125, 0.014680959701538087, 0.014638431549072265, 0.014699007987976074, 0.014747008323669434, 0.014698783874511718, 0.01469052791595459, 0.01480076789855957, 0.014665504455566407, 0.01464303970336914, 0.014709376335144042, 0.014667776107788086, 0.014643199920654297, 0.014561280250549317, 0.01483129596710205, 0.01469279956817627, 0.014717951774597168, 0.014867456436157226, 0.01459990406036377, 0.01479695987701416, 0.014659423828125, 0.014546239852905273, 0.01460217571258545, 0.014801568031311035, 0.014639360427856445, 0.014649344444274901, 0.014789952278137206, 0.014538880348205566, 0.01463967990875244, 0.014579872131347656, 0.014706527709960938, 0.014933631896972657, 0.014627200126647949, 0.0149071683883667, 0.014170559883117675, 0.014691807746887208, 0.014635744094848633, 0.014595487594604491, 0.014654047966003418, 0.01464134407043457, 0.014547871589660645, 0.014680447578430176, 0.0147359037399292, 0.014509407997131348, 0.01484275245666504, 0.01468553638458252, 0.014598431587219239, 0.014692511558532715, 0.014954463958740234, 0.014733344078063966, 0.014694399833679199, 0.015003487586975098, 0.01481065559387207, 0.014872639656066895, 0.014849599838256835, 0.014625791549682618, 0.014588191986083984, 0.014679776191711427, 0.014702591896057129, 0.014573856353759766, 0.014681504249572755, 0.014762304306030273, 0.01458790397644043, 0.014630847930908204, 0.014698559761047364, 0.014755840301513673, 0.01489510440826416, 0.014693951606750489, 0.014604736328125, 0.014585536003112793, 0.014596384048461914, 0.014513343811035155, 0.014721023559570312, 0.014609600067138672, 0.014530207633972168, 0.01479475212097168, 0.014751744270324708, 0.014570752143859863, 0.0147774076461792, 0.014711551666259766, 0.014682656288146972, 0.014601951599121093, 0.014879648208618163, 0.01458563232421875, 0.014753791809082031, 0.01465452766418457, 0.01473964786529541, 0.014612768173217773, 0.014610143661499023, 0.0146943359375, 0.01475391960144043, 0.014670592308044433, 0.014578816413879394, 0.014678848266601563, 0.014567520141601563, 0.014604191780090332, 0.014757887840270996, 0.014193087577819825, 0.01461558437347412, 0.014726112365722657, 0.014585023880004882, 0.014521151542663575, 0.01445702362060547, 0.014447839736938476, 0.014324288368225097, 0.014282784461975097, 0.014272192001342773, 0.01419273567199707, 0.01433199977874756, 0.01404867172241211, 0.013955807685852051, 0.013911168098449707, 0.013952032089233398, 0.013924192428588868, 0.013912063598632812, 0.013913567543029786, 0.01427132797241211, 0.014323424339294434, 0.014272064208984374, 0.01416643238067627, 0.014185759544372558, 0.01461023998260498, 0.014576543807983398, 0.01441584014892578, 0.014233504295349121, 0.014350367546081542, 0.0143505277633667, 0.014421695709228516, 0.014443903923034669, 0.01449772834777832, 0.014400511741638184, 0.014366623878479003, 0.014381376266479493, 0.014761664390563966, 0.014514047622680664, 0.014563648223876954, 0.014522175788879394, 0.014712832450866698, 0.014605855941772461, 0.014622591972351074, 0.01456777572631836, 0.014592255592346191, 0.014601408004760742, 0.014719807624816894, 0.014683903694152832, 0.014549471855163575, 0.014479071617126464, 0.014642815589904785, 0.014550623893737792, 0.014484319686889648, 0.01452841567993164, 0.014495840072631836, 0.014562463760375977, 0.014623456001281739, 
0.01453171157836914, 0.014601247787475586, 0.014489567756652832, 0.014433759689331055, 0.014403264045715332, 0.01434671974182129, 0.014171680450439454, 0.014625247955322265, 0.014538751602172852, 0.014513983726501464, 0.014558624267578125, 0.014575712203979492, 0.014521023750305177, 0.014694399833679199, 0.01459609603881836, 0.014563424110412598, 0.014618528366088868, 0.01459126377105713, 0.014559295654296875, 0.014732192039489746, 0.014638784408569337, 0.014528575897216798, 0.01455299186706543, 0.01451583957672119, 0.01455356788635254, 0.014724575996398925, 0.014594207763671875, 0.014492032051086425, 0.015059264183044433, 0.014672896385192872, 0.014570176124572754, 0.014813183784484863, 0.014958592414855957, 0.014618623733520507, 0.0145664644241333, 0.014469663619995117, 0.014643232345581054, 0.014633343696594239, 0.014428159713745118, 0.014479104042053223, 0.014518752098083496, 0.014509856224060058, 0.014661439895629883, 0.014751935958862305, 0.014574943542480468, 0.014512800216674804, 0.014553183555603028, 0.014597536087036133, 0.014631423950195312, 0.014448512077331542, 0.014428640365600586, 0.014482463836669922, 0.014334752082824706, 0.0143788480758667, 0.014342016220092773, 0.01459763240814209, 0.01493280029296875, 0.01442313575744629, 0.014471903800964355, 0.014303232192993164, 0.014548864364624024, 0.01429248046875, 0.014307968139648438, 0.014221664428710937, 0.014193663597106934, 0.014185279846191405, 0.014326848030090333, 0.014070560455322265, 0.014087167739868164, 0.014017631530761718, 0.014206080436706544, 0.014116640090942384, 0.014305184364318848, 0.014107808113098145, 0.01401360034942627, 0.015034144401550292, 0.014161727905273437, 0.014200511932373048, 0.014481696128845215, 0.01439798355102539, 0.014415455818176269, 0.014233695983886718, 0.01412217617034912, 0.014048064231872558, 0.014691840171813965, 0.014276479721069336, 0.014167679786682128, 0.014183423995971679, 0.014131008148193359, 0.01417024040222168, 0.014145824432373047, 0.014238752365112305, 0.014310144424438476, 0.014228480339050293, 0.01488105583190918, 0.014475775718688964, 0.01458140754699707, 0.014578240394592285, 0.01542460823059082, 0.014969984054565429, 0.01464527988433838, 0.01463475227355957, 0.014618816375732422, 0.014651103973388672, 0.01464134407043457, 0.014574687957763672, 0.014664671897888183, 0.014622528076171875, 0.014660767555236816, 0.014646143913269042, 0.014663680076599121, 0.014888959884643555, 0.014698495864868164, 0.01579529571533203, 0.014801440238952636, 0.014663328170776367, 0.014738495826721192, 0.01473423957824707, 0.014671808242797851, 0.01457151985168457, 0.014702560424804688, 0.014711071968078613, 0.014589920043945312, 0.014670175552368164, 0.01434876823425293, 0.014415648460388184, 0.014335647583007812, 0.014533087730407715, 0.014461824417114258, 0.014281439781188964, 0.01415503978729248, 0.014274592399597169, 0.014057472229003906, 0.014577631950378417, 0.014561568260192871, 0.01441158390045166, 0.014440383911132813, 0.014467071533203125, 0.014499232292175293, 0.01465609645843506, 0.014577664375305176, 0.01452073574066162, 0.014634464263916016, 0.014953599929809571, 0.01458249568939209, 0.014538559913635254, 0.014649760246276856, 0.014522432327270508, 0.014777952194213868, 0.014696864128112793, 0.014614272117614745, 0.014606911659240723, 0.014472895622253418, 0.014657152175903321, 0.014566080093383789, 0.014496959686279298, 0.01462118434906006, 0.01460745620727539, 0.01458678436279297, 0.015128576278686523, 0.014646431922912598, 0.014637920379638672, 0.014841567993164063, 
0.014903776168823242, 0.014674847602844238, 0.014715807914733887, 0.014856191635131836, 0.014634271621704102, 0.014584544181823731, 0.014704511642456055, 0.01463923168182373, 0.014738783836364747, 0.01458348846435547, 0.01461961555480957, 0.014606656074523926, 0.014646592140197754, 0.014557567596435547, 0.014636704444885254, 0.014595999717712402, 0.014776032447814941, 0.014643136024475098, 0.014651647567749023, 0.014567584037780761, 0.014679967880249023, 0.014782912254333496, 0.014677472114562988, 0.01467404842376709, 0.014874496459960938, 0.014623295783996583, 0.014415871620178223, 0.014565376281738282, 0.014472352027893067, 0.014578720092773438, 0.014573408126831054, 0.014737248420715332]",tokens/s,68.25586096351543,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,881.307648,14646.378496,0.0,14243.856384,14221.3376,s,1,7.381302734375,7.381302734375,0.0,7.381302734375,7.381302734375,7.381302734375,7.381302734375,[7.381302734375],,kWh,1.1269719941666758e-05,1.2356717917496266e-06,5.4752821579961974e-06,1.7980673891412583e-05,,MB,1230.606336,14742.847488,0.0,14329.839616,14290.688,s,10,13.118953857421873,1.3118953857421876,0.0046871773602393816,1.3110763549804687,1.313641259765625,1.3194674194335938,1.3241283471679688,"[1.3123465576171875, 1.3114776611328125, 1.3092620849609375, 1.31085400390625, 1.3112987060546875, 1.310318603515625, 1.3252935791015625, 1.3070848388671874, 1.30951904296875, 1.311498779296875]",tokens/s,195.137510797152,kWh,3.83807521912513e-05,4.232934776940693e-06,2.5429548121400776e-05,6.804323508959277e-05,tokens/kWh,3762313.765107198,MB,1264.201728,14757.527552,0.0,14344.51968,14290.69056,s,10,38.77709912109375,3.877709912109375,0.0017913384193059157,3.878015380859375,3.8793688232421877,3.8800430541992186,3.8805824389648436,"[3.874580078125, 3.875171630859375, 3.877640625, 3.876857666015625, 3.87839013671875, 3.88071728515625, 3.876917236328125, 3.879201416015625, 3.878404052734375, 3.879218994140625]",tokens/s,16.246702674499343,kWh,0.00011324432787791236,1.249175119579394e-05,7.519033792999854e-05,0.00020092641700370488,tokens/kWh,313547.6207632685,,s,630,38.77381826400756,0.061545743276202484,0.00021573617028195752,0.06153548812866211,0.06176049690246582,0.06185023517608642,0.06245889717102051,"[0.06252748870849609, 0.06170624160766602, 0.061263198852539065, 0.061352481842041014, 0.061110401153564455, 0.06123929595947265, 0.061214847564697264, 0.06119580841064453, 0.061304512023925783, 0.06122358322143555, 0.06121062469482422, 0.06127206420898437, 0.0612760009765625, 0.06144015884399414, 0.061267967224121096, 0.06146041488647461, 0.06147078323364258, 0.061628414154052735, 0.06155878448486328, 0.06148502349853516, 0.061380638122558596, 0.06143932723999023, 0.061282302856445314, 0.06137424087524414, 0.06129926300048828, 0.06142784118652344, 0.061429279327392575, 0.06168438339233399, 0.06146451187133789, 0.06145977783203125, 
0.0614202880859375, 0.06151116943359375, 0.06147532653808594, 0.061553760528564455, 0.06148188781738281, 0.061515777587890626, 0.06150044631958008, 0.061513729095458984, 0.0614615364074707, 0.06156691360473633, 0.061429759979248044, 0.061476097106933594, 0.06141414260864258, 0.06156288146972656, 0.06158313751220703, 0.06157865524291992, 0.06155129623413086, 0.06164038467407226, 0.06154694366455078, 0.06171443176269531, 0.06160995101928711, 0.06151990509033203, 0.061483009338378906, 0.06162220764160156, 0.06168172836303711, 0.06165817642211914, 0.06157574462890625, 0.06166470336914062, 0.061644737243652346, 0.06179481506347656, 0.06172723388671875, 0.06171648025512695, 0.0616376953125, 0.06244147109985351, 0.06137446212768555, 0.06126156616210938, 0.06132902526855469, 0.06105875015258789, 0.061205024719238284, 0.06113468933105469, 0.061129280090332035, 0.061169281005859374, 0.0612724494934082, 0.061478240966796875, 0.06152873611450195, 0.061358081817626954, 0.06150137710571289, 0.061362239837646486, 0.06152505493164063, 0.06151814270019531, 0.06147340774536133, 0.06156492614746094, 0.06155878448486328, 0.06134790420532227, 0.06151155090332031, 0.061400928497314454, 0.061384063720703125, 0.061322078704833985, 0.06138838577270508, 0.06145059204101563, 0.061664382934570314, 0.061555648803710936, 0.061607872009277344, 0.061487136840820314, 0.06155267333984375, 0.06148633575439453, 0.06163113784790039, 0.061593631744384765, 0.0615035514831543, 0.06137241744995117, 0.061437950134277344, 0.06131062316894531, 0.06141484832763672, 0.06157814407348633, 0.06149078369140625, 0.06150099182128906, 0.061700958251953125, 0.061489151000976565, 0.06161203384399414, 0.061552448272705076, 0.06149750518798828, 0.061521953582763675, 0.06156435012817383, 0.06152016067504883, 0.06153993606567383, 0.06150627136230469, 0.06176918411254883, 0.06149321746826172, 0.061698593139648435, 0.06156697463989258, 0.06171347045898438, 0.06173519897460938, 0.06183113479614258, 0.061876926422119144, 0.06171023941040039, 0.06168137741088867, 0.06252339172363282, 0.06146047973632812, 0.061159423828125, 0.06119964981079101, 0.06113763046264648, 0.06119014358520508, 0.06125158309936524, 0.06117539215087891, 0.06110863876342774, 0.0612410888671875, 0.061318878173828126, 0.06144425582885742, 0.06132160186767578, 0.06147212982177734, 0.06134233474731445, 0.06140681457519531, 0.061616542816162106, 0.06156288146972656, 0.06144812774658203, 0.06156867218017578, 0.06135849761962891, 0.06137036895751953, 0.06135603332519531, 0.06154444885253906, 0.06138880157470703, 0.06137353515625, 0.06149574279785156, 0.06150841522216797, 0.06147155380249023, 0.0618135986328125, 0.06154240036010742, 0.061476577758789064, 0.0614766731262207, 0.06149372863769531, 0.06156492614746094, 0.0615546875, 0.06158540725708008, 0.06178611373901367, 0.06151270294189453, 0.06166361618041992, 0.06145270538330078, 0.061462688446044925, 0.06165731048583984, 0.06157705688476563, 0.06154035186767578, 0.06166518402099609, 0.06164019012451172, 0.06167148971557617, 0.06181737518310547, 0.06192127990722656, 0.06174105453491211, 0.06166527938842774, 0.06157721710205078, 0.061712383270263675, 0.061687808990478515, 0.06180166244506836, 0.06184412765502929, 0.0618804817199707, 0.061753345489501954, 0.06168511962890625, 0.06171043014526367, 0.06176412963867187, 0.06180656051635742, 0.06243468856811524, 0.06150003051757812, 0.06136646270751953, 0.061330623626708984, 0.061159870147705075, 0.06126611328125, 0.06117171096801758, 0.06128025436401367, 0.06136617660522461, 
0.06145779037475586, 0.061379295349121094, 0.061341697692871094, 0.06144204711914063, 0.0614389762878418, 0.06149427032470703, 0.06168985748291016, 0.06158943939208984, 0.06173260879516602, 0.06149516677856445, 0.06157766342163086, 0.061306880950927733, 0.06142531204223633, 0.0612949447631836, 0.06125708770751953, 0.061262367248535156, 0.06151299285888672, 0.06140192031860352, 0.061513729095458984, 0.06146985626220703, 0.061520736694335935, 0.06150758361816406, 0.061489025115966794, 0.06146790313720703, 0.06150841522216797, 0.061550655364990235, 0.061537822723388674, 0.06152444839477539, 0.061617408752441406, 0.06141823959350586, 0.061488574981689456, 0.061577247619628905, 0.06165875244140625, 0.06155487823486328, 0.0617470703125, 0.061665950775146486, 0.06164284896850586, 0.06169120025634765, 0.06156512069702148, 0.061634719848632814, 0.061688159942626955, 0.061550689697265626, 0.06170991897583008, 0.061540000915527346, 0.06162448120117187, 0.061733470916748044, 0.06154425430297852, 0.061644542694091795, 0.06165526580810547, 0.06164633560180664, 0.06179087829589844, 0.061616191864013674, 0.061789695739746096, 0.061698558807373044, 0.06247232055664063, 0.06144646453857422, 0.06132121658325195, 0.06123110580444336, 0.06107955169677735, 0.06167552185058594, 0.06120627212524414, 0.06113510513305664, 0.06122905731201172, 0.06137241744995117, 0.06134320068359375, 0.06145897674560547, 0.06140230560302734, 0.0615208969116211, 0.06148076629638672, 0.061624065399169925, 0.061585662841796875, 0.06166732788085937, 0.061480960845947265, 0.061396415710449216, 0.061263809204101564, 0.061362815856933595, 0.06131302261352539, 0.0613265266418457, 0.061348670959472655, 0.061488895416259765, 0.06147507095336914, 0.06156435012817383, 0.06157984161376953, 0.06151926422119141, 0.06153087997436523, 0.061537952423095704, 0.06168975830078125, 0.06242127990722656, 0.06150758361816406, 0.06154387283325195, 0.061499969482421875, 0.06145024108886719, 0.06156288146972656, 0.06145843124389649, 0.06158540725708008, 0.06167305755615234, 0.06164112091064453, 0.06174515151977539, 0.0619048957824707, 0.061652511596679685, 0.061555168151855466, 0.061607872009277344, 0.06160806274414062, 0.06171539306640625, 0.061608959197998046, 0.06165708923339844, 0.06173875045776367, 0.06166553497314453, 0.06153580856323242, 0.061688255310058594, 0.06156902313232422, 0.06161542510986328, 0.06169193649291992, 0.06188304138183594, 0.06174284744262695, 0.06175155258178711, 0.061682910919189454, 0.06254329681396484, 0.061518016815185546, 0.061378944396972654, 0.06133145523071289, 0.06113075256347656, 0.06120425415039062, 0.061112545013427735, 0.0612044792175293, 0.061480960845947265, 0.06142550277709961, 0.06125993728637695, 0.06136627197265625, 0.061394271850585935, 0.06147113418579102, 0.06174512100219726, 0.06173494338989258, 0.06177519989013672, 0.06193641662597656, 0.06153023910522461, 0.06163391876220703, 0.06144883346557617, 0.06149324798583984, 0.06130601501464844, 0.06135894393920899, 0.06160969543457031, 0.06149967956542969, 0.061472766876220705, 0.06155673599243164, 0.061470016479492184, 0.06150624084472656, 0.061429630279541014, 0.06156915283203125, 0.061652896881103515, 0.061757537841796876, 0.061701568603515625, 0.06163497543334961, 0.0615200309753418, 0.06156041717529297, 0.06145065689086914, 0.06147686386108398, 0.061619743347167966, 0.06163667297363281, 0.061626785278320315, 0.061638656616210936, 0.06155673599243164, 0.06163840103149414, 0.061634815216064454, 0.06182912063598633, 0.061736255645751956, 0.0618353271484375, 
0.0616589126586914, 0.06176019287109375, 0.06190505599975586, 0.06185574340820312, 0.061661182403564455, 0.06177587127685547, 0.06200729751586914, 0.061710334777832034, 0.06166463851928711, 0.061782657623291014, 0.06168502426147461, 0.06173529434204102, 0.06177772903442383, 0.062373855590820315, 0.061472766876220705, 0.06117171096801758, 0.06123721694946289, 0.06118403244018555, 0.06131216049194336, 0.06114595031738281, 0.06122441482543945, 0.061480575561523435, 0.06141177749633789, 0.06129267120361328, 0.06139020919799805, 0.06135257720947265, 0.061529502868652344, 0.06148601531982422, 0.06148825454711914, 0.06146332931518555, 0.061505630493164064, 0.06154143905639648, 0.0615351676940918, 0.06152601623535156, 0.06157926559448242, 0.06136627197265625, 0.061472766876220705, 0.061603839874267576, 0.061548545837402345, 0.06155782318115234, 0.061629375457763674, 0.06141952133178711, 0.061517822265625, 0.06148313522338867, 0.06148031997680664, 0.061532608032226564, 0.06162233734130859, 0.06152304077148438, 0.06166566467285156, 0.061505313873291015, 0.061506145477294924, 0.06152617645263672, 0.06153798294067383, 0.06150912094116211, 0.06169068908691406, 0.06151772689819336, 0.06152816009521484, 0.06151887893676758, 0.061561119079589846, 0.06153286361694336, 0.06150316619873047, 0.06152771377563476, 0.06162435150146484, 0.06166387176513672, 0.06167097473144531, 0.061595390319824216, 0.06163731384277344, 0.06180044937133789, 0.061742656707763674, 0.06172060775756836, 0.06169232177734375, 0.061650367736816404, 0.06174163055419922, 0.06157926559448242, 0.061740032196044924, 0.06160486221313476, 0.06256639862060547, 0.06152921676635742, 0.06122380828857422, 0.06127561569213867, 0.06108419036865234, 0.06129459381103516, 0.0612147216796875, 0.06128416061401367, 0.06126406478881836, 0.06135363388061523, 0.06137686538696289, 0.06137036895751953, 0.061275775909423826, 0.06145267105102539, 0.06137184143066406, 0.06158393478393555, 0.06164633560180664, 0.0615838737487793, 0.0615546875, 0.061396991729736325, 0.061327457427978516, 0.06143171310424805, 0.06130483245849609, 0.06158870315551758, 0.061542369842529296, 0.06162720108032226, 0.06153334426879883, 0.06179296112060547, 0.06145235061645508, 0.06142899322509766, 0.06146953582763672, 0.06157833480834961, 0.061510623931884764, 0.06151919937133789, 0.06165974426269531, 0.06166732788085937, 0.06172025680541992, 0.06177199935913086, 0.061763233184814456, 0.0616247673034668, 0.06157926559448242, 0.06182083129882812, 0.0616464958190918, 0.06161452865600586, 0.06149097442626953, 0.06154230499267578, 0.06152592086791992, 0.061665695190429685, 0.06151168060302734, 0.06164684677124024, 0.061530113220214844, 0.06200083160400391, 0.06162668609619141, 0.061713920593261716, 0.061569534301757815, 0.06159689712524414, 0.06189136123657227, 0.06185903930664063, 0.061954113006591795, 0.06177865600585938, 0.06164012908935547, 0.06175801467895508, 0.061880126953125, 0.06263398361206055, 0.06160793685913086, 0.061306880950927733, 0.061301792144775394, 0.06126425552368164, 0.061263904571533204, 0.061225536346435544, 0.061277217864990234, 0.06126895904541016, 0.061294559478759766, 0.06126137542724609, 0.06136265563964844, 0.061306880950927733, 0.06151478576660156, 0.061459423065185544, 0.061496543884277347, 0.061594112396240235, 0.06168195343017578, 0.061571071624755856, 0.061579006195068356, 0.06136038589477539, 0.061426815032958985, 0.06134223937988281, 0.0616267204284668, 0.06144518280029297, 0.061539264678955076, 0.06142303848266602, 0.06166969680786133, 
0.061462783813476564, 0.06148710250854492, 0.06146662521362305, 0.061677566528320314, 0.06152396774291992, 0.06185523223876953, 0.061679710388183595, 0.06163411331176758, 0.06146294403076172, 0.0615052490234375, 0.06149603271484375, 0.06151168060302734, 0.061513729095458984, 0.06156492614746094, 0.06151964950561523, 0.06159996795654297, 0.06154035186767578, 0.06165708923339844, 0.06169139099121094, 0.06156662368774414, 0.06155145645141601, 0.06160793685913086, 0.06163251113891602, 0.061859840393066405, 0.06173491287231445, 0.06176963043212891, 0.061578689575195314, 0.06175513458251953, 0.06167155075073242, 0.0616517448425293, 0.061624160766601564, 0.061722721099853516, 0.061704254150390624, 0.061859840393066405, 0.06182025527954101, 0.06246601486206055, 0.06157926559448242, 0.06126182556152344, 0.06127939224243164, 0.06127196884155273, 0.06126073455810547, 0.0611328010559082, 0.06112771224975586, 0.06124969482421875, 0.06133833694458008, 0.06122505569458008, 0.061399040222167967, 0.06135334396362305, 0.06164748764038086, 0.06155043029785156, 0.06159116744995117, 0.061567520141601564, 0.06200121688842773, 0.06165702438354492, 0.061515777587890626, 0.061462528228759764, 0.061505535125732425, 0.061315071105957034, 0.061300735473632816, 0.06143692779541016, 0.06148339080810547, 0.06140377426147461, 0.06151987075805664, 0.061456382751464846, 0.06159465789794922, 0.061473087310791014, 0.06151235198974609, 0.06176335906982422, 0.06179248046875, 0.06174720001220703, 0.06177996826171875, 0.06150972747802735, 0.061519294738769534, 0.06140975952148438, 0.06133555221557617, 0.061504608154296876, 0.061623199462890625, 0.061546142578125, 0.06154684829711914, 0.061601249694824216, 0.06172848129272461, 0.06161648178100586, 0.061704673767089845, 0.06165862274169922, 0.061663745880126956, 0.06173286437988281, 0.06187532806396484, 0.061815681457519533, 0.061927425384521485, 0.06168156814575195, 0.06162835311889649, 0.0616385612487793, 0.06194755172729492, 0.06172428894042969, 0.061676513671875, 0.06166527938842774, 0.06179971313476563, 0.061758174896240234]",tokens/s,16.248077393626406,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/modeling_decilm.py"", line 311, in __init__ self.model = DeciLMModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/modeling_decilm.py"", line 182, in __init__ self.layers = nn.ModuleList([DeciLMDecoderLayer(config, layer_idx) for layer_idx File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/modeling_decilm.py"", line 182, in self.layers = nn.ModuleList([DeciLMDecoderLayer(config, layer_idx) for layer_idx File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/modeling_decilm.py"", line 149, in __init__ self.mlp = LlamaMLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/transformers_v4_35_2__modeling_llama.py"", line 236, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 162.12 MiB is free. Process 171696 has 14.58 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 25.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn 
= DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,881.31584,14646.378496,0.0,14243.856384,14221.3376,s,1,7.524400390625,7.524400390625,0.0,7.524400390625,7.524400390625,7.524400390625,7.524400390625,[7.524400390625],,kWh,1.0667238691644342e-05,1.1691701679572987e-06,4.300281217999025e-06,1.6136690077600666e-05,,MB,1359.634432,14744.94464,0.0,14329.839616,14290.688,s,10,1.9798347778320313,0.1979834777832031,0.004561095443371463,0.1992624206542969,0.20129905395507813,0.20193789520263672,0.2024489682006836,"[0.18552589416503906, 0.2002303924560547, 0.20115708923339845, 0.19838890075683593, 0.19825209045410155, 0.19538563537597656, 0.19796095275878905, 0.2025767364501953, 0.2002211456298828, 0.20013594055175782]",tokens/s,1293.0371911151417,kWh,5.755707551061812e-06,6.346573527210754e-07,3.843053183372572e-06,1.023341808715546e-05,tokens/kWh,25016079.45846755,MB,1381.933056,14761.721856,0.0,14344.51968,14290.69056,s,10,38.1987265625,3.8198726562500007,0.0017638941195386468,3.820326171875,3.8214107177734378,3.8220240844726563,3.822514777832031,"[3.81607861328125, 3.818225830078125, 3.819842041015625, 3.820150390625, 3.8209033203125, 3.822637451171875, 3.8212744140625, 3.81840283203125, 3.820501953125, 3.820709716796875]",tokens/s,16.492696398378786,kWh,0.00011167352653893908,1.2317759114175193e-05,7.400303687122748e-05,0.00019799432252434174,tokens/kWh,318190.94202690927,,s,630,38.19504713821409,0.0606270589495462,0.00047340756900450477,0.060518991470336916,0.06091783103942871,0.06109026126861573,0.06370457962036134,"[0.06346752166748047, 0.06137241744995117, 0.06053887939453125, 0.060427680969238284, 0.06037062454223633, 0.060436927795410156, 0.0603895034790039, 0.06032419204711914, 0.06037894439697265, 0.06038137435913086, 0.060477439880371096, 0.06027264022827149, 0.06036070251464844, 0.060321151733398436, 0.06032777786254883, 0.06099571228027344, 0.061012542724609375, 0.06104012680053711, 0.0607279052734375, 0.06057779312133789, 0.06050563049316406, 0.06041551971435547, 0.06043539047241211, 0.06032144165039063, 0.06030985641479492, 0.06029212951660156, 0.060308448791503905, 0.06041190338134766, 0.06049359893798828, 0.06028870391845703, 0.060297760009765625, 0.06044467163085938, 0.06066995239257812, 0.060964000701904296, 0.06078140640258789, 0.060765472412109375, 0.06064406585693359, 0.060432384490966794, 
0.060569278717041014, 0.06054729461669922, 0.06040934371948242, 0.06040636825561523, 0.060286975860595705, 0.0603135986328125, 0.0605359992980957, 0.060410686492919925, 0.06045695877075195, 0.06044185638427734, 0.060753822326660156, 0.06076649475097656, 0.06080934524536133, 0.06075641632080078, 0.06065151977539063, 0.060571456909179686, 0.06044076919555664, 0.06054092788696289, 0.060476833343505856, 0.060367454528808595, 0.06049792098999023, 0.06050156784057617, 0.06032563018798828, 0.06044851303100586, 0.06045587158203125, 0.06384022521972656, 0.061572288513183596, 0.060478271484375, 0.06027264022827149, 0.0603054084777832, 0.060411392211914064, 0.06045132827758789, 0.060386337280273435, 0.06035065460205078, 0.06037583923339844, 0.06031769561767578, 0.060170238494873046, 0.06024579238891602, 0.06016019058227539, 0.060389278411865234, 0.06069427108764648, 0.06112422561645508, 0.061537025451660156, 0.06088687896728515, 0.06048988723754883, 0.060948192596435545, 0.06038288116455078, 0.060267135620117186, 0.060317249298095704, 0.06031814575195313, 0.06033190536499024, 0.06035263824462891, 0.06061827087402344, 0.060443103790283205, 0.06035660934448242, 0.06053478240966797, 0.06061875152587891, 0.060947616577148436, 0.06090019226074219, 0.060782302856445314, 0.06080742263793945, 0.06081539154052734, 0.06051225662231445, 0.06054297637939453, 0.06044467163085938, 0.06045286560058594, 0.0603504638671875, 0.060311038970947264, 0.06038985443115234, 0.06045600128173828, 0.06049427032470703, 0.06051279830932617, 0.060604225158691405, 0.06058963012695313, 0.06069023895263672, 0.060781375885009765, 0.06092105484008789, 0.06069942474365234, 0.06055696105957031, 0.0603590087890625, 0.06046105575561524, 0.06042828750610352, 0.060469249725341796, 0.06061670303344727, 0.06047948837280273, 0.060456417083740235, 0.06044316864013672, 0.060631038665771485, 0.06433254241943359, 0.06177791976928711, 0.060510208129882816, 0.06032588958740234, 0.06026755142211914, 0.060351455688476566, 0.060440574645996094, 0.060368545532226564, 0.0605863037109375, 0.06055734252929688, 0.06041574478149414, 0.0602421760559082, 0.060345630645751956, 0.060350849151611326, 0.060362239837646485, 0.060787551879882815, 0.06117516708374023, 0.06110815811157227, 0.06087545776367188, 0.06054502487182617, 0.06054822540283203, 0.06039436721801758, 0.06033817672729492, 0.060326942443847655, 0.06029411315917969, 0.06037913513183594, 0.06032998275756836, 0.06039862442016602, 0.060638175964355466, 0.06040371322631836, 0.060348094940185545, 0.06053715133666992, 0.060665855407714846, 0.06084579086303711, 0.06070915222167969, 0.06072063827514648, 0.06062646484375, 0.06048662567138672, 0.060590080261230465, 0.06043852615356445, 0.06037036895751953, 0.06041427230834961, 0.06053231811523437, 0.060450592041015626, 0.060531455993652346, 0.06057283020019531, 0.060555328369140626, 0.06060246276855469, 0.06072198486328125, 0.060878177642822266, 0.06082627105712891, 0.060872097015380856, 0.06088150405883789, 0.0607825927734375, 0.06063718414306641, 0.060633087158203126, 0.060610145568847654, 0.06044918441772461, 0.060519775390625, 0.06053955078125, 0.06045695877075195, 0.06046105575561524, 0.060499969482421874, 0.06385014343261719, 0.061387073516845705, 0.060509567260742185, 0.060545345306396485, 0.06036630249023438, 0.06043529510498047, 0.06035660934448242, 0.06031769561767578, 0.06040281677246094, 0.06035881423950195, 0.060349151611328124, 0.06030054473876953, 0.06038985443115234, 0.0604183349609375, 0.06081232070922851, 0.06071705627441406, 
0.061000320434570314, 0.06099507141113281, 0.061004638671875, 0.060801025390625, 0.060530017852783204, 0.060379806518554686, 0.06052249526977539, 0.06047654342651367, 0.06032595062255859, 0.060435264587402344, 0.060577598571777344, 0.06050601577758789, 0.06042620849609375, 0.06029660797119141, 0.060299678802490236, 0.06032844924926758, 0.06061056137084961, 0.06078585433959961, 0.06101264190673828, 0.06092575836181641, 0.060753952026367186, 0.060545345306396485, 0.06051430511474609, 0.06045286560058594, 0.06040563201904297, 0.06053696060180664, 0.060418048858642576, 0.06034841537475586, 0.060505535125732424, 0.06045753479003906, 0.060458751678466795, 0.060663326263427735, 0.060708736419677736, 0.060916000366210935, 0.06080160140991211, 0.060878849029541014, 0.06089481735229492, 0.06079900741577148, 0.06054905700683594, 0.06058848190307617, 0.06051808166503906, 0.06051388931274414, 0.06062768173217774, 0.060590080261230465, 0.06052249526977539, 0.06054431915283203, 0.06056006240844727, 0.06372979354858399, 0.06165296173095703, 0.06051820755004883, 0.060483680725097654, 0.06031779098510742, 0.06066995239257812, 0.060524543762207034, 0.06048767852783203, 0.06048767852783203, 0.060368896484375, 0.06045625686645508, 0.060244449615478514, 0.06032611083984375, 0.06031155014038086, 0.06053673553466797, 0.06088508987426758, 0.06127356719970703, 0.061087745666503906, 0.0609093132019043, 0.060674846649169924, 0.06050406265258789, 0.060474945068359376, 0.06034995269775391, 0.06038521575927734, 0.06033055877685547, 0.06041030502319336, 0.060430271148681644, 0.0605307502746582, 0.060544479370117185, 0.06038764953613281, 0.060477664947509766, 0.06057984161376953, 0.06083993530273438, 0.06101347351074219, 0.06089577484130859, 0.06069247817993164, 0.060581886291503906, 0.060477439880371096, 0.060499969482421874, 0.06039091110229492, 0.060332542419433595, 0.060416000366210934, 0.06045267105102539, 0.06045100784301758, 0.06054707336425781, 0.06044672012329102, 0.06057779312133789, 0.0607059211730957, 0.0607828483581543, 0.06087107086181641, 0.06085039901733398, 0.060765697479248044, 0.060770816802978515, 0.060624897003173826, 0.06059523010253906, 0.0606258544921875, 0.06057577514648437, 0.0605305290222168, 0.06052675247192383, 0.06055731201171875, 0.060544033050537106, 0.060585952758789065, 0.060609535217285154, 0.06396172714233399, 0.061607295989990235, 0.06052499389648437, 0.06042230224609375, 0.060405696868896484, 0.06048070526123047, 0.06043328094482422, 0.060368896484375, 0.060418048858642576, 0.06045001602172852, 0.06046582412719727, 0.06032579040527344, 0.06037097549438476, 0.0604815673828125, 0.060727455139160155, 0.06113894271850586, 0.06129459381103516, 0.0610524787902832, 0.060902976989746095, 0.060697441101074216, 0.060559326171875, 0.06035823822021484, 0.06032022476196289, 0.06033206558227539, 0.060395488739013674, 0.06045695877075195, 0.060419326782226564, 0.060461822509765624, 0.06045654296875, 0.060281246185302735, 0.06034431838989258, 0.06077439880371094, 0.06076006317138672, 0.06091151809692383, 0.060880992889404295, 0.06077849578857422, 0.06065971374511719, 0.06050604629516602, 0.06059961700439453, 0.060504833221435544, 0.0603422737121582, 0.06045695877075195, 0.060585086822509765, 0.0604189453125, 0.06057289505004883, 0.06048137664794922, 0.0605623664855957, 0.060706817626953125, 0.06107331085205078, 0.06107926559448242, 0.06106355285644531, 0.06094438552856445, 0.06084403228759765, 0.06078425598144531, 0.06055155181884766, 0.06049331283569336, 0.06050419235229492, 0.06053100967407227, 
0.06060166549682617, 0.06060518264770508, 0.06052438354492187, 0.060540576934814454, 0.06069504165649414, 0.06404710388183593, 0.06162432098388672, 0.06061587142944336, 0.06051411056518555, 0.060437278747558595, 0.060434654235839845, 0.06042819213867188, 0.06045868682861328, 0.060370655059814454, 0.06031635284423828, 0.06036275100708008, 0.06032284927368164, 0.06035494232177734, 0.06024662399291992, 0.06041334533691406, 0.06073545455932617, 0.061079647064208986, 0.06106780624389648, 0.06097510528564453, 0.06076176071166992, 0.06054332733154297, 0.06036275100708008, 0.0602213134765625, 0.060288959503173825, 0.06050527954101562, 0.06043545532226562, 0.060370655059814454, 0.06057731246948242, 0.060580608367919925, 0.06034790420532227, 0.06044518280029297, 0.06055936050415039, 0.06070272064208984, 0.06102220916748047, 0.060873985290527344, 0.06134246444702148, 0.06067814254760742, 0.06064035034179688, 0.06064799880981445, 0.06051670455932617, 0.06071065521240234, 0.06045222473144531, 0.06049635314941406, 0.06034441757202148, 0.06047897720336914, 0.06039225769042969, 0.06047334289550781, 0.06060031890869141, 0.06067609786987305, 0.06073884963989258, 0.06091747283935547, 0.060827713012695316, 0.06102239990234375, 0.06075059127807617, 0.06052864074707031, 0.06067161560058594, 0.06047577667236328, 0.060426239013671876, 0.06065049743652344, 0.060611583709716796, 0.06038880157470703, 0.06042476654052734, 0.06048972702026367, 0.06364284896850586, 0.061410079956054686, 0.0603702392578125, 0.06037369537353516, 0.06039299011230469, 0.060369377136230466, 0.06055875015258789, 0.060533344268798826, 0.06033203125, 0.060467201232910155, 0.060440574645996094, 0.06029056167602539, 0.06031718444824219, 0.0604846076965332, 0.06052355194091797, 0.06079296112060547, 0.06103535842895508, 0.06116556930541992, 0.06083152008056641, 0.06057731246948242, 0.06042284774780273, 0.06036428833007813, 0.06030992126464844, 0.0603441276550293, 0.06031184005737305, 0.06032572937011719, 0.06027407836914062, 0.06040447998046875, 0.06044876861572265, 0.0603504638671875, 0.06047334289550781, 0.06058803176879883, 0.06077030563354492, 0.060859710693359374, 0.06070547103881836, 0.060677345275878904, 0.06070463943481445, 0.06041241455078125, 0.06048604965209961, 0.06054707336425781, 0.06038118362426758, 0.060409854888916016, 0.06037081527709961, 0.06028915023803711, 0.060532127380371094, 0.06050876617431641, 0.060634368896484374, 0.06056012725830078, 0.060915550231933596, 0.060981216430664065, 0.06093024063110351, 0.060731391906738284, 0.06060236740112305, 0.06066995239257812, 0.06052223968505859, 0.06061286544799805, 0.06048972702026367, 0.06051430511474609, 0.06069452667236328, 0.06050352096557617, 0.060424480438232425, 0.06040396881103516, 0.06062899017333984, 0.06376908874511719, 0.06155260848999024, 0.06055526351928711, 0.06053235244750976, 0.06036108779907227, 0.06037475204467774, 0.06037465667724609, 0.06042486572265625, 0.06048972702026367, 0.060405567169189454, 0.060371135711669924, 0.060319297790527346, 0.060403327941894534, 0.06029804611206055, 0.06044876861572265, 0.06079401779174805, 0.06108041763305664, 0.06085945510864258, 0.06076102447509766, 0.06061260986328125, 0.060483009338378906, 0.060394046783447265, 0.06042009735107422, 0.0603955192565918, 0.06033567810058594, 0.06043600082397461, 0.06047836685180664, 0.06037094497680664, 0.06035660934448242, 0.060550689697265625, 0.06043081665039062, 0.06056508636474609, 0.06076457595825195, 0.061044254302978516, 0.06069500732421875, 0.06069990539550781, 0.06086937713623047, 
0.06061654281616211, 0.060659870147705075, 0.060581886291503906, 0.06037484741210938, 0.060485633850097656, 0.06054054260253906, 0.06038380813598633, 0.060485153198242186, 0.0604984016418457, 0.060619998931884765, 0.06059497451782227, 0.060677345275878904, 0.060719745635986325, 0.06072265625, 0.06109231948852539, 0.0608831672668457, 0.0607375373840332, 0.06062080001831055, 0.06061388778686523, 0.06112713623046875, 0.06049785614013672, 0.060590431213378905, 0.060469249725341796, 0.06045695877075195, 0.06045484924316406, 0.06059785461425781, 0.0635228157043457, 0.061662273406982424, 0.06049683380126953, 0.060416000366210934, 0.06034841537475586, 0.06035561752319336, 0.06049270248413086, 0.060504127502441406, 0.060387039184570314, 0.06033347320556641, 0.06041689682006836, 0.06025737762451172, 0.06035958480834961, 0.060262401580810546, 0.06050611114501953, 0.06081536102294922, 0.06110003280639648, 0.06097715377807617, 0.060819263458251956, 0.060485824584960934, 0.06047929763793945, 0.060450592041015626, 0.06032835388183594, 0.06038470458984375, 0.060428863525390626, 0.06054912185668945, 0.06036684799194336, 0.06048767852783203, 0.060507457733154295, 0.060428993225097656, 0.06058137512207031, 0.06067660903930664, 0.060903423309326174, 0.06088294219970703, 0.06070204925537109, 0.06071363067626953, 0.06063654327392578, 0.06054771041870117, 0.06057139205932617, 0.06061286544799805, 0.060424190521240234, 0.06049529647827148, 0.06053740692138672, 0.060520416259765626, 0.06068841552734375, 0.06048153686523437, 0.06054051208496094, 0.06068214416503906, 0.06094483184814453, 0.06084771347045898, 0.06080508804321289, 0.06062745666503906, 0.06065478515625, 0.0606478385925293, 0.06059151840209961, 0.06083686447143555, 0.06057331085205078, 0.06059036636352539, 0.0607006721496582, 0.06062828826904297, 0.06051424026489258, 0.06072406387329102, 0.06056073760986328]",tokens/s,16.494285181014625,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in 
run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target 
report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = 
DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3826, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1565, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1731, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,881.225728,14646.378496,0.0,14243.856384,14221.3376,s,1,7.509537109375,7.509537109375,0.0,7.509537109375,7.509537109375,7.509537109375,7.509537109375,[7.509537109375],,kWh,1.0481846112512965e-05,1.1490394774605478e-06,5.470282154002026e-06,1.710116774397554e-05,,MB,1370.652672,14744.94464,0.0,14329.839616,14290.688,s,10,1.978707992553711,0.1978707992553711,0.0037086557329024123,0.1978182907104492,0.2023212417602539,0.20234135360717773,0.2023574430847168,"[0.1897382354736328, 0.19566435241699218, 0.2023167724609375, 0.19947273254394532, 0.19823091125488282, 0.1957361602783203, 0.195970947265625, 0.20236146545410155, 0.19740567016601562, 0.20181074523925782]",tokens/s,1293.773517686193,kWh,5.730616950081432e-06,6.3197694071282e-07,3.8135270159608063e-06,1.0176120906755057e-05,tokens/kWh,25156933.800782915,MB,1393.221632,14761.721856,0.0,14344.51968,14290.69056,s,10,38.193401367187505,3.8193401367187496,0.005413697317999283,3.8185838623046875,3.82819423828125,3.8286310546875,3.8289805078124997,"[3.810307861328125, 3.814501708984375, 3.81763916015625, 3.819787841796875, 3.819528564453125, 3.816742431640625, 3.8170693359375, 3.820659423828125, 3.82809716796875, 3.82906787109375]",tokens/s,16.494995927260934,kWh,0.00011166672969575172,1.231714281939937e-05,7.417314648443981e-05,0.00019815701899959088,tokens/kWh,317929.6919082643,,s,630,38.18985108947754,0.06061881125313894,0.00048121554023533835,0.0605130558013916,0.06092756614685059,0.061078613090515134,0.06377705879211426,"[0.0632319679260254, 0.06125568008422851, 0.06029926300048828, 0.06019276809692383, 0.060134624481201174, 0.060197441101074216, 0.060266048431396484, 0.06026918411254883, 0.06024399948120117, 0.06030131149291992, 0.060268543243408204, 0.06018585586547852, 0.06017715072631836, 0.06022265625, 0.060397632598876955, 0.06069641494750976, 0.06087961578369141, 0.0609749755859375, 0.060743968963623045, 0.06052249526977539, 0.06036195373535156, 0.0603656005859375, 0.06034431838989258, 0.060246017456054686, 0.06032144165039063, 0.0605044174194336, 0.06023158264160156, 0.06024201583862305, 0.060252159118652344, 0.06020915222167969, 0.06026851272583008, 0.060432415008544925, 0.060723201751708984, 0.06077167892456055, 0.060715007781982425, 0.060580127716064455, 0.060582271575927736, 0.060405216217041015, 0.060387168884277344, 0.060271297454833984, 0.06030950546264648, 0.06035660934448242, 0.06035660934448242, 0.06024127960205078, 0.060314239501953124, 0.06025830459594726, 0.06034182357788086, 0.06047174453735352, 
0.06076172637939453, 0.06067238235473633, 0.06076963043212891, 0.060613025665283204, 0.060688640594482424, 0.06063232040405273, 0.060463871002197266, 0.060491073608398435, 0.0603449592590332, 0.06036896133422852, 0.06050201416015625, 0.060459007263183595, 0.06031135940551758, 0.06029331207275391, 0.06031731033325195, 0.06382134246826172, 0.06139068984985351, 0.0604571533203125, 0.06036726379394531, 0.060217342376708984, 0.06039545440673828, 0.06030956649780273, 0.06023116683959961, 0.060241600036621094, 0.0602297592163086, 0.06023238372802735, 0.06024780654907227, 0.06026422500610352, 0.06016457748413086, 0.06031564712524414, 0.06087478256225586, 0.061079521179199216, 0.060958271026611326, 0.06083555221557617, 0.06050275039672852, 0.060510208129882816, 0.060333118438720704, 0.06029520034790039, 0.060295169830322265, 0.06032783889770508, 0.060307838439941405, 0.060228286743164064, 0.06061868667602539, 0.06036070251464844, 0.060210590362548826, 0.06038179016113281, 0.06053478240966797, 0.060657665252685546, 0.06089292907714844, 0.060879104614257815, 0.0606453742980957, 0.06045491027832031, 0.0604664306640625, 0.060580608367919925, 0.060391422271728515, 0.06033407974243164, 0.06034009552001953, 0.060547199249267575, 0.060439743041992185, 0.0605335693359375, 0.06048767852783203, 0.06044672012329102, 0.060610015869140624, 0.06054556655883789, 0.06068428802490235, 0.060818912506103516, 0.06068892669677734, 0.060651359558105467, 0.06052880096435547, 0.06043648147583008, 0.06041740798950195, 0.060487743377685546, 0.06038713455200195, 0.060557758331298825, 0.06040607833862305, 0.0603579216003418, 0.06037712097167969, 0.060568256378173826, 0.06397724914550781, 0.06152489471435547, 0.060467201232910155, 0.0603422737121582, 0.06016204833984375, 0.0602806396484375, 0.060362945556640624, 0.060331745147705076, 0.060447006225585936, 0.06036479949951172, 0.06047238540649414, 0.0603043212890625, 0.06039961624145508, 0.060511966705322266, 0.06037500762939453, 0.0607130241394043, 0.06092211151123047, 0.06093724822998047, 0.060826591491699215, 0.06057779312133789, 0.06041596984863281, 0.06044403076171875, 0.06036547088623047, 0.06029011154174805, 0.06034646224975586, 0.06033075332641601, 0.06046831893920898, 0.06043545532226562, 0.060370784759521484, 0.060383136749267576, 0.06039078521728516, 0.06057664108276367, 0.060708831787109375, 0.060921375274658206, 0.06067782211303711, 0.060676929473876956, 0.06058598327636719, 0.06054912185668945, 0.06061670303344727, 0.0607825927734375, 0.060362369537353515, 0.060393600463867186, 0.060408065795898434, 0.06043648147583008, 0.06049337768554688, 0.06044073486328125, 0.06055145645141601, 0.060524543762207034, 0.060794208526611326, 0.0608713264465332, 0.06070806503295898, 0.060614879608154294, 0.060631614685058594, 0.06066531372070313, 0.06056195068359375, 0.06058918380737305, 0.06064822387695312, 0.060371040344238285, 0.06058393478393555, 0.06055523300170899, 0.06041337585449219, 0.06050848007202148, 0.060512542724609375, 0.06395148849487305, 0.061448192596435545, 0.06044086456298828, 0.06036684799194336, 0.06020691299438476, 0.060354625701904294, 0.06043388748168945, 0.06046361541748047, 0.060438686370849606, 0.0603770866394043, 0.060375038146972655, 0.060270591735839846, 0.06030950546264648, 0.0603089599609375, 0.06051894378662109, 0.060927425384521484, 0.061219390869140626, 0.06113276672363281, 0.06086249542236328, 0.06051356887817383, 0.06061539077758789, 0.06042793655395508, 0.06030588912963867, 0.06029299163818359, 0.06059110260009765, 0.060348926544189455, 
0.06054115295410156, 0.06038719940185547, 0.06037321472167969, 0.060510303497314455, 0.06046307373046875, 0.0610489616394043, 0.06083993530273438, 0.06085836791992188, 0.060923263549804686, 0.06080985641479492, 0.06088006210327149, 0.06044079971313476, 0.06055382537841797, 0.06043648147583008, 0.060284927368164064, 0.06054460906982422, 0.06045302581787109, 0.06039363098144531, 0.06054207992553711, 0.06040265655517578, 0.06043814468383789, 0.06050649642944336, 0.0607061767578125, 0.06069667053222656, 0.06072988891601563, 0.06094847869873047, 0.060780544281005856, 0.06065951919555664, 0.0606346549987793, 0.06062556838989258, 0.06045801544189453, 0.060418590545654294, 0.06075027084350586, 0.0604114875793457, 0.06045884704589844, 0.060368896484375, 0.060652095794677736, 0.06405014038085938, 0.061491073608398436, 0.0604796142578125, 0.060447967529296875, 0.0602446403503418, 0.06038745498657227, 0.060400737762451175, 0.060437408447265625, 0.06030233764648438, 0.06036172866821289, 0.060409854888916016, 0.06034131240844726, 0.060356895446777345, 0.06039004898071289, 0.06048470306396484, 0.060914592742919924, 0.061031551361083985, 0.06100057601928711, 0.060801025390625, 0.060493824005126956, 0.060440574645996094, 0.0603054084777832, 0.060375038146972655, 0.06050191879272461, 0.0604304313659668, 0.06041907119750976, 0.060572673797607425, 0.060751327514648436, 0.060520606994628905, 0.06056998443603516, 0.060631038665771485, 0.06062259292602539, 0.06092006301879883, 0.061060382843017576, 0.060985343933105465, 0.060795616149902344, 0.06078611373901367, 0.06061494445800781, 0.06064361572265625, 0.060475391387939455, 0.06029283142089844, 0.06040121459960938, 0.06054576110839844, 0.06060031890869141, 0.06053683090209961, 0.06041798400878906, 0.06045907211303711, 0.060565502166748046, 0.06061056137084961, 0.060693889617919924, 0.06067855834960938, 0.06079510498046875, 0.06070272064208984, 0.06061651229858398, 0.06046329498291016, 0.06048742294311524, 0.0603911361694336, 0.06033225631713867, 0.060534271240234375, 0.06048851013183594, 0.06045017623901367, 0.060469886779785154, 0.06042611312866211, 0.06366864013671875, 0.06152188873291015, 0.060442623138427735, 0.060344192504882814, 0.06029939270019531, 0.060343807220458984, 0.060493473052978516, 0.06041686248779297, 0.060388481140136716, 0.060366718292236325, 0.06030438232421875, 0.060276737213134764, 0.06031932830810547, 0.060270912170410154, 0.060467422485351564, 0.06076156616210938, 0.06097939300537109, 0.060887264251708983, 0.06081126403808594, 0.060609886169433594, 0.06042192077636719, 0.060350975036621096, 0.06027302551269531, 0.06017843246459961, 0.06024806213378906, 0.06033203125, 0.0602204475402832, 0.060351455688476566, 0.060447872161865236, 0.06035955047607422, 0.0603171501159668, 0.06036102294921875, 0.060674270629882815, 0.0607907829284668, 0.06092310333251953, 0.060921825408935544, 0.06078323364257812, 0.060542240142822265, 0.06057174301147461, 0.06035948944091797, 0.060278785705566405, 0.06037299346923828, 0.06032179260253906, 0.06044672012329102, 0.06064857482910156, 0.06068928146362305, 0.060551265716552734, 0.0608109130859375, 0.06065507125854492, 0.06060031890869141, 0.06074800109863281, 0.060784671783447264, 0.06104118347167969, 0.06076416015625, 0.06048767852783203, 0.060509376525878907, 0.06037136077880859, 0.06035497665405273, 0.060467201232910155, 0.0605030403137207, 0.06039769744873047, 0.06045587158203125, 0.060757537841796876, 0.06362710571289062, 0.06154902267456055, 0.06058790588378906, 0.06044992065429688, 0.06026079940795898, 
0.06036316680908203, 0.06043628692626953, 0.0604304313659668, 0.06032793426513672, 0.06035238265991211, 0.06034236907958984, 0.06042569732666016, 0.060461822509765624, 0.060432384490966794, 0.060407806396484375, 0.06072032165527344, 0.0609288330078125, 0.060913375854492184, 0.060987358093261716, 0.06055353546142578, 0.06044211196899414, 0.06030182266235352, 0.060229633331298826, 0.06030131149291992, 0.06026444625854492, 0.060286975860595705, 0.06028902435302735, 0.0604870719909668, 0.06055996704101563, 0.060375038146972655, 0.06042144012451172, 0.060616798400878906, 0.06082112121582031, 0.06105596923828125, 0.06079657745361328, 0.060606239318847656, 0.06049030303955078, 0.06045084762573242, 0.060542945861816404, 0.060459007263183595, 0.0603106575012207, 0.060382080078125, 0.060418048858642576, 0.060397342681884764, 0.06058406448364258, 0.060348480224609376, 0.06051548767089844, 0.0606503677368164, 0.06073324966430664, 0.061112510681152345, 0.06086380767822266, 0.06068428802490235, 0.06064144134521485, 0.06054147338867188, 0.06039263916015625, 0.06048441696166992, 0.06051833724975586, 0.060399681091308596, 0.060550430297851565, 0.06051094436645508, 0.06032979202270508, 0.06046937561035156, 0.0605733757019043, 0.06397507095336914, 0.06140972900390625, 0.06043814468383789, 0.060358783721923825, 0.060383392333984376, 0.06030249786376953, 0.06034233474731445, 0.06036355209350586, 0.06031155014038086, 0.0603842887878418, 0.060310497283935546, 0.060216575622558596, 0.060357376098632814, 0.0602655029296875, 0.060684257507324216, 0.060993953704833986, 0.06123702239990234, 0.0610332145690918, 0.06091987228393555, 0.06062694549560547, 0.06049587249755859, 0.06036207962036133, 0.06027536010742188, 0.06032998275756836, 0.06038083267211914, 0.06040752029418945, 0.06028684616088867, 0.06038969421386719, 0.06047747039794922, 0.060456993103027344, 0.060397022247314455, 0.060658592224121094, 0.06095801544189453, 0.0610577278137207, 0.06093619155883789, 0.06073251342773438, 0.06052726364135742, 0.06044697570800781, 0.060493663787841795, 0.06047760009765625, 0.060468734741210936, 0.06059059143066406, 0.06048060989379883, 0.060504993438720706, 0.06046515274047851, 0.060417121887207034, 0.06067020797729492, 0.06069443130493164, 0.06093283081054687, 0.06105500793457031, 0.06101401519775391, 0.06091273498535156, 0.060865310668945315, 0.06073516845703125, 0.06048191833496094, 0.06058195114135742, 0.06056755065917969, 0.06055875015258789, 0.060553279876708985, 0.06051065444946289, 0.060565185546875, 0.06051881790161133, 0.06062080001831055, 0.06419967651367188, 0.06155440139770508, 0.06050332641601563, 0.060455936431884766, 0.06036275100708008, 0.06046441650390625, 0.06070345687866211, 0.06054297637939453, 0.060665855407714846, 0.06053068923950195, 0.06059417724609375, 0.06038937759399414, 0.06045286560058594, 0.06064547348022461, 0.06061398315429688, 0.06076268768310547, 0.061077503204345705, 0.061001728057861325, 0.060911617279052734, 0.060633087158203126, 0.06048972702026367, 0.060390625, 0.06040041732788086, 0.06043353652954102, 0.06071795272827148, 0.06055875015258789, 0.060566112518310546, 0.060614654541015625, 0.060599361419677734, 0.06051721572875977, 0.060760128021240235, 0.06102742385864258, 0.06090643310546875, 0.06101193618774414, 0.060868640899658204, 0.06073344039916992, 0.06095667266845703, 0.06069657516479492, 0.06066751861572266, 0.060647422790527344, 0.060747776031494144, 0.060830078125, 0.06063241577148438, 0.06060851287841797, 0.06083139038085938, 0.06060748672485351, 0.06066790390014649, 
0.0607723503112793, 0.06071295928955078, 0.061001728057861325, 0.06082559967041016, 0.06142505645751953, 0.06065827178955078, 0.06072707366943359, 0.06061475372314453, 0.06080934524536133, 0.060843265533447266, 0.060613376617431644, 0.06085152053833008, 0.06069475173950195, 0.06047347259521484, 0.060500190734863284, 0.06060047912597656, 0.06400393676757812, 0.0615665283203125, 0.06071788787841797, 0.060574657440185545, 0.06045481491088867, 0.06060947036743164, 0.06063718414306641, 0.06057164764404297, 0.06053273773193359, 0.060626079559326175, 0.06063766479492187, 0.06044076919555664, 0.0604769287109375, 0.060504703521728515, 0.060702785491943356, 0.06116339111328125, 0.061479038238525394, 0.061152286529541015, 0.06093308639526367, 0.060781631469726566, 0.06053984069824219, 0.06049542236328125, 0.06049631881713867, 0.06048767852783203, 0.060606369018554686, 0.060512351989746097, 0.060450721740722656, 0.06088297653198242, 0.06070035171508789, 0.060506591796875, 0.060747295379638674, 0.06084422302246094, 0.061107681274414065, 0.06115395355224609, 0.06105913543701172, 0.06078668975830078, 0.06077439880371094, 0.06061590576171875, 0.06060521697998047, 0.06053059387207031, 0.06046675109863281, 0.06066230392456055, 0.06055116653442383, 0.06053478240966797, 0.06061670303344727, 0.06047654342651367, 0.06069747161865234, 0.060837886810302735, 0.060923263549804686, 0.0611354866027832, 0.061029441833496095, 0.06086275100708008, 0.06085494232177734, 0.060672000885009764, 0.0605629768371582, 0.06061308670043945, 0.060604320526123044, 0.06057788848876953, 0.06078668975830078, 0.06072659301757812, 0.06070105743408203, 0.0606129264831543, 0.06067161560058594]",tokens/s,16.4965293665045,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,881.737728,14646.378496,0.0,14243.856384,14221.3376,s,1,7.416140625,7.416140625,0.0,7.416140625,7.416140625,7.416140625,7.416140625,[7.416140625],,kWh,1.0886991324976709e-05,1.1934384775641619e-06,4.620281474000676e-06,1.6700711276541547e-05,,MB,1211.338752,14742.847488,0.0,14329.839616,14290.688,s,10,13.138295166015626,1.3138295166015626,0.006342446196092568,1.3112207641601563,1.3255044189453125,1.3259571655273439,1.3263193627929688,"[1.3136585693359375, 1.309341796875, 1.30727099609375, 1.3088173828125, 1.3111453857421875, 1.32540380859375, 1.3109852294921875, 1.3139659423828125, 1.311296142578125, 1.326409912109375]",tokens/s,194.8502425658592,kWh,3.832485108666939e-05,4.22676365275941e-06,2.5319548033399947e-05,6.787116277282875e-05,tokens/kWh,3771852.279249383,MB,1245.20448,14757.527552,0.0,14344.51968,14290.69056,s,10,38.796280029296874,3.8796280029296875,0.0020251822238042975,3.88016650390625,3.8821514892578124,3.8824365844726563,3.882664660644531,"[3.87691259765625, 3.8771923828125, 3.876863037109375, 3.87857177734375, 3.882088134765625, 3.880194091796875, 3.880827392578125, 3.880138916015625, 3.88077001953125, 
3.8827216796875]",tokens/s,16.238670293240943,kWh,0.0001133932943062473,1.2507942235545938e-05,7.551531041220042e-05,0.00020141654695399365,tokens/kWh,312784.62942962715,,s,630,38.79277999496464,0.06157584126184857,0.00021642238888502878,0.06156942367553711,0.06178697052001953,0.061869399642944335,0.06248925708770752,"[0.06248787307739258, 0.06146854400634766, 0.06123193740844726, 0.061249534606933595, 0.06121881484985352, 0.06145228958129883, 0.0614090576171875, 0.061300033569335936, 0.06129961776733398, 0.06146585464477539, 0.06120268630981445, 0.061245025634765624, 0.061262752532958986, 0.06140313720703125, 0.06133103942871094, 0.06180428695678711, 0.061659168243408204, 0.06164688110351563, 0.06150928115844727, 0.061563838958740236, 0.061396991729736325, 0.06140105438232422, 0.06144944000244141, 0.061389663696289065, 0.06144198226928711, 0.061458271026611326, 0.06135804748535156, 0.06136444854736328, 0.06132748794555664, 0.061523841857910155, 0.06151987075805664, 0.06153193664550781, 0.06150700759887695, 0.06153705596923828, 0.06154035186767578, 0.06154150390625, 0.06152486419677734, 0.06151987075805664, 0.06149280166625976, 0.06164524841308594, 0.06162361526489258, 0.06160044860839844, 0.061532161712646485, 0.06153603363037109, 0.061591838836669924, 0.061659072875976564, 0.06157132720947266, 0.06164025497436523, 0.0615978889465332, 0.06169190216064453, 0.06150716781616211, 0.06161667251586914, 0.06160166549682617, 0.06159564971923828, 0.061685760498046874, 0.06165043258666992, 0.061626880645751954, 0.06170828628540039, 0.0617529296875, 0.06176620864868164, 0.061689697265625, 0.06191487884521484, 0.06168396759033203, 0.06248134231567383, 0.06150758361816406, 0.061200382232666016, 0.06122086334228516, 0.061308704376220706, 0.06141523361206055, 0.06114102554321289, 0.0612437744140625, 0.06134783935546875, 0.06140742492675781, 0.06135980987548828, 0.061423744201660156, 0.06125158309936524, 0.061515777587890626, 0.0615546875, 0.06152601623535156, 0.06176153564453125, 0.06159759902954102, 0.06142575836181641, 0.0613458251953125, 0.061248703002929686, 0.06135683059692383, 0.061439998626708986, 0.06132121658325195, 0.06140518569946289, 0.061451648712158205, 0.06151785659790039, 0.06148566436767578, 0.06134492874145508, 0.061471584320068356, 0.06152601623535156, 0.061521663665771484, 0.06150486373901367, 0.06155356979370117, 0.061497344970703124, 0.061628414154052735, 0.061470718383789064, 0.061472286224365236, 0.06153263854980469, 0.06160793685913086, 0.06203945541381836, 0.06165795135498047, 0.061617919921875, 0.06160179138183594, 0.06151916885375976, 0.061719230651855465, 0.061644798278808595, 0.06159692764282226, 0.06152278518676758, 0.06154025650024414, 0.06160496139526367, 0.061672351837158204, 0.061591552734375, 0.06167871856689453, 0.06159151840209961, 0.06180752182006836, 0.061634334564208984, 0.061870048522949216, 0.06169216156005859, 0.06168384170532227, 0.061668415069580075, 0.061741920471191404, 0.06172463989257813, 0.062365535736083985, 0.061435585021972654, 0.06116191864013672, 0.061241249084472656, 0.061128799438476565, 0.06125315093994141, 0.061241825103759764, 0.06141084671020508, 0.061341697692871094, 0.06129507064819336, 0.06139424133300781, 0.061391551971435546, 0.06137651062011719, 0.06155238342285156, 0.06142736053466797, 0.061545055389404295, 0.06160793685913086, 0.061563934326171875, 0.06139388656616211, 0.061448192596435545, 0.061306880950927733, 0.06141952133178711, 0.061315071105957034, 0.061429759979248044, 0.06135507202148437, 0.061466720581054686, 
0.06141996765136719, 0.06152163314819336, 0.061462432861328124, 0.061497440338134764, 0.06157174301147461, 0.06148099136352539, 0.06152601623535156, 0.0616734733581543, 0.06151887893676758, 0.06152495956420898, 0.061456382751464846, 0.06145228958129883, 0.06161407852172852, 0.061548545837402345, 0.06148710250854492, 0.0615997428894043, 0.061515777587890626, 0.061652992248535154, 0.06163455963134765, 0.061666561126708985, 0.06194384002685547, 0.06160047912597656, 0.06161724853515625, 0.06175625610351562, 0.061550785064697265, 0.06169792175292969, 0.06156083297729492, 0.06168345642089844, 0.06160960006713867, 0.06173267364501953, 0.06160380935668945, 0.061774593353271484, 0.0617083854675293, 0.06170009613037109, 0.06171139144897461, 0.06185420989990234, 0.06173539352416992, 0.06251919937133789, 0.061505535125732425, 0.06121267318725586, 0.06123436737060547, 0.06116022491455078, 0.06122662353515625, 0.06122124862670898, 0.061233184814453126, 0.0611328010559082, 0.061456382751464846, 0.06146662521362305, 0.06143385696411133, 0.06133964920043945, 0.061421119689941406, 0.06139539337158203, 0.061629856109619144, 0.06159606552124024, 0.0619747200012207, 0.06156697463989258, 0.06146047973632812, 0.061439998626708986, 0.06136217498779297, 0.06144966506958008, 0.061407806396484375, 0.06136012649536133, 0.0615362548828125, 0.06150143814086914, 0.06155782318115234, 0.06154931259155273, 0.06144838333129883, 0.06144217681884766, 0.061505664825439454, 0.06150896072387695, 0.061577632904052736, 0.06156288146972656, 0.06174105453491211, 0.0615280647277832, 0.061538303375244144, 0.06166527938842774, 0.06153817749023437, 0.06156816101074219, 0.0616478385925293, 0.06165481567382813, 0.061868255615234374, 0.06158678436279297, 0.06158812713623047, 0.061644001007080076, 0.0615819206237793, 0.06161324691772461, 0.06165401458740234, 0.06161103820800781, 0.06204630279541016, 0.06159369659423828, 0.06169270324707031, 0.0616734733581543, 0.06179545593261719, 0.061647743225097654, 0.06166473770141601, 0.06166751861572266, 0.06177609634399414, 0.06168345642089844, 0.06169334411621094, 0.061879199981689455, 0.062473377227783206, 0.0615219841003418, 0.061296577453613284, 0.06124848175048828, 0.06161145782470703, 0.061241920471191404, 0.06122700881958008, 0.061454334259033204, 0.06123519897460938, 0.06122716903686523, 0.061207649230957034, 0.06157779312133789, 0.06154438400268555, 0.06167372894287109, 0.06157459259033203, 0.06181740951538086, 0.06172252655029297, 0.061655136108398435, 0.061532352447509764, 0.0614766731262207, 0.06136214447021485, 0.06170163345336914, 0.06145283126831055, 0.061720577239990235, 0.061439998626708986, 0.06170364761352539, 0.061596351623535155, 0.06163987350463867, 0.06178678512573242, 0.06171968078613281, 0.06179280090332031, 0.061968257904052734, 0.06175923156738281, 0.06196406555175781, 0.061719486236572266, 0.06166527938842774, 0.06151987075805664, 0.061587455749511716, 0.06151113510131836, 0.061433601379394534, 0.0614197769165039, 0.061607521057128904, 0.06149801635742187, 0.06165436935424805, 0.06147782516479492, 0.06154444885253906, 0.0616036491394043, 0.06184364700317383, 0.06184550476074219, 0.06183321762084961, 0.06175948715209961, 0.061838497161865236, 0.061784671783447265, 0.06174054336547852, 0.06173567962646485, 0.06167552185058594, 0.061583518981933594, 0.06176054382324219, 0.06156371307373047, 0.0617589111328125, 0.06156041717529297, 0.06166207885742187, 0.06164080047607422, 0.06258070373535156, 0.0615464973449707, 0.061249568939208986, 0.06119366455078125, 0.061209152221679684, 
0.061392478942871094, 0.06115164947509766, 0.0611610221862793, 0.0612479362487793, 0.06128163146972656, 0.06128844833374023, 0.0614546890258789, 0.06137478256225586, 0.06152601623535156, 0.06159273529052734, 0.06167228698730469, 0.061704193115234375, 0.06174105453491211, 0.061562271118164064, 0.0614815673828125, 0.061384288787841794, 0.0614661750793457, 0.06129699325561523, 0.06131558227539063, 0.06133350372314453, 0.061497119903564455, 0.06145561599731445, 0.061668319702148436, 0.061710334777832034, 0.06162227249145508, 0.061570144653320315, 0.061620800018310544, 0.061572769165039065, 0.06198860931396484, 0.061653377532958985, 0.061692192077636716, 0.06151200103759766, 0.061689823150634766, 0.06142748641967773, 0.06259062576293946, 0.06147052764892578, 0.06149388885498047, 0.0615324478149414, 0.061640544891357424, 0.06177587127685547, 0.061695999145507815, 0.061738815307617184, 0.06166473770141601, 0.06165385437011719, 0.061663326263427735, 0.061615745544433595, 0.06170435333251953, 0.06162192153930664, 0.06167958450317383, 0.06180204772949219, 0.06156780624389648, 0.0617611198425293, 0.06171279907226562, 0.06168524932861328, 0.061667839050292966, 0.061652992248535154, 0.06171977615356445, 0.06185859298706055, 0.06254256057739258, 0.061573089599609374, 0.061484382629394534, 0.06144480133056641, 0.06130201721191406, 0.061261886596679686, 0.06119196701049805, 0.061301025390625, 0.061284671783447264, 0.06135830307006836, 0.06136556625366211, 0.061528190612792966, 0.06151382446289062, 0.061514209747314454, 0.061620384216308596, 0.06161577606201172, 0.0615909423828125, 0.06166003036499024, 0.06149324798583984, 0.06156636810302735, 0.061596256256103515, 0.06168076705932617, 0.061352737426757814, 0.0613223991394043, 0.06145119857788086, 0.06149529647827148, 0.061538303375244144, 0.061685760498046874, 0.061468479156494144, 0.061572383880615235, 0.06160601425170899, 0.06160841751098633, 0.06155500793457031, 0.06168985748291016, 0.06152703857421875, 0.06159983825683594, 0.061508544921875, 0.06205641555786133, 0.06156697463989258, 0.06152339172363281, 0.061868606567382814, 0.061619998931884766, 0.06155027389526367, 0.061674015045166015, 0.061693984985351565, 0.06171839904785156, 0.06179235076904297, 0.06171136093139649, 0.06160819244384766, 0.06175126266479492, 0.06161692810058594, 0.061668575286865236, 0.06149193572998047, 0.06153631973266602, 0.0616275520324707, 0.06167228698730469, 0.06169107055664062, 0.061715103149414065, 0.061667488098144534, 0.061818878173828126, 0.061794303894042966, 0.06181228637695312, 0.06174873733520508, 0.062489822387695314, 0.06150835037231445, 0.061259777069091796, 0.06140518569946289, 0.06119833755493164, 0.06139871978759766, 0.06130720138549805, 0.06121638488769531, 0.06118950271606445, 0.06140959930419922, 0.061407936096191405, 0.06151126480102539, 0.06144435119628906, 0.06157670211791992, 0.06155107116699219, 0.061611774444580075, 0.06160579299926758, 0.061671966552734374, 0.06143590545654297, 0.06141132736206055, 0.06143791961669922, 0.06137971115112305, 0.06152489471435547, 0.06153964614868164, 0.0613741455078125, 0.06157823944091797, 0.06145843124389649, 0.06189215850830078, 0.061647296905517575, 0.06161612701416016, 0.061568702697753906, 0.06155401611328125, 0.061528190612792966, 0.06164566421508789, 0.0615280647277832, 0.06172230529785156, 0.06151119995117187, 0.061580062866210934, 0.06169968032836914, 0.0616346549987793, 0.06154595184326172, 0.061593887329101565, 0.061549121856689454, 0.061753345489501954, 0.06166892623901367, 0.06169440078735351, 
0.06159312057495117, 0.06166150283813476, 0.061585662841796875, 0.06165862274169922, 0.06168364715576172, 0.061593536376953126, 0.06154089736938476, 0.06163983917236328, 0.06179283142089844, 0.06179459381103516, 0.061647903442382815, 0.06169411087036133, 0.06167219161987305, 0.06176361465454101, 0.06178976058959961, 0.06185007858276367, 0.061942977905273436, 0.062494720458984375, 0.061537025451660156, 0.06136848068237305, 0.06137452697753906, 0.061224960327148435, 0.06121376037597656, 0.06117881774902344, 0.06131257629394531, 0.061233665466308596, 0.061247425079345705, 0.06139289474487305, 0.06174105453491211, 0.06156083297729492, 0.06164879989624023, 0.061519809722900394, 0.06171254348754883, 0.06156691360473633, 0.061582462310791015, 0.0615167350769043, 0.06149955368041992, 0.06150332641601562, 0.06150348663330078, 0.06135772705078125, 0.06148729705810547, 0.06141763305664062, 0.061414463043212894, 0.06144044876098633, 0.06153574371337891, 0.0615392951965332, 0.061591358184814454, 0.06152556610107422, 0.06174582290649414, 0.06163008117675781, 0.06171891021728516, 0.06162214279174805, 0.061839073181152344, 0.06187868881225586, 0.06171648025512695, 0.06173081588745117, 0.06145843124389649, 0.06156492614746094, 0.061532161712646485, 0.06152326583862305, 0.06174745559692383, 0.061682113647460936, 0.0615813102722168, 0.06157075119018555, 0.06164092636108399, 0.06176502227783203, 0.06179411315917969, 0.061776351928710935, 0.06191145706176758, 0.061779617309570316, 0.06178863906860352, 0.06165804672241211, 0.061703102111816406, 0.06159283065795899, 0.061723392486572264, 0.06183686447143555, 0.06174550247192383, 0.06158755111694336, 0.06168166351318359, 0.061660606384277346, 0.06267039871215821, 0.061680065155029294, 0.06140108871459961, 0.06127974319458008, 0.06126233673095703, 0.06130483245849609, 0.0612044792175293, 0.06129654312133789, 0.06118204879760742, 0.06129180908203125, 0.06143356704711914, 0.06157183837890625, 0.061550304412841796, 0.06181942367553711, 0.061515777587890626, 0.062176639556884766, 0.061620864868164066, 0.06171852874755859, 0.06154415893554688, 0.06153788757324219, 0.06157571029663086, 0.06143401718139648, 0.06128044891357422, 0.06151353454589844, 0.0614005126953125, 0.061489727020263674, 0.06138409423828125, 0.0615081901550293, 0.06169190216064453, 0.06173286437988281, 0.06172876739501953, 0.06180044937133789, 0.06180454254150391, 0.061988319396972656, 0.06169977569580078, 0.061747390747070315, 0.061970718383789064, 0.061520256042480466, 0.06141952133178711, 0.06147686386108398, 0.061468128204345704, 0.06149343872070313, 0.061505889892578124, 0.061663230895996096, 0.06154444885253906, 0.06169606399536133, 0.061674625396728515, 0.0616927375793457, 0.061925376892089844, 0.06187007904052735, 0.06173286437988281, 0.061835262298583986, 0.06175129699707031, 0.061763072967529295, 0.0616473617553711, 0.061677566528320314, 0.06155059051513672, 0.06169804763793945, 0.06158143997192383, 0.06172227096557617, 0.06170345687866211, 0.062134273529052736, 0.061836223602294925]",tokens/s,16.240135408748113,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, 
bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ 
self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = 
nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = 
OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 374, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 253, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 117, in __init__ self.q_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 128194 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1121, in __init__ self.embed_out = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 592.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 300.12 MiB is free. Process 195153 has 14.45 GiB memory in use. Of the allocated memory 14.33 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp067jjgdl/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp6j38625m/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,810.139648,14639.104,0.0,14243.856384,14221.3376,s,1,7.5382119140625,7.5382119140625,0.0,7.5382119140625,7.5382119140625,7.5382119140625,7.5382119140625,[7.5382119140625],,kWh,1.5008527887503457e-05,1.6481913225325745e-06,8.933062701999006e-06,2.558978191203504e-05,,MB,1123.958784,14735.572992,0.0,14329.839616,14290.688,s,10,14.040768432617188,1.4040768432617188,0.0048848589672792955,1.405280517578125,1.4091292724609374,1.4101729736328126,1.4110079345703126,"[1.3953970947265626, 1.399153076171875, 1.3992520751953126, 1.400876953125, 1.404310302734375, 1.4079962158203125, 1.40741796875, 1.4112166748046875, 1.4088973388671875, 1.406250732421875]",tokens/s,182.32620331897448,kWh,4.105210496041688e-05,4.5276012484695975e-06,2.716368839760005e-05,7.274339460648653e-05,tokens/kWh,3519219.873981142,MB,1152.958464,14750.253056,0.0,14344.51968,14290.69056,s,10,39.413060791015624,3.9413060791015617,0.003247624827919594,3.9413275146484374,3.943664404296875,3.946299365234375,3.948407333984375,"[3.9380703125, 3.93835595703125, 3.936633544921875, 3.941100830078125, 3.94066552734375, 3.94183056640625, 3.94155419921875, 3.942836669921875, 3.943078857421875, 3.948934326171875]",tokens/s,15.984548963109491,kWh,0.00011535337135291625,1.2724405187126664e-05,7.679317254560004e-05,0.00020487094908564293,tokens/kWh,307510.65625055454,,s,630,39.40928758239751,0.0625544247339642,0.0002290329818859967,0.06253827095031739,0.0628556354522705,0.06295140991210937,0.06317164352416993,"[0.06301465606689453, 0.062229217529296874, 0.062402591705322266, 0.062215137481689456, 0.06197545623779297, 0.06248233413696289, 0.062167102813720704, 0.06237936019897461, 0.06253241729736328, 0.06231763076782226, 0.062069793701171876, 0.062396289825439454, 0.06242675018310547, 0.06287807846069336, 0.06252073669433594, 0.06271958541870117, 0.062311424255371096, 0.06230534362792969, 0.06220233535766601, 0.062204383850097654, 0.061972415924072266, 0.0622490234375, 0.06250291061401367, 0.062382080078125, 0.06239846420288086, 0.06239641571044922, 0.062407936096191406, 0.062497535705566404, 0.06253321456909179, 0.0627347526550293, 0.06249676895141602, 0.06255820846557616, 0.06289801788330078, 0.06245977783203125, 0.062368030548095706, 0.06234294509887695, 0.062488800048828126, 0.06255193710327149, 0.062400192260742185, 0.06242566299438477, 0.06238422393798828, 0.06246377563476563, 0.06233283233642578, 0.06340208053588867, 0.0627231674194336, 0.06289497756958008, 0.06259478378295899, 0.06267526245117187, 0.06269132614135742, 0.06253772735595703, 0.06240256118774414, 0.06249628829956055, 0.062396030426025394, 0.0631099853515625, 0.06281798553466797, 0.06265478515625, 0.06256028747558594, 0.0625909423828125, 0.06259423828125, 0.06275360107421875, 0.0624189453125, 0.06265862274169921, 
0.06274041748046875, 0.06332227325439453, 0.06269110488891602, 0.0620689582824707, 0.062261280059814454, 0.062285247802734374, 0.06233961486816406, 0.061960193634033205, 0.06228124618530274, 0.06250294494628907, 0.0627242546081543, 0.06233660888671875, 0.06266336059570313, 0.06259219360351563, 0.06235638427734375, 0.06241059112548828, 0.06237571334838867, 0.062286113739013674, 0.06242502212524414, 0.062356990814208986, 0.0627116470336914, 0.06240943908691406, 0.06232835388183594, 0.06231497573852539, 0.062282783508300785, 0.06221676635742188, 0.06266307067871094, 0.062443519592285154, 0.0624947509765625, 0.06279727935791016, 0.06278505706787109, 0.06251824188232422, 0.062410751342773435, 0.06253500747680664, 0.06250358581542968, 0.06235033416748047, 0.06241094589233399, 0.062329662322998046, 0.06229756927490234, 0.062376480102539066, 0.062328865051269534, 0.06246329498291016, 0.06270383834838868, 0.0628023681640625, 0.06274662399291993, 0.06262764739990234, 0.06250617599487304, 0.06235855865478516, 0.06241494369506836, 0.06248767852783203, 0.06250777435302735, 0.062912353515625, 0.06270582580566406, 0.06268313598632813, 0.06253263854980469, 0.06242201614379883, 0.06253769683837891, 0.06255599975585938, 0.06254784011840821, 0.06275305557250976, 0.0627856330871582, 0.0629466552734375, 0.06270601654052735, 0.0625539207458496, 0.06293852615356445, 0.0622372817993164, 0.06213014221191406, 0.062183521270751954, 0.06217932891845703, 0.0624202880859375, 0.06216099166870117, 0.06233967971801758, 0.06250086212158203, 0.06250627136230469, 0.0625650863647461, 0.062306304931640626, 0.06214041519165039, 0.06239231872558594, 0.06254991912841797, 0.06258899307250977, 0.06262543869018555, 0.06276729583740234, 0.06234857559204102, 0.06225603103637695, 0.06215238571166992, 0.06246368026733398, 0.06224140930175781, 0.06246604919433594, 0.062273536682128906, 0.06258892822265626, 0.06288544082641602, 0.062472640991210936, 0.06227305603027344, 0.06248291015625, 0.06254959869384766, 0.06262214279174805, 0.06241891098022461, 0.06261759948730469, 0.06264774322509765, 0.06248883056640625, 0.06232633590698242, 0.062416831970214845, 0.06231865692138672, 0.06256921768188477, 0.062316574096679685, 0.06247222518920898, 0.0625458869934082, 0.06247529602050781, 0.06251820755004883, 0.0627094383239746, 0.06255791854858399, 0.06266684722900391, 0.06259107208251953, 0.06271590423583985, 0.0625115852355957, 0.06242720031738281, 0.062319583892822265, 0.06252022552490234, 0.06263001632690429, 0.06289395141601563, 0.06253158569335937, 0.06262707138061524, 0.06262246322631836, 0.06255324935913086, 0.062499679565429685, 0.06261350250244141, 0.06258483123779297, 0.0634106559753418, 0.06242652893066406, 0.06216953659057617, 0.062212032318115236, 0.062185630798339844, 0.06234511947631836, 0.062220287322998044, 0.06227507019042969, 0.06251718521118164, 0.062281982421875, 0.06216633605957031, 0.06251417541503906, 0.06242230224609375, 0.06253846359252929, 0.062394367218017575, 0.06223388671875, 0.062405441284179686, 0.06263596725463867, 0.06250086212158203, 0.06238819122314453, 0.06207078552246094, 0.062227680206298826, 0.06280047988891602, 0.06242700958251953, 0.06267939376831054, 0.06247366333007812, 0.06235087966918945, 0.06262790298461914, 0.06261238479614258, 0.06269286346435547, 0.06244972610473633, 0.062419296264648434, 0.0627276496887207, 0.06273500823974609, 0.06277072143554688, 0.06293532943725585, 0.06271811294555664, 0.062312095642089844, 0.062371711730957034, 0.06251772689819336, 0.06245587158203125, 0.06246118545532227, 
0.06244966506958008, 0.0627207374572754, 0.06277228927612305, 0.06295641708374024, 0.062644287109375, 0.06251484680175781, 0.06247663879394531, 0.06255516815185547, 0.06256940841674805, 0.06266652679443359, 0.062486785888671875, 0.06263391876220703, 0.06278729629516601, 0.06276335906982422, 0.06284297561645508, 0.06266979217529296, 0.06253577423095703, 0.06285523223876953, 0.06299523162841797, 0.06285286331176758, 0.06290422439575195, 0.06305772781372071, 0.06225526428222656, 0.062169281005859375, 0.06220800018310547, 0.062195358276367185, 0.062406368255615234, 0.06218937683105469, 0.06230307388305664, 0.0623595199584961, 0.062455329895019535, 0.062519775390625, 0.06225823974609375, 0.062292831420898434, 0.06233715057373047, 0.062488544464111326, 0.06287062454223633, 0.06252803039550782, 0.06259955215454102, 0.06234112167358399, 0.062304031372070315, 0.062333152770996096, 0.06255820846557616, 0.062216159820556644, 0.06237392044067383, 0.06270083236694336, 0.06277548980712891, 0.06260377502441407, 0.06266643142700196, 0.06256880187988281, 0.06241689682006836, 0.062470142364501956, 0.06276300811767578, 0.06253673553466797, 0.062388225555419924, 0.06237216186523437, 0.06257251358032226, 0.06256438446044922, 0.06264284896850586, 0.06272735977172851, 0.0626102409362793, 0.0626684799194336, 0.06266502380371093, 0.06284672164916992, 0.06243318557739258, 0.06276108932495117, 0.062619873046875, 0.06270115280151367, 0.06265078353881835, 0.062437374114990236, 0.06260265731811523, 0.062473857879638675, 0.06240262222290039, 0.06235023880004883, 0.06258470535278321, 0.06271603012084961, 0.06295142364501953, 0.06271753692626954, 0.06297027206420898, 0.06266060638427734, 0.06264012908935547, 0.06292835235595703, 0.0626849594116211, 0.06275513458251954, 0.0631978874206543, 0.062252639770507816, 0.06214083099365234, 0.06196364974975586, 0.06255007934570313, 0.062578369140625, 0.062434177398681644, 0.06225715255737305, 0.06245580673217774, 0.06233695983886719, 0.062390335083007814, 0.062473342895507815, 0.06230499267578125, 0.06231260681152344, 0.06221619033813477, 0.06243260955810547, 0.06287635040283203, 0.06271996688842774, 0.06230220794677734, 0.06239004898071289, 0.062288127899169925, 0.06266249465942383, 0.06271603012084961, 0.06234521484375, 0.062416927337646484, 0.0625656967163086, 0.06255683135986329, 0.06266470336914062, 0.06252044677734375, 0.06263897705078125, 0.06251849746704101, 0.06256105422973633, 0.06273843383789063, 0.06262086486816407, 0.06225132751464844, 0.06250342559814454, 0.06252748870849609, 0.06261356735229492, 0.06278953552246094, 0.0625316162109375, 0.06254182434082031, 0.06255785751342774, 0.06260976028442383, 0.0626319351196289, 0.06247219085693359, 0.06250495910644531, 0.06271491241455078, 0.06287980651855468, 0.06284281539916992, 0.06256534576416016, 0.06244895935058594, 0.062576416015625, 0.06257977676391602, 0.06276889419555665, 0.06302278518676757, 0.0627729606628418, 0.0628037109375, 0.06280239868164063, 0.06281036758422852, 0.06300227355957032, 0.06278204727172852, 0.0626769905090332, 0.06248239898681641, 0.06316787338256837, 0.06242889785766602, 0.06221030426025391, 0.062058368682861326, 0.06238063812255859, 0.06222652816772461, 0.062117889404296876, 0.06243673706054687, 0.0625814094543457, 0.06250492858886719, 0.0627732810974121, 0.062349281311035155, 0.06234454345703125, 0.06228044891357422, 0.06233216094970703, 0.06267337417602539, 0.06269318389892578, 0.06286374282836914, 0.06248448181152344, 0.062339038848876954, 0.062426944732666016, 0.06212944030761719, 
0.06226220703125, 0.062386207580566407, 0.06247011184692383, 0.06273833465576172, 0.06254131317138673, 0.06270550537109375, 0.06250576019287109, 0.06247625732421875, 0.06254771041870118, 0.06278374481201172, 0.0625860481262207, 0.06286214447021485, 0.06241628646850586, 0.06263868713378906, 0.06245775985717773, 0.06260528182983398, 0.0625830078125, 0.06261955261230469, 0.06253363037109375, 0.06256633758544922, 0.062475841522216795, 0.0625558090209961, 0.0625590705871582, 0.0626770896911621, 0.06266668701171875, 0.06276006317138672, 0.06269369506835938, 0.06278355026245117, 0.06251375961303711, 0.06251660919189453, 0.06242969512939453, 0.06260943984985351, 0.0630123519897461, 0.0627163848876953, 0.06255759811401367, 0.06290697479248047, 0.06272819137573242, 0.0628592643737793, 0.062740478515625, 0.06270361709594727, 0.06262979125976563, 0.06318255996704102, 0.062269599914550784, 0.06216511917114258, 0.062209022521972655, 0.06236656188964844, 0.0624901123046875, 0.06224137496948242, 0.06230227279663086, 0.0625541114807129, 0.0627276496887207, 0.062277568817138675, 0.06256646347045898, 0.06241535949707031, 0.06251087951660156, 0.06259875106811523, 0.06269161605834961, 0.06253948974609375, 0.06244572830200195, 0.06253807830810547, 0.06231670379638672, 0.062205951690673826, 0.06230809783935547, 0.06222441482543945, 0.06222396850585937, 0.06266329574584961, 0.0627691535949707, 0.06263603210449219, 0.0628326416015625, 0.06250675201416016, 0.06260268783569337, 0.06282118225097656, 0.06252953720092773, 0.06229196929931641, 0.06249679946899414, 0.06246806335449219, 0.06257664108276367, 0.062349342346191404, 0.062487583160400394, 0.062446529388427735, 0.06250230407714844, 0.06258544158935547, 0.06250291061401367, 0.06247622299194336, 0.06269343948364257, 0.06265651321411132, 0.06283065414428711, 0.06296979141235351, 0.06273027038574219, 0.06263804626464843, 0.0625885124206543, 0.06251356887817383, 0.06255401611328125, 0.062304031372070315, 0.06287308883666992, 0.06284371185302734, 0.06287360000610352, 0.06291254425048828, 0.06288927841186523, 0.06267878341674804, 0.06300969696044922, 0.06287926483154296, 0.06291215896606445, 0.06317318344116211, 0.06300243377685547, 0.0621956787109375, 0.062082942962646485, 0.062284286499023435, 0.062397823333740235, 0.06235529708862304, 0.06255081558227539, 0.06257459259033203, 0.062467838287353514, 0.06260966491699219, 0.06244147109985351, 0.062281726837158206, 0.06223427200317383, 0.062494686126708984, 0.06245974349975586, 0.06258848190307617, 0.06235030364990234, 0.06236735916137695, 0.06239401626586914, 0.062362335205078126, 0.062255104064941405, 0.06236511993408203, 0.06247436904907227, 0.06276959991455078, 0.06257254409790039, 0.06281126403808594, 0.0627534065246582, 0.0625973777770996, 0.06246745681762695, 0.06252108764648437, 0.062473087310791015, 0.06248432159423828, 0.06224297714233398, 0.06265856170654296, 0.06258480072021484, 0.06259238433837891, 0.06244419097900391, 0.06264012908935547, 0.06264403152465821, 0.06293318557739258, 0.06257600021362304, 0.06259779357910156, 0.06295139312744141, 0.06298009490966797, 0.06270361709594727, 0.062814208984375, 0.06276451110839844, 0.06250345611572265, 0.06233705520629883, 0.0624964485168457, 0.06267516708374024, 0.06274208068847656, 0.06265292739868164, 0.06295062255859375, 0.06288668823242187, 0.06278963088989258, 0.06269337463378906, 0.0628056640625, 0.06295695877075196, 0.06284384155273437, 0.06281795120239257, 0.06277155303955079, 0.06258483123779297, 0.06310518264770508, 0.06265372848510742, 
0.06231711959838867, 0.06251929473876953, 0.062287200927734376, 0.06242355346679687, 0.062416702270507815, 0.06248483276367187, 0.062740478515625, 0.06252953720092773, 0.06231449508666992, 0.06223180770874023, 0.06230435180664062, 0.0627344970703125, 0.06274054336547852, 0.06266518402099609, 0.0627421760559082, 0.06252988815307617, 0.06260451126098633, 0.062488414764404296, 0.06236662292480469, 0.062422271728515624, 0.06280476760864258, 0.06304111862182617, 0.06304764938354492, 0.06302102279663085, 0.0627204475402832, 0.062461952209472656, 0.06250086212158203, 0.06234483337402344, 0.06247257614135742, 0.06258428955078126, 0.06279363250732421, 0.06295971298217773, 0.0624686393737793, 0.06277088165283203, 0.06257430267333984, 0.0625547218322754, 0.0627476806640625, 0.06265472030639649, 0.06250979232788086, 0.06255567932128907, 0.06261759948730469, 0.06296134567260742, 0.06260406494140625, 0.06279116821289063, 0.06278927993774414, 0.06263040161132813, 0.0630071029663086, 0.06263123321533202, 0.06268175888061524, 0.06276265716552734, 0.06282070541381836, 0.06276857757568359, 0.06290899276733398, 0.06280944061279296, 0.06292752075195313, 0.06318694305419922, 0.06268928146362304, 0.06277260971069336, 0.06304412841796875, 0.06288790512084962, 0.06301507186889649]",tokens/s,15.986079390112998,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 461, in __init__ super().__init__(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 353, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 154878 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample 
outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1235, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1037, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 763, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 557, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpv4ht1_zd/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1174, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 894, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 507, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 436, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in 
run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 171470 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 270, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 85998 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 905.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 73947 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return 
func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, 
out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpak6ibufh/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 1248, in __init__ self.transformer = FalconModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in __init__ self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 721, in __init__ self.self_attention = FALCON_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 587, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 366, in __init__ self.query_key_value = FalconLinear(self.hidden_size, qkv_out_dim, bias=config.bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 450.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 424.12 MiB is free. Process 204812 has 14.32 GiB memory in use. Of the allocated memory 14.20 GiB is allocated by PyTorch, and 6.16 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 757, in __init__ self.block_sparse_moe = MixtralSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in __init__ self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 672, in __init__ self.w3 = nn.Linear(self.hidden_dim, self.ffn_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 100.12 MiB is free. Process 181098 has 14.64 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 49.54 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in <listcomp> self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 374, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 253, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 116, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 125389 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1262, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in <listcomp> [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 797, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 492, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 402, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 82871 has 14.73 GiB memory in use. Of the allocated memory 12.27 GiB is allocated by PyTorch, and 2.34 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 23262 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 718, in __init__ self.dense_4h_to_h = 
nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 176.12 MiB is free. Process 47651 has 14.57 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in 
_autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmptuhnrs2t/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpz3z_5brg/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 270, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 70957 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 905.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in 
from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 234.12 MiB is free. Process 168255 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in 
run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 362.12 MiB is free. Process 174447 has 14.38 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 1.78 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward 
outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 76987 has 14.69 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 1.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 272, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 406.12 MiB is free. Process 79981 has 14.34 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 1.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in 
_check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp4u1i827u/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ 
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 160717 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 1.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 
898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 461, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 354, in __init__ self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 157740 has 14.74 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 90.39 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 461, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 353, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 165352 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpem1mf6ks/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in 
load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 717, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 188.12 MiB is free. Process 39628 has 14.55 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.45 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 755, in __init__ self.self_attn = MIXTRAL_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", 
line 349, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 72.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 38.12 MiB is free. Process 184129 has 14.70 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 41.65 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 510, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 280, in __init__ super().__init__(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 201, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 96856 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample 
outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in <listcomp> self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 374, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 253, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 116, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 122630 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 88995 has 14.69 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 1.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp8s6lot5x/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 990, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 565, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 140, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 139821 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 788, in __init__ self.ffn = DbrxFFN(config=config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 765, in __init__ self.experts = DbrxExperts( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 704, in __init__ self.mlp = DbrxExpertGLU( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 682, in __init__ self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 111317 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1133, in __init__ self.model = StableLmModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in __init__ [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 688, in __init__ self.self_attn = ATTENTION_CLASSES[config._attn_implementation](config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 572, in __init__ super().__init__(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 339, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.use_qkv_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 14.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 191095 has 14.73 GiB memory in use. Of the allocated memory 14.54 GiB is allocated by PyTorch, and 78.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 743, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 368, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 175, in __init__ self.dense = nn.Linear(config.hidden_size, config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 42533 has 14.73 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 20.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in 
run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp7vf5wkz8/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 362.12 MiB is free. Process 505245 has 14.38 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 1.78 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 106.12 MiB is free. Process 37531 has 14.63 GiB memory in use. Of the allocated memory 14.52 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.62 GiB. GPU 0 has a total capacity of 14.74 GiB of which 470.12 MiB is free. Process 34193 has 14.28 GiB memory in use. Of the allocated memory 14.16 GiB is allocated by PyTorch, and 1.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 234.12 MiB is free. Process 500263 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 106.12 MiB is free. Process 44799 has 14.63 GiB memory in use. Of the allocated memory 14.52 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 896.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 598.12 MiB is free. Process 504428 has 14.15 GiB memory in use. Of the allocated memory 14.04 GiB is allocated by PyTorch, and 1.81 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 106.12 MiB is free. Process 27185 has 14.63 GiB memory in use. Of the allocated memory 14.52 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1118, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 504016 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, 
layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 466, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 357, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 497998 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 466, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 357, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 497578 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 234.12 MiB is free. Process 499178 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 357, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 496799 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 43935 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 287, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 150.12 MiB is free. Process 483086 has 14.59 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.43 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.62 GiB. GPU 0 has a total capacity of 14.74 GiB of which 470.12 MiB is free. Process 34623 has 14.28 GiB memory in use. Of the allocated memory 14.16 GiB is allocated by PyTorch, and 1.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in 
__init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.62 GiB. GPU 0 has a total capacity of 14.74 GiB of which 470.12 MiB is free. Process 32833 has 14.28 GiB memory in use. Of the allocated memory 14.16 GiB is allocated by PyTorch, and 1.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 
3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 362.12 MiB is free. Process 506027 has 14.38 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 1.78 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 362.12 MiB is free. Process 506767 has 14.38 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 1.78 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 28188 has 14.22 GiB memory in use. 
Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), 
**factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 39491 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in 
range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 357, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 495666 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 38974 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. 
GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 38078 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in 
range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 234.12 MiB is free. Process 501045 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, 
in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 896.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 342.12 MiB is free. Process 498393 has 14.40 GiB memory in use. Of the allocated memory 14.29 GiB is allocated by PyTorch, and 1.75 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. 
GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 29069 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.72416,13880.918016,0.0,13478.395904,13476.849152,s,1,7.63624560546875,7.63624560546875,0.0,7.63624560546875,7.63624560546875,7.63624560546875,7.63624560546875,[7.63624560546875],,kWh,8.652274633413981e-06,9.469554864099241e-07,4.83083719801658e-06,1.4430067317840486e-05,,MB,1194.377216,14115.79904,0.0,13702.791168,13671.637504,s,10,12.496803100585936,1.2496803100585938,0.004194732782403974,1.2506327514648437,1.2536902221679689,1.2544726135253907,1.2550985266113281,"[1.2408824462890624, 1.2439547119140626, 1.247958740234375, 1.2495574951171875, 1.249542236328125, 1.252339111328125, 1.2520889892578124, 1.2517080078125, 1.253516357421875, 1.2552550048828126]",tokens/s,204.85239139920267,kWh,3.6487713447495484e-05,4.0241239682494765e-06,2.418493601459848e-05,6.469677343034344e-05,tokens/kWh,3956920.668936071,MB,1242.394624,14115.79904,0.0,13702.791168,13671.640064,s,10,37.76328686523438,3.7763286865234376,0.002612174445611486,3.7771662597656253,3.7790798828125,3.779771142578125,3.780324150390625,"[3.77293896484375, 3.78046240234375, 3.7727353515625, 3.7776298828125, 3.77816455078125, 3.77892626953125, 3.77426513671875, 3.77794677734375, 3.77670263671875, 3.773514892578125]",tokens/s,16.68286985315334,kWh,0.00011030082674042053,1.2167050641991942e-05,7.334400311960198e-05,0.00019581188050201443,tokens/kWh,321737.37282172666,,s,630,37.75975438308714,0.05993611806839231,0.00027505089202098067,0.05992228889465332,0.06021547660827637,0.06028717460632324,0.06127072952270508,"[0.061268638610839844, 0.059604385375976565, 0.05971177673339844, 0.05959532928466797, 0.05959619140625, 0.059523681640625, 0.059412353515625, 0.059506847381591794, 0.05935254287719727, 0.059582271575927735, 0.05988832092285156, 0.05979862213134766, 0.05962435150146484, 0.05953740692138672, 0.05957632064819336, 0.05958364868164062, 0.05983113479614258, 0.05964380645751953, 0.05954159927368164, 0.05971148681640625, 0.05959491348266602, 0.05965619277954102, 0.059776863098144534, 0.05985657501220703, 0.059730239868164066, 0.059789310455322264, 0.0598238410949707, 0.06010809707641602, 0.06022243118286133, 0.05987139129638672, 0.05983583831787109, 0.05979331207275391, 0.059912960052490236, 0.059784961700439454, 0.05981184005737305, 0.05975417709350586, 0.05971366500854492, 0.05971955108642578, 
0.05971564865112305, 0.060125438690185544, 0.05982633590698242, 0.05999705505371094, 0.05992950439453125, 0.060058719635009764, 0.06001968002319336, 0.06011084747314453, 0.05998553466796875, 0.05995148849487305, 0.05995872116088867, 0.06025068664550781, 0.06004537582397461, 0.06000617599487305, 0.06002908706665039, 0.06013132858276367, 0.06010675048828125, 0.060085823059082034, 0.06015423965454102, 0.06011500930786133, 0.0603770866394043, 0.06023987197875977, 0.06022348785400391, 0.06037299346923828, 0.06012518310546875, 0.06136214447021485, 0.05978521728515625, 0.05958860778808594, 0.05958038330078125, 0.05950387191772461, 0.05991465759277344, 0.05966400146484375, 0.05963443374633789, 0.05982620620727539, 0.0596475830078125, 0.05964223861694336, 0.05967871856689453, 0.05976614379882812, 0.05982886505126953, 0.05991427230834961, 0.06004323196411133, 0.06020915222167969, 0.06025363159179688, 0.060028736114501956, 0.05992060852050781, 0.05997795104980469, 0.05984185409545899, 0.05995008087158203, 0.059692096710205075, 0.060023391723632816, 0.0598694076538086, 0.0597525749206543, 0.05982822418212891, 0.05991584014892578, 0.06002435302734375, 0.06010502243041992, 0.060219135284423825, 0.060305950164794925, 0.060139873504638675, 0.06011081695556641, 0.0600184326171875, 0.059977535247802735, 0.0598974723815918, 0.059824512481689456, 0.05979795074462891, 0.059873279571533204, 0.0599285774230957, 0.05994208145141602, 0.05992736053466797, 0.06007398223876953, 0.06004326248168945, 0.060020736694335934, 0.06015097427368164, 0.060095294952392575, 0.06032179260253906, 0.060139041900634765, 0.06013180923461914, 0.06021324920654297, 0.0602088623046875, 0.05999407958984375, 0.05999849700927734, 0.06017846298217774, 0.0603873291015625, 0.06017433547973633, 0.06016204833984375, 0.060295169830322265, 0.060368064880371095, 0.06041273498535156, 0.06114681625366211, 0.05976505661010742, 0.05951897430419922, 0.05948604965209961, 0.059545566558837894, 0.05962361526489258, 0.05950678253173828, 0.05953116989135742, 0.059627521514892576, 0.05984460830688477, 0.05956332778930664, 0.05972377777099609, 0.059779232025146484, 0.05970793533325195, 0.05980364990234375, 0.05995100784301758, 0.05994915390014648, 0.060017982482910154, 0.05983097457885742, 0.059875167846679685, 0.05971574401855469, 0.05971286392211914, 0.059652767181396484, 0.05988943862915039, 0.05977724838256836, 0.05967462539672851, 0.05972323226928711, 0.05977724838256836, 0.059998207092285157, 0.05985721588134765, 0.05970534515380859, 0.05976883316040039, 0.059967201232910154, 0.059822208404541014, 0.059691169738769534, 0.05973603057861328, 0.0596910400390625, 0.05993212890625, 0.059799808502197266, 0.05997715377807617, 0.05987929534912109, 0.059915233612060546, 0.05987737655639649, 0.05993881607055664, 0.060006401062011716, 0.060098560333251956, 0.06008185577392578, 0.06005110549926758, 0.05995792007446289, 0.06010265731811523, 0.059925983428955075, 0.06028752136230469, 0.06036633682250977, 0.06025862503051758, 0.060131519317626954, 0.060219390869140625, 0.06003507232666016, 0.059921470642089844, 0.05987593460083008, 0.060003776550292966, 0.05994956970214844, 0.05996502304077148, 0.05991916656494141, 0.0612715835571289, 0.059820926666259766, 0.05950672149658203, 0.059510784149169924, 0.05967452621459961, 0.059670623779296876, 0.059698528289794925, 0.05968864059448242, 0.05954684829711914, 0.05962931060791016, 0.0595865592956543, 0.05971696090698242, 0.05992515182495117, 0.06005724716186524, 0.05987772750854492, 0.05998796844482422, 0.06045692825317383, 
0.06023299026489258, 0.06002150344848633, 0.05989990234375, 0.059703296661376956, 0.05979750442504883, 0.059660289764404295, 0.05979084777832031, 0.05968537521362305, 0.059545600891113284, 0.05956169509887695, 0.05979721450805664, 0.05981769561767578, 0.05991715240478516, 0.059850753784179686, 0.0598006706237793, 0.05996636962890625, 0.05998960113525391, 0.059828639984130856, 0.05999951934814453, 0.060049728393554686, 0.060010433197021484, 0.05981753540039063, 0.06000291061401367, 0.059990367889404296, 0.059985889434814456, 0.059842559814453126, 0.0599552001953125, 0.06005350494384765, 0.06003683090209961, 0.0600865592956543, 0.060098560333251956, 0.06020070266723633, 0.06028889465332031, 0.06021567916870117, 0.06031478500366211, 0.06024441528320312, 0.060168609619140626, 0.060020030975341795, 0.06007878494262695, 0.06008627319335937, 0.06012259292602539, 0.06007635116577149, 0.060117057800292965, 0.0601888313293457, 0.06036275100708008, 0.06032992172241211, 0.060935649871826175, 0.05974476623535156, 0.05962361526489258, 0.059538814544677736, 0.05966447830200195, 0.0596478385925293, 0.05960950469970703, 0.059654239654541016, 0.06007807922363281, 0.05972787094116211, 0.05976876831054687, 0.05978860855102539, 0.05975936126708985, 0.05985248184204101, 0.05991561508178711, 0.059948001861572266, 0.06016748809814453, 0.060213951110839846, 0.060028926849365234, 0.059998207092285157, 0.05988761520385742, 0.05976870346069336, 0.05975052642822266, 0.05970534515380859, 0.059796703338623046, 0.0599068489074707, 0.059870559692382815, 0.0597911376953125, 0.05972851181030273, 0.05982182312011719, 0.05995270538330078, 0.05990636825561523, 0.06009299087524414, 0.059950462341308595, 0.05992240142822266, 0.06001260757446289, 0.05981455993652344, 0.0599183349609375, 0.059848705291748044, 0.05973334503173828, 0.0598936653137207, 0.060033790588378905, 0.060014591217041016, 0.060002079010009764, 0.05992217636108398, 0.06008838272094726, 0.06004134368896484, 0.06011910247802734, 0.06011312103271484, 0.06023068618774414, 0.060142559051513673, 0.06008358383178711, 0.06003366470336914, 0.060186622619628906, 0.06019705581665039, 0.06009222412109375, 0.06028044891357422, 0.06034675216674805, 0.0600002555847168, 0.06010060882568359, 0.060610401153564454, 0.0602154541015625, 0.06020204925537109, 0.061503391265869144, 0.059935073852539066, 0.05967052841186524, 0.05973535919189453, 0.0595299186706543, 0.05978656005859375, 0.059577022552490234, 0.05965619277954102, 0.059776512145996094, 0.059684703826904294, 0.05967043304443359, 0.05975465774536133, 0.05965679931640625, 0.05978316879272461, 0.05969510269165039, 0.060039169311523435, 0.059850753784179686, 0.0598853759765625, 0.06001887893676758, 0.0597064323425293, 0.05980051040649414, 0.059661952972412106, 0.05997401428222656, 0.05987942504882812, 0.05972582244873047, 0.05998591995239258, 0.05983027267456055, 0.06010060882568359, 0.0600797119140625, 0.06006211090087891, 0.06010403060913086, 0.05999068832397461, 0.06017433547973633, 0.05995305633544922, 0.06005129623413086, 0.06000870513916016, 0.05991769790649414, 0.05988415908813476, 0.05999734497070312, 0.060002464294433594, 0.05995792007446289, 0.05987129592895508, 0.05991526412963867, 0.06013363265991211, 0.06006592178344727, 0.06014569473266602, 0.06026911926269531, 0.0600715217590332, 0.06021692657470703, 0.06024265670776367, 0.06030960083007812, 0.06007129669189453, 0.060127487182617186, 0.06016851043701172, 0.05999116897583008, 0.05991238403320313, 0.05996620941162109, 0.06013132858276367, 0.06028035354614258, 
0.060168670654296874, 0.06022553634643555, 0.06015385437011719, 0.060082176208496096, 0.06163455963134765, 0.05991360092163086, 0.059627521514892576, 0.059763328552246094, 0.059657791137695315, 0.059875457763671876, 0.05972198486328125, 0.05959481430053711, 0.05953046417236328, 0.059593505859375, 0.059568126678466796, 0.05957222366333008, 0.059582462310791014, 0.0597891845703125, 0.05971571350097656, 0.0599653434753418, 0.06010192108154297, 0.05990278244018555, 0.059715358734130856, 0.059717247009277344, 0.059629631042480466, 0.059619712829589847, 0.05972796630859375, 0.059649375915527346, 0.059768577575683594, 0.05973068618774414, 0.059619552612304685, 0.05968896102905273, 0.059617279052734375, 0.05967363357543945, 0.05979375839233399, 0.05990256118774414, 0.059946208953857424, 0.05986377716064453, 0.05980374526977539, 0.05989580917358398, 0.059815937042236325, 0.06005759811401367, 0.05976073455810547, 0.05986278533935547, 0.059863201141357424, 0.05962137603759766, 0.05974630355834961, 0.05971558380126953, 0.060082176208496096, 0.0599714241027832, 0.059898017883300785, 0.05997724914550781, 0.060139392852783205, 0.060264991760253905, 0.060149822235107425, 0.06014310455322266, 0.06020889663696289, 0.06014028930664062, 0.060098560333251956, 0.06018867111206055, 0.06015180969238281, 0.060267807006835934, 0.06022780990600586, 0.06023219299316406, 0.06026998519897461, 0.06029782485961914, 0.06027449417114258, 0.061284351348876956, 0.05977702331542969, 0.05962137603759766, 0.05951027297973633, 0.059580513000488285, 0.05959881591796875, 0.05959929656982422, 0.059905216217041014, 0.05966521453857422, 0.059825183868408204, 0.059697662353515625, 0.059842399597167965, 0.060025470733642575, 0.05982617568969727, 0.06000435256958008, 0.06012505722045899, 0.060151935577392575, 0.06008940887451172, 0.059908321380615234, 0.05996361541748047, 0.059959358215332034, 0.05993926239013672, 0.05970739364624023, 0.05983420944213867, 0.0597751350402832, 0.05986707305908203, 0.06004537582397461, 0.059983009338378905, 0.059947872161865236, 0.05989791870117187, 0.06000428771972656, 0.05989744186401367, 0.05984092712402344, 0.05995008087158203, 0.06026956939697266, 0.05993267059326172, 0.059815361022949216, 0.05979808044433594, 0.05969100952148437, 0.06016185760498047, 0.05976681518554688, 0.059950817108154295, 0.05980950546264648, 0.05986163330078125, 0.06008428955078125, 0.06022902297973633, 0.05999068832397461, 0.06002684783935547, 0.06012457656860352, 0.06012988662719727, 0.060102207183837894, 0.06000479888916015, 0.05999398422241211, 0.060026687622070314, 0.05994099044799805, 0.060087520599365236, 0.06000739288330078, 0.06028287887573242, 0.06010060882568359, 0.06008531188964844, 0.06025868988037109, 0.0601646728515625, 0.06028675079345703, 0.06133555221557617, 0.059732223510742186, 0.05965526580810547, 0.05955855941772461, 0.05950259017944336, 0.059714912414550785, 0.060082176208496096, 0.05950691223144531, 0.059593151092529294, 0.05950444793701172, 0.05975664138793945, 0.05977478408813477, 0.059705631256103515, 0.05975804901123047, 0.059703838348388674, 0.059658241271972653, 0.05974220657348633, 0.059805694580078124, 0.059811328887939455, 0.05976652908325195, 0.05983718490600586, 0.05984815979003906, 0.05990044784545898, 0.05995465469360352, 0.059908576965332035, 0.05994911956787109, 0.05982003021240234, 0.059998207092285157, 0.06001398468017578, 0.06004182434082031, 0.06006140899658203, 0.05995113754272461, 0.0598570556640625, 0.06002627182006836, 0.059909950256347655, 0.059927425384521484, 0.05978460693359375, 
0.05995561599731445, 0.06004492950439453, 0.05993529510498047, 0.059907745361328125, 0.05989411163330078, 0.059983455657958984, 0.059885982513427735, 0.05986713409423828, 0.06010688018798828, 0.06011888122558594, 0.060094017028808594, 0.060130943298339845, 0.06035542297363281, 0.06016819381713867, 0.06017433547973633, 0.060071937561035155, 0.06003283309936523, 0.06009251022338867, 0.06013961410522461, 0.05998793411254883, 0.06024924850463867, 0.0600871696472168, 0.06031135940551758, 0.06003907012939453, 0.06004560089111328, 0.06018860626220703, 0.06143801498413086, 0.05994496154785156, 0.059584510803222655, 0.059633182525634765, 0.059519454956054686, 0.059656257629394534, 0.0595885124206543, 0.05966342544555664, 0.0596346549987793, 0.059666431427001954, 0.05960416030883789, 0.059644832611083984, 0.059592609405517576, 0.059860225677490234, 0.059942752838134765, 0.05964892959594727, 0.059957248687744144, 0.060061695098876954, 0.05979983901977539, 0.05968048095703125, 0.059617279052734375, 0.059594753265380856, 0.059805694580078124, 0.059889663696289064, 0.05982822418212891, 0.05986918258666992, 0.05988351821899414, 0.059901569366455076, 0.05999859237670899, 0.05984646224975586, 0.06002316665649414, 0.06002463912963867, 0.059918079376220706, 0.06017254257202148, 0.05996748733520508, 0.05992652893066406, 0.06029724884033203, 0.06000825500488281, 0.05979072189331055, 0.05968566513061523, 0.059676673889160155, 0.05968896102905273, 0.05963542556762695, 0.05984284973144531, 0.05981388854980469, 0.059754207611083986, 0.05995670318603516, 0.06014144134521485, 0.0601486701965332, 0.05995436859130859, 0.059869728088378905, 0.06008252716064453, 0.0600247688293457, 0.05998518371582031, 0.06006371307373047, 0.059896575927734376, 0.06000428771972656, 0.060282207489013674, 0.060136032104492185, 0.060004478454589845, 0.06000844955444336, 0.06001663970947266, 0.0599818229675293]",tokens/s,16.68443056086671,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 
182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 362.12 MiB is free. Process 504858 has 14.38 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 1.78 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, 
bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 362.12 MiB is free. Process 507120 has 14.38 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 1.78 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 106.12 MiB is free. Process 43445 has 14.63 GiB memory in use. Of the allocated memory 14.52 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. 
GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 46716 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 287, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 150.12 MiB is free. Process 488767 has 14.59 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.43 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 37072 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 106.12 MiB is free. Process 25816 has 14.63 GiB memory in use. Of the allocated memory 14.52 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 357, in __init__ self.k_proj = 
nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 496038 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 
564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 28611 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 36632 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 896.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 342.12 MiB is free. Process 499531 has 14.40 GiB memory in use. Of the allocated memory 14.29 GiB is allocated by PyTorch, and 1.75 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. 
Process 44367 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), 
**factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 26306 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 27710 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 3.25 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.42 GiB is free. Process 32290 has 13.32 GiB memory in use. Of the allocated memory 13.20 GiB is allocated by PyTorch, and 2.00 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.658624,13880.918016,0.0,13478.395904,13476.849152,s,1,7.4903740234375,7.4903740234375,0.0,7.4903740234375,7.4903740234375,7.4903740234375,7.4903740234375,[7.4903740234375],,kWh,8.544550874997489e-06,9.283715682611926e-07,4.641948157996234e-06,1.4114870601254915e-05,,MB,1294.6432,14115.79904,0.0,13700.694016,13671.637504,s,10,2.025188430786133,0.2025188430786133,0.0025492759523326495,0.20262025451660157,0.20595513610839844,0.206072607421875,0.20616658447265623,"[0.1978723907470703, 0.20619007873535156, 0.2000924530029297, 0.20497158813476563, 0.2059290313720703, 0.202415771484375, 0.20041363525390626, 0.20316012573242187, 0.20282473754882813, 0.20131861877441407]",tokens/s,1264.0799053973783,kWh,5.927953603833278e-06,6.537474860658716e-07,3.928730920760848e-06,1.0510432010659997e-05,tokens/kWh,24356753.34185665,MB,1337.91744,14117.896192,0.0,13702.791168,13671.640064,s,10,37.255335205078126,3.7255335205078124,0.0034284441509013623,3.72561181640625,3.728679321289062,3.730469543457031,3.7319017211914063,"[3.722262939453125, 3.72145654296875, 3.72214892578125, 3.7222705078125, 3.72393701171875, 3.732259765625, 3.72741455078125, 3.728281494140625, 3.728016845703125, 3.72728662109375]",tokens/s,16.910329662370806,kWh,0.00010890049058408778,1.2011787196485955e-05,7.230719117903517e-05,0.0001932194689596089,tokens/kWh,326054.09971999086,,s,630,37.252119171142574,0.05913034789070251,0.0005091308643201842,0.05905342483520508,0.059390202713012694,0.059551896667480474,0.06259809589385987,"[0.062271617889404295, 0.05935184097290039, 0.058875072479248045, 0.059009857177734375, 0.05880012893676758, 0.05887910461425781, 0.05886841583251953, 0.058930912017822266, 0.058764896392822265, 0.05874931335449219, 0.05873920059204102, 0.05889244842529297, 0.05898368072509766, 0.05899273681640625, 0.058979969024658206, 0.05914918518066406, 0.05961318588256836, 0.05957427215576172, 0.05928537750244141, 0.05934467315673828, 0.05888848114013672, 0.05888188934326172, 0.05882828903198242, 0.058923744201660154, 0.05890457534790039, 0.05894358444213867, 0.05922524642944336, 0.05902726364135742, 0.05893011093139648, 0.05899574279785156, 0.05886566543579102, 0.059091167449951174, 0.0593251838684082, 0.0594733772277832, 0.05930640029907226, 0.05908697509765625, 0.05912736129760742, 0.058970081329345704, 0.05879241561889648, 0.05897625732421875, 0.05905548858642578, 0.05890518569946289, 0.05898636627197266, 0.058914974212646486, 0.05908380889892578, 0.05910425567626953, 0.0590437126159668, 0.059187103271484375, 0.059369022369384766, 0.05939468765258789, 0.05918297576904297, 0.05908083343505859, 0.05902656173706055, 0.05902166366577148, 
0.058939937591552735, 0.05892300796508789, 0.05894543838500976, 0.05889440155029297, 0.05887696075439453, 0.05887625503540039, 0.05879452896118164, 0.05880745697021485, 0.05887184143066406, 0.06310908889770508, 0.05978227233886719, 0.059061119079589844, 0.05896796798706055, 0.058978401184082034, 0.0587243537902832, 0.05881651306152344, 0.05880640029907227, 0.05903756713867187, 0.05893734359741211, 0.05875423812866211, 0.05877132797241211, 0.05873347091674805, 0.0587279052734375, 0.05868313598632813, 0.0591121597290039, 0.059281505584716794, 0.059487743377685545, 0.05910691070556641, 0.05891574478149414, 0.05881987380981445, 0.05907734298706055, 0.05887295913696289, 0.05881331253051758, 0.058759071350097655, 0.058766815185546876, 0.05872652816772461, 0.058726913452148435, 0.05913190460205078, 0.058740097045898436, 0.05896255874633789, 0.05885059356689453, 0.05902819061279297, 0.05952022552490235, 0.059247329711914064, 0.05904595184326172, 0.05901126480102539, 0.05892691040039062, 0.058877952575683595, 0.05914191818237305, 0.05879609680175781, 0.058808479309082035, 0.058947681427001956, 0.05906169509887695, 0.058999393463134764, 0.058757152557373044, 0.05905084609985352, 0.058856063842773435, 0.059050369262695315, 0.0591416015625, 0.059455455780029295, 0.05943558502197266, 0.05919334411621094, 0.05901526260375976, 0.05944435119628906, 0.05916665649414062, 0.05932060623168945, 0.05889043045043945, 0.059001216888427736, 0.05898767852783203, 0.058952606201171875, 0.05902876663208008, 0.058950145721435546, 0.06286537551879882, 0.05981388854980469, 0.05936246490478515, 0.05898121643066406, 0.05893737411499023, 0.058840286254882815, 0.0588520622253418, 0.05901081466674805, 0.05917670440673828, 0.059031105041503905, 0.05877008056640625, 0.05866339111328125, 0.05876531219482422, 0.05869689559936524, 0.058692001342773435, 0.05891916656494141, 0.05930163192749023, 0.05939388656616211, 0.05922447967529297, 0.059299808502197265, 0.058988574981689454, 0.059156478881835936, 0.05877872085571289, 0.05918377685546875, 0.058852767944335936, 0.05906464004516602, 0.058706462860107424, 0.05869363021850586, 0.058722305297851565, 0.059066368103027345, 0.05882262420654297, 0.05884735870361328, 0.05928249740600586, 0.05932310485839844, 0.059400127410888674, 0.05925225448608398, 0.059248382568359376, 0.058987422943115236, 0.059009025573730466, 0.05900815963745117, 0.05906927871704101, 0.05903564834594727, 0.059150047302246093, 0.05882230377197266, 0.05900275039672852, 0.05888691329956055, 0.058959873199462894, 0.059006847381591794, 0.05898854446411133, 0.05909926223754883, 0.05921308898925781, 0.059165409088134766, 0.05909078216552734, 0.059239711761474606, 0.05909388732910156, 0.05902534484863281, 0.05889030456542969, 0.058823776245117185, 0.058829601287841794, 0.05894137573242188, 0.05883027267456055, 0.0586855354309082, 0.05901174545288086, 0.06250905609130859, 0.0596049919128418, 0.05887753677368164, 0.058646751403808595, 0.05866310501098633, 0.058687488555908204, 0.05866495895385742, 0.05872617721557617, 0.05869728088378906, 0.05861443328857422, 0.058815967559814455, 0.058698272705078124, 0.05866889572143555, 0.058996894836425784, 0.05880831909179687, 0.058931232452392575, 0.05933587265014648, 0.05945219039916992, 0.05956361770629883, 0.059367839813232424, 0.059264801025390626, 0.05906060791015625, 0.05894707107543945, 0.05890079879760742, 0.058813919067382814, 0.058935871124267576, 0.05897235107421875, 0.058797569274902345, 0.05895609664916992, 0.05893734359741211, 0.059101184844970706, 0.05910528182983398, 
0.05919728088378906, 0.059326526641845706, 0.05930198287963867, 0.05933606338500977, 0.05935299301147461, 0.05929443359375, 0.05919699096679688, 0.05896771240234375, 0.05894595336914062, 0.059004383087158205, 0.05879225540161133, 0.05874726486206055, 0.05893331146240234, 0.05894364929199219, 0.05882988739013672, 0.0588744010925293, 0.05918886566162109, 0.059259681701660155, 0.05920105743408203, 0.0591929931640625, 0.059496414184570315, 0.05932108688354492, 0.05910233688354492, 0.0589648323059082, 0.059104961395263673, 0.05891452789306641, 0.05891145706176758, 0.05912960052490234, 0.05918540954589844, 0.05893529510498047, 0.0589246711730957, 0.06236163330078125, 0.05955513763427735, 0.05916329574584961, 0.058829856872558595, 0.05879868698120117, 0.059063743591308594, 0.05896870422363281, 0.05923052978515625, 0.059112545013427734, 0.05892502212524414, 0.058846145629882815, 0.05873049545288086, 0.05899190521240234, 0.05882134246826172, 0.05875852966308594, 0.05895436859130859, 0.05967462539672851, 0.05973148727416992, 0.05946620941162109, 0.05925068664550781, 0.05914742279052734, 0.05906723022460938, 0.059082622528076174, 0.05881455993652344, 0.058676799774169924, 0.05874736022949219, 0.05902892684936523, 0.058910720825195315, 0.058829376220703125, 0.05877145767211914, 0.05868339157104492, 0.058803489685058594, 0.0592125129699707, 0.059328510284423826, 0.05927260971069336, 0.05928611373901367, 0.05914214324951172, 0.05892015838623047, 0.058732799530029293, 0.05870796966552734, 0.058716705322265625, 0.058734752655029296, 0.0587856330871582, 0.058816287994384764, 0.05910345458984375, 0.05892095947265625, 0.05903564834594727, 0.0592281608581543, 0.05916592025756836, 0.059164798736572266, 0.05921984100341797, 0.05946835327148438, 0.05933078384399414, 0.059100990295410154, 0.059315937042236325, 0.05921401596069336, 0.05922175979614258, 0.059083137512207035, 0.0590214729309082, 0.05904492950439453, 0.05930284881591797, 0.05910326385498047, 0.05905920028686523, 0.06284620666503907, 0.06000652694702149, 0.05929619216918945, 0.059080127716064454, 0.059001502990722654, 0.0589312973022461, 0.059545600891113284, 0.05902912139892578, 0.05904422378540039, 0.058996673583984374, 0.05895113754272461, 0.05906438446044922, 0.059284000396728515, 0.05912185668945313, 0.058957118988037106, 0.0592042236328125, 0.05956595230102539, 0.05973996734619141, 0.05950278472900391, 0.059289249420166015, 0.0591732177734375, 0.05899059295654297, 0.05902336120605469, 0.05911347198486328, 0.05902905654907226, 0.059057823181152345, 0.058927902221679686, 0.058931102752685545, 0.05897430419921875, 0.058940513610839844, 0.05917993545532227, 0.05907865524291992, 0.05943817520141602, 0.059513278961181644, 0.059576225280761716, 0.059568256378173826, 0.05923846435546875, 0.059196865081787106, 0.05931827163696289, 0.0592270736694336, 0.0591196174621582, 0.059055519104003903, 0.05907465744018554, 0.05912400054931641, 0.05904550552368164, 0.059097694396972655, 0.058861568450927736, 0.059082752227783204, 0.059109439849853514, 0.05923152160644531, 0.05938774490356445, 0.05926790237426758, 0.05926911926269531, 0.059165889739990235, 0.05914278411865234, 0.05912742233276367, 0.059216415405273434, 0.05916438293457031, 0.059172512054443356, 0.05913683319091797, 0.05912332916259765, 0.05900006484985352, 0.05905302429199219, 0.0630357437133789, 0.059897598266601564, 0.05921878433227539, 0.0589288330078125, 0.05901321411132812, 0.058865119934082034, 0.05887868881225586, 0.05880428695678711, 0.05884284973144531, 0.058743038177490235, 
0.0587960319519043, 0.05871596908569336, 0.058714305877685544, 0.05867452621459961, 0.05865129470825195, 0.05911347198486328, 0.059365089416503904, 0.059547935485839844, 0.05931008148193359, 0.05928252792358398, 0.059046817779541017, 0.058945598602294924, 0.05895775985717774, 0.059086559295654296, 0.058913055419921874, 0.05897334289550781, 0.05925884628295899, 0.05911846542358398, 0.05910457611083984, 0.05902928161621094, 0.058960800170898435, 0.05884108734130859, 0.05899817657470703, 0.05936316680908203, 0.05938979339599609, 0.05920259094238281, 0.05930380630493164, 0.059268096923828124, 0.059276287078857424, 0.05930131149291992, 0.059103649139404295, 0.05915049743652344, 0.059187232971191404, 0.05931539154052735, 0.05907107162475586, 0.059015457153320315, 0.05911929702758789, 0.05902684783935547, 0.059069438934326174, 0.059268096923828124, 0.059439456939697266, 0.05925519943237305, 0.05918220901489258, 0.059167552947998046, 0.059094974517822266, 0.059019454956054686, 0.05924844741821289, 0.05904198455810547, 0.05905382537841797, 0.05902975845336914, 0.059149375915527345, 0.05912876892089844, 0.05913564682006836, 0.06263446426391601, 0.05959408187866211, 0.059027904510498046, 0.05873702239990235, 0.05905817413330078, 0.05905929565429688, 0.059052608489990235, 0.059111743927001956, 0.05896195220947266, 0.05897625732421875, 0.05912115097045898, 0.058922687530517576, 0.05916556930541992, 0.058875072479248045, 0.05895967864990234, 0.05933347320556641, 0.059719776153564455, 0.05948825454711914, 0.05937289428710937, 0.059203968048095704, 0.059178462982177736, 0.05910406494140625, 0.058926849365234374, 0.058947742462158205, 0.059135616302490236, 0.05916310501098633, 0.058934463500976565, 0.058927585601806644, 0.058863105773925783, 0.059145088195800784, 0.059160545349121095, 0.058845184326171876, 0.05912979125976563, 0.059344799041748046, 0.05922835159301758, 0.059166240692138675, 0.05959695816040039, 0.05918143844604492, 0.058974113464355465, 0.05883488082885742, 0.059082046508789066, 0.059079425811767575, 0.05915216064453125, 0.0588941421508789, 0.05893571090698242, 0.05880012893676758, 0.058979328155517576, 0.05910630416870117, 0.059238048553466795, 0.05922604751586914, 0.059203777313232425, 0.05940812683105469, 0.05940281677246094, 0.059451297760009764, 0.05922966384887695, 0.059111968994140625, 0.05895167922973633, 0.05912144088745117, 0.05904611206054688, 0.05899673461914062, 0.05919744110107422, 0.05920153427124023, 0.05898854446411133, 0.06284979248046875, 0.05976268768310547, 0.059230209350585934, 0.059031551361083984, 0.058933246612548826, 0.0590327033996582, 0.05900288009643555, 0.058962814331054686, 0.05902924728393555, 0.05912972640991211, 0.05899302291870117, 0.05887376022338867, 0.058706016540527345, 0.05899673461914062, 0.05878985595703125, 0.059047969818115234, 0.059367393493652346, 0.05962351989746094, 0.05959027099609375, 0.05922857666015625, 0.059033504486083986, 0.058932319641113284, 0.05890345764160156, 0.05887180709838867, 0.05886873626708984, 0.05889494323730469, 0.05902771377563477, 0.05900537490844727, 0.058899646759033204, 0.058890785217285156, 0.05894879913330078, 0.05923513412475586, 0.05924249649047852, 0.05942208099365234, 0.05928188705444336, 0.05932662582397461, 0.05924431991577148, 0.05923968124389648, 0.05907526397705078, 0.058931583404541014, 0.058857151031494144, 0.059093215942382815, 0.05906585693359375, 0.05892118453979492, 0.05910966491699219, 0.05917798233032227, 0.059136096954345706, 0.059132831573486325, 0.05931827163696289, 0.05954467010498047, 
0.05936812973022461, 0.059453662872314454, 0.059399585723876956, 0.05918073654174805, 0.05912259292602539, 0.05897625732421875, 0.05921334457397461, 0.05900953674316406, 0.059015167236328124, 0.05895779037475586, 0.05896768188476562, 0.05902979278564453, 0.05917091369628906, 0.06271430587768555, 0.05971152114868164, 0.05905535888671875, 0.05887871932983398, 0.05881846237182617, 0.05894153594970703, 0.05893072128295898, 0.05899737548828125, 0.05885529708862305, 0.05885030364990235, 0.05889228820800781, 0.05883567810058594, 0.05896531295776367, 0.05882470321655273, 0.05892499160766602, 0.05929062271118164, 0.05949420928955078, 0.059502113342285154, 0.05934486389160156, 0.05934108734130859, 0.05895529556274414, 0.05878774261474609, 0.059063262939453125, 0.05905817413330078, 0.05895897674560547, 0.058892192840576174, 0.0589846076965332, 0.059195457458496095, 0.05913228988647461, 0.059185089111328124, 0.05914233779907226, 0.05929721450805664, 0.05931705474853516, 0.05938508987426758, 0.05917779159545898, 0.059361217498779296, 0.05907244873046875, 0.05908076858520508, 0.05922617721557617, 0.0591338882446289, 0.059129150390625, 0.058921375274658204, 0.05897244644165039, 0.05901295852661133, 0.0590849609375, 0.05916425704956055, 0.059103614807128904, 0.05897833633422851, 0.05901311874389648, 0.05919539260864258, 0.059477439880371095, 0.059421409606933595, 0.05916630554199219, 0.05906073760986328, 0.05902310562133789, 0.05901027297973633, 0.05903830337524414, 0.05910515213012695, 0.05905855941772461, 0.059209182739257814, 0.05936991882324219, 0.058928512573242185, 0.05897459030151367]",tokens/s,16.911789557680535,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 287, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 106.12 MiB is free. Process 502574 has 14.63 GiB memory in use. Of the allocated memory 14.52 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 270.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 98.12 MiB is free. Process 496408 has 14.64 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 1.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 362.12 MiB is free. Process 506389 has 14.38 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 1.78 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 
151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1118, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 502894 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 287, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 106.12 MiB is free. Process 501406 has 14.63 GiB memory in use. Of the allocated memory 14.52 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in 
__init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 3.25 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.42 GiB is free. Process 33730 has 13.32 GiB memory in use. Of the allocated memory 13.20 GiB is allocated by PyTorch, and 2.00 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-8B-Instruct,meta-llama/Llama-3.1-8B-Instruct,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 26752 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.62 GiB. GPU 0 has a total capacity of 14.74 GiB of which 470.12 MiB is free. Process 35645 has 14.28 GiB memory in use. Of the allocated memory 14.16 GiB is allocated by PyTorch, and 1.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.613568,13880.918016,0.0,13478.395904,13476.849152,s,1,7.45773681640625,7.45773681640625,0.0,7.45773681640625,7.45773681640625,7.45773681640625,7.45773681640625,[7.45773681640625],,kWh,8.334706041720587e-06,9.121287597867775e-07,5.048892927989046e-06,1.429572772949641e-05,,MB,1312.82944,14117.896192,0.0,13702.791168,13671.637504,s,10,2.079610366821289,0.2079610366821289,0.0033514069268301503,0.20994270324707032,0.21055453338623048,0.21057272262573243,0.210587274017334,"[0.20050924682617188, 0.20672898864746095, 0.21030937194824217, 0.21059091186523438, 0.21044026184082032, 0.207285888671875, 0.20330979919433595, 0.209791748046875, 0.21009365844726563, 0.21055049133300782]",tokens/s,1230.9998261419482,kWh,6.043461185120014e-06,6.664650878497999e-07,4.000836534000269e-06,1.0710762806970085e-05,tokens/kWh,23901192.157238945,MB,1368.735744,14119.993344,0.0,13702.791168,13671.640064,s,10,37.210079101562506,3.72100791015625,0.004047133964294645,3.7205743408203125,3.7272676025390625,3.7274522827148435,3.7276000268554688,"[3.71548095703125, 3.715154541015625, 3.719099365234375, 3.71879541015625, 3.720977783203125, 3.722086181640625, 3.7201708984375, 3.723450439453125, 3.727636962890625, 3.7272265625]",tokens/s,16.930896553067136,kWh,0.00010894955950113031,1.2015654418491562e-05,7.245691907659804e-05,0.0001934221329962199,tokens/kWh,325712.46642818913,,s,630,37.206773433685264,0.059058370529659204,0.0004928099861878423,0.058999071121215815,0.0593966724395752,0.05950207996368408,0.062148142089843754,"[0.06201139068603516, 0.05951391983032227, 0.05865555191040039, 0.05841113662719727, 0.05868307113647461, 0.05844614410400391, 0.058434944152832034, 0.05840278244018555, 0.05833385467529297, 
0.0584290542602539, 0.05842287826538086, 0.0586288948059082, 0.058635936737060544, 0.058440032958984374, 0.05882265472412109, 0.058762657165527345, 0.05875772857666016, 0.05916876983642578, 0.05898649597167969, 0.058810367584228515, 0.05887385559082031, 0.058963329315185546, 0.05893353652954102, 0.0587918701171875, 0.05887017440795898, 0.0587407341003418, 0.05866495895385742, 0.05875302505493164, 0.05872611236572266, 0.058902496337890624, 0.0589683837890625, 0.058916862487792966, 0.05922111892700195, 0.05929868698120117, 0.05928252792358398, 0.059190174102783204, 0.05905929565429688, 0.05910345458984375, 0.05897900772094727, 0.058949630737304685, 0.05885542297363281, 0.058787841796875, 0.058830654144287106, 0.058964160919189455, 0.05906390380859375, 0.058907039642333986, 0.05915139389038086, 0.059218910217285155, 0.05894553756713867, 0.059133953094482425, 0.05926502227783203, 0.05896310424804688, 0.05903849411010742, 0.05946131134033203, 0.059396480560302736, 0.05907046508789063, 0.05926630401611328, 0.05936787033081055, 0.058904544830322265, 0.05920783996582031, 0.05914822387695313, 0.05904364776611328, 0.059263137817382815, 0.06219590377807617, 0.05919168090820313, 0.058496353149414065, 0.058657440185546875, 0.05844582366943359, 0.05851955032348633, 0.05848678588867187, 0.058352767944335936, 0.058495872497558596, 0.05852950286865234, 0.05869596862792969, 0.058605567932128906, 0.05850931167602539, 0.05872553634643555, 0.05898662567138672, 0.05872918319702149, 0.05897334289550781, 0.05912364959716797, 0.058872737884521485, 0.05877129745483398, 0.058917247772216796, 0.05886937713623047, 0.058833057403564454, 0.058587135314941405, 0.05900697708129883, 0.058896350860595706, 0.05874076843261719, 0.058758720397949216, 0.0588836784362793, 0.058860385894775394, 0.058802398681640625, 0.058668830871582034, 0.05881651306152344, 0.059015167236328124, 0.05933587265014648, 0.059216705322265625, 0.059340065002441406, 0.0589442253112793, 0.058937313079833985, 0.05884902572631836, 0.0591608657836914, 0.05883599853515625, 0.05878396987915039, 0.05887871932983398, 0.05906227111816406, 0.05921526336669922, 0.058985057830810546, 0.05895782470703125, 0.058988094329833984, 0.05903200149536133, 0.058959873199462894, 0.059187198638916014, 0.05941862487792969, 0.05955097579956055, 0.05914064025878906, 0.059200897216796874, 0.05899350357055664, 0.05887913513183594, 0.059167167663574216, 0.059292064666748044, 0.059260032653808595, 0.05907868957519531, 0.05915750503540039, 0.06205939102172851, 0.059230209350585934, 0.058730239868164065, 0.058988479614257815, 0.05890899276733398, 0.058829856872558595, 0.0588175048828125, 0.05869916915893555, 0.05875568008422852, 0.058531841278076174, 0.05869977569580078, 0.058756961822509765, 0.05868150329589844, 0.05873676681518555, 0.05880409622192383, 0.05880012893676758, 0.05916672134399414, 0.05931008148193359, 0.05912985610961914, 0.059090240478515625, 0.05875577545166016, 0.05894707107543945, 0.05890099334716797, 0.05883084869384766, 0.058817855834960936, 0.058753726959228515, 0.058953216552734375, 0.05864908981323242, 0.05880217742919922, 0.05906185531616211, 0.059144607543945314, 0.05879097747802734, 0.05919171142578125, 0.05952675247192383, 0.05936371231079102, 0.05895248031616211, 0.05880809783935547, 0.058845184326171876, 0.05899059295654297, 0.05893948745727539, 0.05900278472900391, 0.058982208251953126, 0.05874668884277344, 0.058998817443847655, 0.05898899078369141, 0.058998687744140625, 0.05909040069580078, 0.058835201263427735, 0.05900252914428711, 0.05909158325195313, 
0.05906425476074219, 0.059020416259765625, 0.05905059051513672, 0.059074111938476566, 0.05905417633056641, 0.059197311401367185, 0.05907251358032226, 0.05950067138671875, 0.059323070526123046, 0.0592503662109375, 0.05918105697631836, 0.0592402229309082, 0.05925040054321289, 0.06228521728515625, 0.0594601936340332, 0.05885337448120117, 0.05860966491699219, 0.05850521469116211, 0.0586506233215332, 0.0585011215209961, 0.058415103912353515, 0.05848886489868164, 0.05852771377563477, 0.058564609527587894, 0.05898163223266602, 0.05877017593383789, 0.05874665451049805, 0.05891299057006836, 0.05900697708129883, 0.059045215606689454, 0.05941254425048828, 0.05904806518554687, 0.05879353713989258, 0.0586187858581543, 0.05873049545288086, 0.05896540832519531, 0.058669662475585936, 0.058611583709716794, 0.05873062515258789, 0.058605216979980466, 0.05872447967529297, 0.0590090560913086, 0.058796222686767576, 0.05868057632446289, 0.058700542449951175, 0.05890867233276367, 0.05926700973510742, 0.05976684951782227, 0.05911481475830078, 0.05898223876953125, 0.058901439666748046, 0.05897724914550781, 0.05910208129882812, 0.05916227340698242, 0.05898076629638672, 0.058971710205078125, 0.059167167663574216, 0.05907830429077148, 0.05926128005981445, 0.05927446365356445, 0.05897004699707031, 0.059009246826171875, 0.059197696685791015, 0.05945996856689453, 0.05963123321533203, 0.05920191955566406, 0.059131553649902344, 0.05935660934448242, 0.05910825729370117, 0.059007198333740234, 0.059020320892333986, 0.059148895263671876, 0.059139743804931644, 0.05916873550415039, 0.059187744140625, 0.05936947250366211, 0.062236927032470704, 0.05952982330322266, 0.058843135833740234, 0.05857865524291992, 0.05879340744018555, 0.05873945617675781, 0.058584896087646485, 0.0587184944152832, 0.058521598815917966, 0.05852262496948242, 0.05879891204833984, 0.05885520172119141, 0.05865488052368164, 0.058703487396240234, 0.05868304061889648, 0.05880047988891601, 0.059070751190185546, 0.05928995132446289, 0.059485889434814455, 0.05902684783935547, 0.05900073623657227, 0.05918207931518555, 0.0590186882019043, 0.05877980804443359, 0.05889260864257812, 0.05901116943359375, 0.05885891342163086, 0.05860822296142578, 0.05877664184570312, 0.058888416290283206, 0.0588520622253418, 0.05886377716064453, 0.059115425109863284, 0.05928691101074219, 0.05933116912841797, 0.059231647491455076, 0.05926150512695313, 0.05927731323242187, 0.05886361694335938, 0.05884108734130859, 0.05886771011352539, 0.05906406402587891, 0.05876934432983399, 0.05926684951782227, 0.05883119964599609, 0.059003070831298826, 0.058935039520263674, 0.05892736053466797, 0.05907455825805664, 0.05969100952148437, 0.059170814514160154, 0.0592749137878418, 0.059439456939697266, 0.059238399505615234, 0.059295040130615234, 0.05934723281860352, 0.059302303314208986, 0.05926911926269531, 0.05937356948852539, 0.0591646728515625, 0.05897353744506836, 0.05906470489501953, 0.058980640411376954, 0.0624156494140625, 0.05975040054321289, 0.05893939208984375, 0.058631809234619144, 0.058867359161376955, 0.05897903823852539, 0.05881017684936524, 0.05875024032592773, 0.05860444641113281, 0.05850521469116211, 0.05875619125366211, 0.05878636932373047, 0.058713729858398435, 0.05870409774780273, 0.05870633697509765, 0.05858044815063477, 0.058953792572021484, 0.05955027389526367, 0.05925888061523438, 0.05912313461303711, 0.05898912048339844, 0.05882060623168945, 0.059135711669921875, 0.05875308990478516, 0.058724193572998046, 0.058775936126708984, 0.05873673629760742, 0.05874214553833008, 
0.05884377670288086, 0.05895977783203125, 0.05880963134765625, 0.058890975952148435, 0.058945247650146484, 0.058902015686035154, 0.05926377487182617, 0.05943910217285156, 0.059377471923828126, 0.059170623779296876, 0.059189632415771486, 0.05894076919555664, 0.05888476943969727, 0.05919744110107422, 0.05887385559082031, 0.05881967926025391, 0.0588741455078125, 0.05893555068969727, 0.05921203231811523, 0.05906585693359375, 0.059072158813476563, 0.059022560119628906, 0.05953919982910156, 0.059633663177490234, 0.05937356948852539, 0.05928940963745117, 0.059233665466308594, 0.05910160064697265, 0.05901763153076172, 0.05912326431274414, 0.05931008148193359, 0.05939152145385742, 0.05934339141845703, 0.059257217407226566, 0.05938313674926758, 0.06210579299926758, 0.059409503936767576, 0.058743518829345705, 0.058474590301513675, 0.058744384765625, 0.05889059066772461, 0.05867520141601563, 0.05858243179321289, 0.058983009338378904, 0.05877532958984375, 0.05883855819702148, 0.05908755111694336, 0.05883084869384766, 0.05881814575195313, 0.058696033477783204, 0.05872828674316406, 0.05923452758789063, 0.0592281608581543, 0.059072544097900394, 0.05903971099853516, 0.05878300857543945, 0.058724990844726564, 0.058652542114257813, 0.05871023941040039, 0.05865206527709961, 0.058588798522949216, 0.05858371353149414, 0.05860383987426758, 0.058875904083251954, 0.058828800201416016, 0.05875263977050781, 0.05898867034912109, 0.05939839935302734, 0.059232257843017576, 0.059115520477294924, 0.05926707077026367, 0.05923430252075195, 0.05888608169555664, 0.05866819381713867, 0.058613727569580075, 0.05870188903808594, 0.05869657516479492, 0.05886566543579102, 0.05926502227783203, 0.05910732650756836, 0.059174400329589844, 0.05918566513061523, 0.05901311874389648, 0.05938943862915039, 0.059165184020996096, 0.05907212829589844, 0.05945897674560547, 0.05950265502929687, 0.0593540153503418, 0.05922934341430664, 0.05942086410522461, 0.059310752868652346, 0.059383296966552736, 0.05912755203247071, 0.059155200958251955, 0.05923011016845703, 0.059504737854003904, 0.05938127899169922, 0.06216543960571289, 0.05937097549438477, 0.05886025619506836, 0.05858220672607422, 0.05849507141113281, 0.05861872100830078, 0.0587935676574707, 0.05887340927124023, 0.058718944549560545, 0.05873443222045899, 0.058579071044921875, 0.05873452758789063, 0.058929248809814455, 0.05888409423828125, 0.058719776153564454, 0.05887638473510742, 0.05931622314453125, 0.05940185546875, 0.05919168090820313, 0.05904508972167969, 0.05886032104492187, 0.0586321907043457, 0.058617183685302734, 0.05860559844970703, 0.058826560974121096, 0.05891769790649414, 0.05893529510498047, 0.05892300796508789, 0.05883699035644531, 0.05907251358032226, 0.059131423950195314, 0.058999263763427734, 0.059252544403076174, 0.05908294296264648, 0.05915817642211914, 0.05915273666381836, 0.059115425109863284, 0.05914384078979492, 0.05917292785644531, 0.05913433456420898, 0.0591278076171875, 0.05903257751464844, 0.0589051513671875, 0.05894803237915039, 0.05917491149902344, 0.059278560638427735, 0.05938652801513672, 0.05907263946533203, 0.059115425109863284, 0.0593265609741211, 0.059487998962402346, 0.05935436630249023, 0.059456031799316404, 0.05925724792480469, 0.05942806243896484, 0.05945635223388672, 0.059461631774902345, 0.05927526473999024, 0.05919120025634766, 0.059185214996337894, 0.059129886627197266, 0.05933260726928711, 0.05929369735717773, 0.06252908706665039, 0.059644001007080075, 0.05909539031982422, 0.058613761901855466, 0.05885318374633789, 0.058872001647949215, 
0.05898175811767578, 0.05884787368774414, 0.058613761901855466, 0.05852364730834961, 0.05858303833007812, 0.05898198318481445, 0.058748897552490235, 0.05895212936401367, 0.05892432022094726, 0.05902329635620117, 0.05948089599609375, 0.06002483367919922, 0.05961520004272461, 0.059178943634033206, 0.059014720916748045, 0.05897651290893555, 0.058974369049072266, 0.05865891265869141, 0.058931137084960936, 0.058926334381103514, 0.05887059020996094, 0.05892822265625, 0.058747806549072266, 0.05918310546875, 0.05915852737426758, 0.05909670257568359, 0.05927974319458008, 0.059338752746582034, 0.0594384651184082, 0.059259521484375, 0.059172863006591796, 0.059061279296875, 0.05912227249145508, 0.058808704376220704, 0.05930188751220703, 0.0592037124633789, 0.058946910858154296, 0.058998878479003904, 0.059025375366210935, 0.060004608154296875, 0.05936883163452149, 0.0591512336730957, 0.05899283218383789, 0.05928323364257813, 0.059444416046142576, 0.05950137710571289, 0.05934694290161133, 0.059359294891357425, 0.059310016632080076, 0.059149471282958985, 0.05906435012817383, 0.05907334518432617, 0.059141536712646485, 0.05910793685913086, 0.05906227111816406, 0.059015167236328124, 0.05935308837890625, 0.0624161605834961, 0.05965692901611328, 0.059009025573730466, 0.05861785507202148, 0.058742782592773435, 0.058836734771728516, 0.05877376174926758, 0.05875408172607422, 0.058599872589111326, 0.05851567840576172, 0.05852959823608399, 0.05865052795410156, 0.058628353118896484, 0.05900703811645508, 0.058853023529052736, 0.05914620971679688, 0.059589374542236326, 0.059764640808105465, 0.059367198944091794, 0.05902880096435547, 0.05877443313598633, 0.058654720306396485, 0.05862582397460937, 0.058914272308349606, 0.058727169036865236, 0.05875820922851562, 0.05925574493408203, 0.058992641448974606, 0.05885091018676758, 0.058865345001220704, 0.05882953643798828, 0.05918515014648437, 0.059241600036621093, 0.05929663848876953, 0.05951871871948242, 0.05936563110351562, 0.059305984497070315, 0.059101184844970706, 0.059154430389404294, 0.05916656112670898, 0.059127967834472654, 0.058893760681152346, 0.05915091323852539, 0.05913167953491211, 0.05899507141113281, 0.05931017684936524, 0.059176448822021485, 0.05915059280395508, 0.059260513305664064, 0.05935871887207031, 0.059377696990966795, 0.05945564651489258, 0.05940092849731445, 0.0594411506652832, 0.05957632064819336, 0.05946681594848633, 0.059466686248779294, 0.05942214584350586, 0.059326656341552736, 0.0591671028137207, 0.05940364837646484, 0.059327102661132815, 0.05940633773803711]",tokens/s,16.932400793174583,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = 
launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 234.12 MiB is free. Process 499918 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 270.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 98.12 MiB is free. Process 495263 has 14.64 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 1.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1118, in __init__ self.lm_head = nn.Linear(config.hidden_size, 
config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 503282 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = 
LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 357, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 497164 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 45832 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. 
Process 45340 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, 
in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 38510 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1118, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ 
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 503640 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ 
[LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 234.12 MiB is free. Process 498789 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.531648,13880.918016,0.0,13478.395904,13476.849152,s,1,7.557330078125,7.557330078125,0.0,7.557330078125,7.557330078125,7.557330078125,7.557330078125,[7.557330078125],,kWh,8.614358579203931e-06,9.423954797841812e-07,4.910003927993678e-06,1.4466757986981792e-05,,MB,1208.127488,14113.701888,0.0,13700.694016,13671.637504,s,10,12.46454931640625,1.246454931640625,0.004118358371665385,1.2484244995117186,1.2501810546875,1.2503031127929687,1.2504007592773438,"[1.23776806640625, 1.24261962890625, 1.243001708984375, 1.2441705322265626, 1.249641845703125, 1.2497257080078126, 1.2501539306640626, 1.2472071533203124, 1.2504251708984375, 1.2498355712890625]",tokens/s,205.38247593360185,kWh,3.637035038000704e-05,4.011153165015655e-06,2.417424156159753e-05,6.455574510662022e-05,tokens/kWh,3965564.948203922,MB,1256.267776,14115.79904,0.0,13702.791168,13671.640064,s,10,37.74185693359375,3.7741856933593754,0.0019019189260765204,3.774030517578125,3.776664501953125,3.7768697265625,3.77703390625,"[3.771296142578125, 3.7749912109375, 3.77295654296875, 3.771998046875, 3.776618896484375, 3.7728369140625, 3.7732041015625, 3.777074951171875, 3.77485693359375, 
3.776023193359375]",tokens/s,16.692342433189648,kWh,0.00011034360833999474,1.2169942675116277e-05,7.313094739360427e-05,0.0001956444984087153,tokens/kWh,322012.63267004065,,s,630,37.73844884872437,0.059902299759879936,0.00026899646318153086,0.05989480018615723,0.060194672393798825,0.06027122974395752,0.061000479354858396,"[0.06125177764892578, 0.05970035171508789, 0.05954444885253906, 0.05974380874633789, 0.059281856536865234, 0.05950003051757812, 0.05950918579101563, 0.05949161529541016, 0.05956025695800781, 0.05976316833496094, 0.059584510803222655, 0.05952511978149414, 0.05941862487792969, 0.05956787109375, 0.05941683197021484, 0.05949235153198242, 0.05967052841186524, 0.05999958419799805, 0.05982278442382812, 0.06006771087646484, 0.05966428756713867, 0.05973011016845703, 0.05953945541381836, 0.059594753265380856, 0.05972927856445313, 0.0598331184387207, 0.05987094497680664, 0.05989593505859375, 0.059860225677490234, 0.05995187377929687, 0.05967462539672851, 0.059614593505859376, 0.059910945892333985, 0.06017552185058594, 0.060041919708251956, 0.05997561645507812, 0.06011283111572266, 0.05994303894042969, 0.05992038345336914, 0.059708480834960935, 0.059975776672363285, 0.05993967819213867, 0.05991190338134766, 0.059991649627685543, 0.06001119995117187, 0.05997910308837891, 0.059845279693603516, 0.05984640121459961, 0.05985843276977539, 0.06001945495605469, 0.05992201614379883, 0.06026691055297852, 0.06002390289306641, 0.060165023803710936, 0.05988761520385742, 0.05995075225830078, 0.05987977600097656, 0.06009609603881836, 0.06005801773071289, 0.06006284713745117, 0.060176673889160155, 0.06012480163574219, 0.06030230331420899, 0.06094432067871094, 0.05971267318725586, 0.0595560302734375, 0.059353118896484376, 0.05934972763061523, 0.05956742477416992, 0.05950534439086914, 0.059658241271972653, 0.059717632293701174, 0.059743263244628905, 0.06030799865722656, 0.059700672149658206, 0.05967769622802734, 0.059957248687744144, 0.05975244903564453, 0.05996723175048828, 0.0599653434753418, 0.05990435028076172, 0.05979340744018555, 0.05980934524536133, 0.05963161468505859, 0.059863071441650394, 0.05973443222045898, 0.05975238418579101, 0.05966364669799805, 0.05987526321411133, 0.059655006408691404, 0.059716670989990235, 0.05970016098022461, 0.05973196792602539, 0.0599609603881836, 0.05992486572265625, 0.05975151824951172, 0.0600335693359375, 0.06001446533203125, 0.0598922233581543, 0.059850753784179686, 0.059791358947753906, 0.0598364143371582, 0.059837825775146486, 0.05986163330078125, 0.059924480438232425, 0.06005350494384765, 0.0599552001953125, 0.05985260772705078, 0.06002297592163086, 0.05999987030029297, 0.06008838272094726, 0.06004313659667969, 0.060146110534667965, 0.06022553634643555, 0.06016524887084961, 0.060029407501220704, 0.06021980667114258, 0.06012659072875977, 0.060211841583251956, 0.060186622619628906, 0.06033407974243164, 0.06014563369750976, 0.06032559967041016, 0.06017465591430664, 0.06021247863769531, 0.060166912078857424, 0.061077503204345705, 0.05968201446533203, 0.05937561416625976, 0.0595577278137207, 0.059486335754394534, 0.059509567260742184, 0.059581790924072266, 0.05955219268798828, 0.059701248168945314, 0.059693279266357424, 0.05965119934082031, 0.05983020782470703, 0.059744609832763675, 0.059748703002929685, 0.05956793594360352, 0.059953086853027346, 0.060014720916748046, 0.06002899169921875, 0.059883296966552736, 0.060074527740478514, 0.05994496154785156, 0.060088096618652345, 0.05975267028808594, 0.05962457656860352, 0.0596181755065918, 0.059625377655029295, 
0.05947401428222656, 0.059674175262451175, 0.05981827163696289, 0.0597751350402832, 0.059804672241210936, 0.05981491088867188, 0.05979340744018555, 0.0598466567993164, 0.059756542205810545, 0.0599736328125, 0.05989376068115235, 0.0599285774230957, 0.0598548469543457, 0.059856895446777345, 0.060088321685791014, 0.05989990234375, 0.05982796859741211, 0.059955455780029296, 0.059874942779541016, 0.05992035293579102, 0.06017251205444336, 0.05995743942260742, 0.0599898567199707, 0.060039295196533206, 0.06018255996704101, 0.060055553436279295, 0.05998591995239258, 0.06010022354125977, 0.06035289764404297, 0.06013747024536133, 0.05991548919677735, 0.06004816055297851, 0.059915904998779294, 0.06006739044189453, 0.060076416015625, 0.06012723159790039, 0.060254337310791016, 0.061212223052978514, 0.05975286483764648, 0.05957017517089844, 0.059667999267578126, 0.059315967559814456, 0.059480510711669925, 0.05947558212280273, 0.05937641525268555, 0.059463550567626956, 0.05950054550170898, 0.05954969787597656, 0.059754112243652346, 0.05953542327880859, 0.059728191375732424, 0.059666431427001954, 0.05970943832397461, 0.05991628646850586, 0.05999545669555664, 0.05991084671020508, 0.059686912536621096, 0.05972787094116211, 0.05981718444824219, 0.05969753646850586, 0.059689342498779295, 0.0596992301940918, 0.05978726577758789, 0.05977907180786133, 0.05990399932861328, 0.059958656311035155, 0.05992310333251953, 0.0600080337524414, 0.05987776184082031, 0.060055553436279295, 0.06014361572265625, 0.05989888000488281, 0.05992268753051758, 0.059877567291259766, 0.059944801330566407, 0.05979935836791992, 0.05983119964599609, 0.05979878234863281, 0.05999814224243164, 0.059805694580078124, 0.060018848419189454, 0.059808414459228514, 0.05998387145996094, 0.060039169311523435, 0.06001830291748047, 0.060088191986083984, 0.06010227203369141, 0.06000278472900391, 0.06011916732788086, 0.06009420776367187, 0.059992607116699216, 0.05988351821899414, 0.06005072021484375, 0.060133438110351566, 0.06005417633056641, 0.06005145645141602, 0.05997545623779297, 0.059982048034667966, 0.06003046417236328, 0.060004863739013675, 0.061222942352294925, 0.05980096054077148, 0.05973040008544922, 0.05970150375366211, 0.05944950485229492, 0.05963148880004883, 0.059624862670898435, 0.059625728607177735, 0.05967305755615234, 0.05970694351196289, 0.05977132797241211, 0.059759777069091795, 0.05959766387939453, 0.05988556671142578, 0.05981798553466797, 0.059850753784179686, 0.05972339248657227, 0.05985033416748047, 0.05997443389892578, 0.05982822418212891, 0.05968281555175781, 0.05974016189575195, 0.05974220657348633, 0.059641151428222655, 0.059751102447509766, 0.05976678466796875, 0.05979123306274414, 0.059895839691162106, 0.059711265563964847, 0.05992879867553711, 0.05991846466064453, 0.0598691520690918, 0.060022785186767576, 0.05997772979736328, 0.05996543884277344, 0.06010268783569336, 0.06007567977905273, 0.06001628875732422, 0.059869857788085935, 0.05994063949584961, 0.060023006439208985, 0.06023347091674805, 0.05989775848388672, 0.06006630325317383, 0.059909984588623046, 0.0599736328125, 0.059936576843261716, 0.05996559906005859, 0.06000028610229492, 0.060290657043457034, 0.0601800651550293, 0.0602077751159668, 0.060110401153564454, 0.060162654876708986, 0.05997568130493164, 0.06009648132324219, 0.060135456085205076, 0.0601448974609375, 0.06027135848999023, 0.06029510498046875, 0.060225601196289065, 0.06041702270507812, 0.060170848846435546, 0.06123961639404297, 0.059684478759765625, 0.05955027389526367, 0.05957017517089844, 
0.05933987045288086, 0.059630496978759766, 0.05959884643554687, 0.059662303924560546, 0.059565536499023436, 0.05991670227050781, 0.05974441528320312, 0.05984220886230469, 0.05963398361206055, 0.05958863830566406, 0.05980995178222656, 0.0598092155456543, 0.059901439666748046, 0.059974559783935545, 0.059975135803222654, 0.059684864044189455, 0.05967472076416016, 0.05977132797241211, 0.059701248168945314, 0.05976201629638672, 0.059834590911865236, 0.05979385757446289, 0.05977062225341797, 0.059805248260498045, 0.05986579132080078, 0.05994291305541992, 0.059850753784179686, 0.0598256950378418, 0.0598037109375, 0.05990851211547851, 0.05979545593261719, 0.05990742492675781, 0.05981865692138672, 0.05981977462768555, 0.05974143981933594, 0.059664543151855466, 0.059620128631591794, 0.059934783935546875, 0.059736000061035154, 0.05999417495727539, 0.05985007858276367, 0.05999683380126953, 0.059972766876220704, 0.05996015930175781, 0.060096511840820314, 0.060047359466552735, 0.06003507232666016, 0.06010060882568359, 0.06003507232666016, 0.06021324920654297, 0.060063873291015625, 0.060118721008300784, 0.0599984016418457, 0.060080127716064455, 0.05985033416748047, 0.06021104049682617, 0.06026911926269531, 0.06032307052612305, 0.060213600158691406, 0.06121267318725586, 0.059887424468994144, 0.05950191879272461, 0.05953212738037109, 0.05959884643554687, 0.0597341423034668, 0.05947296142578125, 0.05943174362182617, 0.05961523056030273, 0.05947187042236328, 0.05950873565673828, 0.05962108612060547, 0.05960528182983398, 0.05968896102905273, 0.05973606491088867, 0.05967871856689453, 0.05973097610473633, 0.05983542251586914, 0.05973545455932617, 0.05970793533325195, 0.05958860778808594, 0.05981798553466797, 0.05961081695556641, 0.059650047302246094, 0.05981727981567383, 0.05977142333984375, 0.05973859024047851, 0.059866718292236325, 0.05991465759277344, 0.059778785705566405, 0.059881214141845704, 0.05982262420654297, 0.05995062255859375, 0.060205375671386716, 0.06000860977172852, 0.06001859283447265, 0.059923744201660155, 0.05994707107543945, 0.059763233184814454, 0.05977110290527344, 0.05967593765258789, 0.05991644668579101, 0.05985747146606445, 0.059958305358886715, 0.06003401565551758, 0.060047359466552735, 0.059982975006103514, 0.059932769775390624, 0.05998368072509765, 0.06018352127075195, 0.060063743591308595, 0.060129280090332034, 0.06022310256958008, 0.06025459289550781, 0.06017228698730469, 0.0601212158203125, 0.060032863616943356, 0.06013750457763672, 0.0600552978515625, 0.0602872314453125, 0.060270591735839846, 0.06025139236450195, 0.06018124771118164, 0.06101103973388672, 0.0598127670288086, 0.05958041763305664, 0.05953542327880859, 0.059565185546875, 0.0596712646484375, 0.05958591842651367, 0.059509471893310545, 0.05965523147583008, 0.05954860687255859, 0.05976678466796875, 0.05985187149047851, 0.05966694259643555, 0.05976105499267578, 0.05979340744018555, 0.05989785766601562, 0.06011471939086914, 0.06017184066772461, 0.05991696166992187, 0.05993580627441406, 0.05987833786010742, 0.059856895446777345, 0.05960003280639648, 0.05965651321411133, 0.05957891082763672, 0.059630592346191405, 0.059696128845214844, 0.05984403228759766, 0.05988761520385742, 0.05981651306152344, 0.05975449752807617, 0.05975449752807617, 0.059824127197265625, 0.05994220733642578, 0.05984128189086914, 0.05984364700317383, 0.06004576110839844, 0.06000889587402344, 0.05990534210205078, 0.059778881072998044, 0.05996428680419922, 0.06020025634765625, 0.059996864318847654, 0.05994259262084961, 0.05987974548339844, 
0.06022300720214844, 0.060271072387695315, 0.060128929138183594, 0.06020249557495117, 0.06043843078613281, 0.06035270309448242, 0.06032665634155274, 0.060418048858642576, 0.060232929229736325, 0.06022633743286133, 0.0601492805480957, 0.06020479965209961, 0.06022422409057617, 0.060052894592285154, 0.06031216049194336, 0.06019071960449219, 0.060194816589355465, 0.06012723159790039, 0.060903072357177734, 0.05951523208618164, 0.05951811218261719, 0.059427680969238283, 0.05955331039428711, 0.05973993682861328, 0.05969136047363281, 0.059511135101318356, 0.05950883102416992, 0.059620479583740234, 0.0595849609375, 0.05963126373291015, 0.05977977752685547, 0.05972278213500976, 0.05978972625732422, 0.05990047836303711, 0.059957248687744144, 0.060020736694335934, 0.05986918258666992, 0.059856895446777345, 0.059658241271972653, 0.05993673706054688, 0.059819358825683594, 0.05967327880859375, 0.05969715118408203, 0.06008544158935547, 0.05991929626464844, 0.05987724685668945, 0.05978239822387695, 0.05976947021484375, 0.05986304092407226, 0.05991427230834961, 0.05986249542236328, 0.06009920120239258, 0.059957248687744144, 0.06000435256958008, 0.05974630355834961, 0.05988531112670899, 0.059733535766601564, 0.05986073684692383, 0.059945568084716794, 0.06008256149291992, 0.05990399932861328, 0.05994684982299805, 0.05989174270629883, 0.05993280029296875, 0.05992403030395508, 0.06014924621582031, 0.059988929748535154, 0.060129280090332034, 0.06014905548095703, 0.06028700637817383, 0.06021558380126953, 0.06020544052124024, 0.06014892959594727, 0.060106945037841794, 0.060134014129638674, 0.06013337707519531, 0.060284927368164064, 0.060343841552734374, 0.06004719924926758, 0.06012137603759766, 0.06017884826660156, 0.06097462463378906, 0.059591201782226565, 0.05959836959838867, 0.05954403305053711, 0.05955583953857422, 0.05953308868408203, 0.05951641464233398, 0.059593215942382816, 0.05956220626831055, 0.05972377777099609, 0.05953036880493164, 0.059582401275634765, 0.05962438583374023, 0.059625473022460934, 0.0596234245300293, 0.05998793411254883, 0.05993024063110351, 0.06008668899536133, 0.06005961608886719, 0.05995119857788086, 0.05977225494384766, 0.059824737548828125, 0.059858943939208986, 0.05971558380126953, 0.05970259094238281, 0.05990879821777344, 0.0597523193359375, 0.059848831176757815, 0.05987923049926758, 0.059803840637207034, 0.05977814483642578, 0.05994076919555664, 0.06004019165039062, 0.05993471908569336, 0.06004121780395508, 0.059985023498535156, 0.059865345001220704, 0.06017078399658203, 0.05995267105102539, 0.05982787322998047, 0.05997865676879883, 0.060210624694824216, 0.059928607940673825, 0.06016668701171875, 0.059877086639404296, 0.06003273773193359, 0.059908672332763674, 0.060104705810546874, 0.06019465637207031, 0.06032515335083008, 0.060133312225341795, 0.060122047424316406, 0.0600874252319336, 0.06030144119262695, 0.06003993606567383, 0.06009241485595703, 0.06012473678588867, 0.06015430450439453, 0.06016614532470703, 0.06013747024536133, 0.06026649475097656, 0.06043017578125, 0.060055713653564456]",tokens/s,16.69384988570603,, 
float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 46257 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B-Instruct,meta-llama/Meta-Llama-3-8B-Instruct,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for 
layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 106.12 MiB is free. Process 36108 has 14.63 GiB memory in use. Of the allocated memory 14.52 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.62 GiB. GPU 0 has a total capacity of 14.74 GiB of which 470.12 MiB is free. Process 33277 has 14.28 GiB memory in use. Of the allocated memory 14.16 GiB is allocated by PyTorch, and 1.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, 
bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 234.12 MiB is free. Process 500625 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-3.1-405B,meta-llama/Llama-3.1-405B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.62 GiB. GPU 0 has a total capacity of 14.74 GiB of which 470.12 MiB is free. Process 35181 has 14.28 GiB memory in use. Of the allocated memory 14.16 GiB is allocated by PyTorch, and 1.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1118, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 501790 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 896.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 598.12 MiB is free. Process 505642 has 14.15 GiB memory in use. Of the allocated memory 14.04 GiB is allocated by PyTorch, and 1.81 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = 
cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1118, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 502188 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 374, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 253, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 117, in __init__ self.q_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 127874 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1121, in __init__ self.embed_out = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 592.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 300.12 MiB is free. Process 194801 has 14.45 GiB memory in use. Of the allocated memory 14.33 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp69x4nnr8/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmphr0ihgqn/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,810.217472,14639.104,0.0,14243.856384,14221.3376,s,1,7.73187548828125,7.73187548828125,0.0,7.73187548828125,7.73187548828125,7.73187548828125,7.73187548828125,[7.73187548828125],,kWh,1.5289258250001583e-05,1.583704214532396e-06,5.663893419999809e-06,2.2536855884533787e-05,,MB,1176.8832,14737.670144,0.0,14329.839616,14290.688,s,10,2.162870071411133,0.2162870071411133,0.004696034653436737,0.2166063461303711,0.21995816955566405,0.22030833129882813,0.22058846069335938,"[0.20358029174804687, 0.21567996215820312, 0.21597410583496093, 0.21516217041015626, 0.21971200561523438, 0.2152815399169922, 0.21988035583496093, 0.21723858642578125, 0.21970256042480468, 0.2206584930419922]",tokens/s,1183.6124757738062,kWh,6.414262803894934e-06,7.073772449779575e-07,4.258982875782576e-06,1.1380622924655468e-05,tokens/kWh,22494375.017503712,MB,1197.019136,14752.350208,0.0,14344.51968,14290.69056,s,10,38.786872070312505,3.8786872070312506,0.006007349949207754,3.878911254882812,3.8850998779296875,3.8858206909179684,3.8863973413085935,"[3.86822705078125, 3.869866455078125, 3.8762998046875, 3.877141845703125, 3.875823974609375, 3.8806806640625, 3.884939697265625, 3.8824814453125, 3.88486962890625, 3.88654150390625]",tokens/s,16.24260906777792,kWh,0.0001135623148202716,1.2526194901041918e-05,7.550227537761758e-05,0.00020159078509893112,tokens/kWh,312514.2846637688,,s,630,38.78313149261476,0.06156052617875356,0.0005346627059583197,0.06147313499450684,0.06187085227966308,0.06209518413543701,0.0648411915588379,"[0.06455510711669922, 0.06257535934448243, 0.06161743927001953, 0.06133129501342773, 0.061139488220214845, 0.061219169616699216, 0.06158134460449219, 0.06133699035644531, 0.061130943298339846, 0.061174144744873045, 0.06128025436401367, 0.06136217498779297, 0.0612044792175293, 0.06106524658203125, 0.06133139038085938, 0.06137859344482422, 0.06139206314086914, 0.06144492721557617, 0.061488929748535155, 0.06133782577514649, 0.06118403244018555, 0.06113894271850586, 0.06117375946044922, 0.06125088119506836, 0.06095276641845703, 0.060787166595458984, 0.06112774276733399, 0.06120284652709961, 0.06129103851318359, 0.06122003173828125, 0.06128067016601563, 0.06118431854248047, 0.061415519714355465, 0.061571071624755856, 0.06145539093017578, 0.061434879302978515, 0.06140860748291015, 0.06142851257324219, 0.061468513488769534, 0.06149478530883789, 0.06123344039916992, 0.06148284912109375, 0.0613359375, 0.06105878448486328, 0.061403423309326174, 0.061265918731689455, 0.06129375839233398, 0.061341663360595704, 0.06132003021240234, 0.06149529647827148, 0.06137855911254883, 0.06113075256347656, 0.06128844833374023, 0.06150656127929687, 0.06157619094848633, 0.061556190490722654, 0.06131974411010742, 0.06125129699707031, 0.06128873443603516, 0.06154441452026367, 
0.06125721740722656, 0.061409248352050784, 0.061698497772216795, 0.06434092712402344, 0.062304031372070315, 0.06152624130249024, 0.061203712463378905, 0.06103731155395508, 0.06134912109375, 0.06124364852905274, 0.06118761444091797, 0.061062175750732424, 0.06103238296508789, 0.061246910095214845, 0.0609285774230957, 0.06118915176391602, 0.06125609588623047, 0.06131974411010742, 0.06176358413696289, 0.06177785491943359, 0.061806655883789065, 0.06161324691772461, 0.06151046371459961, 0.061158462524414064, 0.061487041473388675, 0.0612567024230957, 0.06126572799682617, 0.061120704650878904, 0.06131916809082031, 0.06121273422241211, 0.061240863800048825, 0.06153667068481445, 0.06138675308227539, 0.06132534408569336, 0.06155875015258789, 0.061679615020751956, 0.061949600219726564, 0.06196012878417969, 0.061442462921142575, 0.061431774139404295, 0.061354015350341795, 0.061317119598388675, 0.06128844833374023, 0.06114508819580078, 0.06104403305053711, 0.060979774475097656, 0.061040000915527345, 0.06113766479492187, 0.06111983871459961, 0.06120515060424805, 0.061298561096191403, 0.06165843200683594, 0.06163497543334961, 0.06179471969604492, 0.0616357421875, 0.06159600067138672, 0.06145280075073242, 0.06136217498779297, 0.061341697692871094, 0.06113455963134766, 0.06125187301635742, 0.06138880157470703, 0.06128787231445312, 0.06129439926147461, 0.06137420654296875, 0.06135903930664063, 0.06497280120849609, 0.06270124816894532, 0.0614475212097168, 0.06150243377685547, 0.06108918380737305, 0.06117161560058594, 0.06119625473022461, 0.06116835021972656, 0.06128752136230469, 0.061094814300537106, 0.06107046508789062, 0.06114355087280274, 0.061059455871582034, 0.06107340621948242, 0.0612086067199707, 0.06176716613769531, 0.062091136932373045, 0.06186044692993164, 0.062132225036621094, 0.06180835342407227, 0.06179459381103516, 0.06145347213745117, 0.06143881607055664, 0.06113894271850586, 0.06113203048706055, 0.061105983734130856, 0.06141753768920898, 0.061752193450927736, 0.06111641693115234, 0.06166732788085937, 0.061087745666503906, 0.06137247848510742, 0.06146451187133789, 0.06138265609741211, 0.061337406158447266, 0.06145248031616211, 0.06176358413696289, 0.061572128295898435, 0.061512321472167966, 0.06148745727539062, 0.06116556930541992, 0.06127734375, 0.06176387023925781, 0.0615810546875, 0.0616640625, 0.06151504135131836, 0.06169055938720703, 0.061642784118652344, 0.06146047973632812, 0.06162163162231445, 0.06157171249389649, 0.061626399993896484, 0.061961822509765625, 0.061585182189941405, 0.061354591369628904, 0.06123519897460938, 0.06117382431030274, 0.06122284698486328, 0.061427391052246094, 0.06141513442993164, 0.06147126388549805, 0.061599807739257814, 0.061499393463134766, 0.06485810852050782, 0.06280556869506836, 0.06165139389038086, 0.06140447998046875, 0.06123180770874023, 0.061483009338378906, 0.06133481597900391, 0.06139344024658203, 0.06123097610473633, 0.06165126419067383, 0.061233150482177735, 0.06119619369506836, 0.061102176666259764, 0.06113663864135742, 0.061502784729003904, 0.06193657684326172, 0.06183939361572265, 0.061894622802734375, 0.061911041259765626, 0.06155820846557617, 0.06128662490844727, 0.06136441421508789, 0.06156508636474609, 0.061443775177001954, 0.06119456100463867, 0.06146662521362305, 0.061387966156005856, 0.06130771255493164, 0.06130207824707031, 0.06130505752563477, 0.06170057678222656, 0.061906017303466794, 0.061792415618896486, 0.061750049591064456, 0.06167958450317383, 0.0618535041809082, 0.061475006103515625, 0.06122905731201172, 0.061325183868408205, 
0.06130476760864258, 0.06113299179077149, 0.06110620880126953, 0.06113481521606445, 0.06113894271850586, 0.061334945678710937, 0.06106534576416016, 0.06141331100463867, 0.06172079849243164, 0.06158367919921875, 0.06189056015014648, 0.06192079925537109, 0.061860321044921875, 0.06176540756225586, 0.06155699157714844, 0.06146214294433594, 0.061406623840332034, 0.06121696090698242, 0.06122918319702148, 0.06133414459228516, 0.061308895111083984, 0.06128236770629883, 0.06133695983886719, 0.061571678161621096, 0.06479977416992187, 0.0625940170288086, 0.06150147247314453, 0.061335521697998045, 0.06113689422607422, 0.06141929626464844, 0.061219039916992186, 0.061222942352294925, 0.0612856330871582, 0.06125641632080078, 0.06145769500732422, 0.061313758850097655, 0.06142761611938476, 0.06130239868164063, 0.0614354248046875, 0.06183417510986328, 0.06192127990722656, 0.06180454254150391, 0.06169715118408203, 0.06140607833862305, 0.06144316864013672, 0.06119247817993164, 0.06118390274047852, 0.06116985702514648, 0.061034271240234375, 0.061115135192871095, 0.061128097534179686, 0.06116364669799805, 0.061321727752685545, 0.06137200164794922, 0.061380992889404296, 0.06159769439697266, 0.06174720001220703, 0.06175539016723633, 0.06166678237915039, 0.06157561492919922, 0.06173295974731445, 0.061521953582763675, 0.06157104110717773, 0.061321247100830076, 0.061273887634277345, 0.061319103240966795, 0.061093441009521486, 0.06113542556762695, 0.061314849853515624, 0.061303009033203126, 0.06138044738769531, 0.06152582550048828, 0.06143600082397461, 0.06169843292236328, 0.061555774688720706, 0.0614901123046875, 0.06170217514038086, 0.06184483337402344, 0.06157513427734375, 0.061556640625, 0.06180326461791992, 0.061423614501953126, 0.06156224060058594, 0.06172713470458984, 0.06127017593383789, 0.061502655029296874, 0.06160406494140625, 0.06554598236083985, 0.06342863845825196, 0.06211376190185547, 0.06147715377807617, 0.06130265426635742, 0.061231006622314454, 0.06128271865844727, 0.06133964920043945, 0.06121065521240234, 0.06126793670654297, 0.061231136322021484, 0.061065185546875, 0.060947711944580076, 0.06101174545288086, 0.0613078727722168, 0.06163587188720703, 0.06171311950683594, 0.06195513534545898, 0.06190095901489258, 0.06173740768432617, 0.06152431869506836, 0.06144553756713867, 0.06122761535644531, 0.061284320831298825, 0.06124755096435547, 0.061269153594970704, 0.06145267105102539, 0.061560478210449215, 0.061413280487060545, 0.06125600051879883, 0.0614356803894043, 0.061494049072265626, 0.0617155532836914, 0.061578174591064454, 0.06181600189208984, 0.06171065521240234, 0.06176816177368164, 0.06145792007446289, 0.06157980728149414, 0.06180659103393555, 0.061689823150634766, 0.061394878387451175, 0.061454017639160155, 0.06149363327026367, 0.061400062561035154, 0.061321247100830076, 0.061468830108642576, 0.06176403045654297, 0.061731231689453124, 0.06167958450317383, 0.06171852874755859, 0.06180659103393555, 0.06156595230102539, 0.06129971313476563, 0.061287742614746094, 0.061440704345703125, 0.06165673446655273, 0.06137216186523437, 0.061603839874267576, 0.061506145477294924, 0.061530113220214844, 0.06177382278442383, 0.06155632019042969, 0.06560559844970704, 0.06320329666137696, 0.062007328033447266, 0.061638656616210936, 0.06126387023925781, 0.061445568084716795, 0.061432384490966795, 0.06149324798583984, 0.061290496826171874, 0.06150348663330078, 0.06137036895751953, 0.06156412887573242, 0.06140396881103516, 0.06138876724243164, 0.06174518585205078, 0.06177382278442383, 0.062182910919189455, 
0.062304737091064454, 0.061892608642578124, 0.06167552185058594, 0.0615546875, 0.06159564971923828, 0.06129401779174805, 0.06124604797363281, 0.061257694244384764, 0.06134486389160156, 0.061295520782470705, 0.061489151000976565, 0.0614093132019043, 0.06103241729736328, 0.0612720947265625, 0.0615230712890625, 0.061645278930664064, 0.06199318313598633, 0.06179024124145508, 0.061730209350585936, 0.061794784545898436, 0.061800704956054685, 0.061742271423339844, 0.06150537490844726, 0.06150783920288086, 0.06152265548706055, 0.061373950958251954, 0.06139136123657227, 0.06160179138183594, 0.061417217254638674, 0.06169830322265625, 0.06147622299194336, 0.06151628875732422, 0.06152764892578125, 0.06185219192504883, 0.06211174392700195, 0.061818878173828126, 0.061712383270263675, 0.06159097671508789, 0.06159622573852539, 0.061400192260742184, 0.061524063110351565, 0.061426464080810546, 0.061411136627197264, 0.06152211380004883, 0.06136835098266601, 0.06165654373168945, 0.06534742736816407, 0.06290099334716796, 0.06167705535888672, 0.06150502395629883, 0.0613869743347168, 0.06128924942016602, 0.061515777587890626, 0.061464576721191405, 0.06145024108886719, 0.061470718383789064, 0.061394622802734375, 0.06137887954711914, 0.06157721710205078, 0.06123110580444336, 0.0615464973449707, 0.061949600219726564, 0.06207113647460937, 0.06198409652709961, 0.061878944396972654, 0.0617938232421875, 0.06158982467651367, 0.06139644622802734, 0.061207168579101565, 0.06133107376098633, 0.06148912048339844, 0.061353759765625, 0.06118060684204102, 0.061532161712646485, 0.061650974273681644, 0.06144204711914063, 0.0618106575012207, 0.061738014221191406, 0.06177824020385742, 0.06173331069946289, 0.06177199935913086, 0.06186188888549805, 0.062000801086425784, 0.06152771377563476, 0.06146937561035156, 0.06128639984130859, 0.06111638259887695, 0.061408767700195314, 0.06132585525512695, 0.06141299057006836, 0.061480575561523435, 0.06132515335083008, 0.061440929412841794, 0.061443424224853514, 0.061750942230224606, 0.06185881423950195, 0.06175129699707031, 0.06155673599243164, 0.06183935928344726, 0.061868030548095705, 0.06157516860961914, 0.0614824333190918, 0.06139894485473633, 0.061303489685058596, 0.0614870719909668, 0.06132294464111328, 0.06129900741577148, 0.061265918731689455, 0.06146665573120117, 0.0649062728881836, 0.06303334426879882, 0.06179840087890625, 0.06136422348022461, 0.061351295471191405, 0.06111433410644531, 0.061507678985595705, 0.06155728149414062, 0.06154025650024414, 0.06163241577148437, 0.06170646286010742, 0.06116348648071289, 0.061192192077636716, 0.06120819091796875, 0.06150182342529297, 0.06216447830200195, 0.062063102722167966, 0.06198428726196289, 0.06195574569702148, 0.06160262298583984, 0.06147894287109375, 0.06144160079956055, 0.061604095458984376, 0.06153792190551758, 0.061680191040039065, 0.061360095977783205, 0.061306880950927733, 0.06150348663330078, 0.06137449645996094, 0.06153420639038086, 0.061582592010498045, 0.06157583999633789, 0.061827167510986325, 0.06184307098388672, 0.06171990585327149, 0.06151663970947266, 0.06147884750366211, 0.06125593566894531, 0.06162633514404297, 0.061503040313720704, 0.061637054443359374, 0.061638656616210936, 0.06147686386108398, 0.06153823852539062, 0.061554206848144534, 0.06155094528198242, 0.0617760009765625, 0.06164691162109375, 0.061655040740966796, 0.06196428680419922, 0.061851646423339846, 0.06170009613037109, 0.06160105514526367, 0.06165167999267578, 0.061580543518066404, 0.06167350387573242, 0.06150201416015625, 0.06155059051513672, 
0.06152207946777344, 0.061626014709472654, 0.06152816009521484, 0.06168787384033203, 0.061542625427246096, 0.06508134460449219, 0.0629227523803711, 0.06164067077636719, 0.06132499313354492, 0.061208927154541015, 0.06129199981689453, 0.06137500762939453, 0.0613642578125, 0.061423583984375, 0.061295967102050784, 0.061196094512939454, 0.06112956619262695, 0.06130278396606445, 0.061224960327148435, 0.06165507125854492, 0.06210355377197266, 0.06235123062133789, 0.062344993591308594, 0.06209977722167969, 0.061960193634033205, 0.06183695983886719, 0.06165055847167969, 0.06130467224121094, 0.06117875289916992, 0.06148303985595703, 0.06153740692138672, 0.06129135894775391, 0.061327392578125, 0.06129660797119141, 0.06133059310913086, 0.061730751037597655, 0.06169283294677735, 0.06190681457519531, 0.061914558410644534, 0.062111774444580076, 0.06208377456665039, 0.06176927947998047, 0.061488800048828125, 0.06176787185668945, 0.06163475036621094, 0.06146892929077148, 0.06158963012695313, 0.06142348861694336, 0.06163264083862305, 0.06146640014648438, 0.061266143798828124, 0.06144617462158203, 0.061431774139404295, 0.06168166351318359, 0.06182454299926758, 0.06192585754394531, 0.061917217254638675, 0.062098495483398436, 0.06185257720947265, 0.06182876968383789, 0.06162467193603516, 0.06157926559448242, 0.061669055938720706, 0.06150921630859375, 0.06137519836425781, 0.061869953155517576, 0.061580448150634765, 0.061496288299560546]",tokens/s,16.244175644248,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 461, in __init__ super().__init__(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 353, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 154530 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample 
outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1235, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1037, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 763, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 557, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpe4qxnbmr/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1174, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 894, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 507, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 436, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1219, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1020, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 737, in forward self_attn_output, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 655, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in 
run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 170977 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 270, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 85640 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 905.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 73576 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, 
**kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, 
softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmprtygwgxa/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl 
return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 1248, in __init__ self.transformer = FalconModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in __init__ self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 721, in __init__ self.self_attention = FALCON_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 587, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 366, in __init__ self.query_key_value = FalconLinear(self.hidden_size, qkv_out_dim, bias=config.bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 450.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 424.12 MiB is free. Process 204460 has 14.32 GiB memory in use. Of the allocated memory 14.20 GiB is allocated by PyTorch, and 6.16 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 1043, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 806, in forward outputs = block( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 513, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 463, in forward return self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neo/modeling_gpt_neo.py"", line 407, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 1039, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 816, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 551, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 384, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 757, in __init__ self.block_sparse_moe = MixtralSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in __init__ self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 672, in __init__ self.w3 = nn.Linear(self.hidden_dim, self.ffn_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 100.12 MiB is free. Process 180746 has 14.64 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 49.54 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 374, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 253, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 116, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 125049 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1262, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in <listcomp> [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 797, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 492, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 402, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 82510 has 14.73 GiB memory in use. Of the allocated memory 12.27 GiB is allocated by PyTorch, and 2.34 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 22909 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 718, in __init__ self.dense_4h_to_h = 
nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 176.12 MiB is free. Process 47324 has 14.57 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in 
_autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpz9_hp97x/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: XGLMForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpc13qw90f/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 270, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 70556 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 905.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained 
model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 234.12 MiB is free. Process 167903 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in 
run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 362.12 MiB is free. Process 174086 has 14.38 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 1.78 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward 
outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 76638 has 14.69 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 1.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 272, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 406.12 MiB is free. Process 79605 has 14.34 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 1.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in 
_check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmps0l0btel/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight 
= Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 160226 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 1.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ 
[LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 461, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 354, in __init__ self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 157398 has 14.74 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 90.39 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 461, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 353, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 165002 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp9vu1dak1/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in 
load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 717, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 188.12 MiB is free. Process 39248 has 14.55 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.45 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 755, in __init__ self.self_attn = MIXTRAL_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", 
line 349, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 72.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 38.12 MiB is free. Process 183719 has 14.70 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 41.65 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", 
line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 510, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 280, in __init__ super().__init__(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 201, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 96507 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs 
= self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) 
File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() 
File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 374, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 253, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 116, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 122260 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1000, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 729, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 551, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 88640 has 14.69 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 1.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1167, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 976, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 702, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 522, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 1069, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 881, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 604, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 479, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: FalconForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpwfv6r3_1/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 990, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 565, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 140, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 139417 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 788, in __init__ self.ffn = DbrxFFN(config=config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 765, in __init__ self.experts = DbrxExperts( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 704, in __init__ self.mlp = DbrxExpertGLU( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 682, in __init__ self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 110968 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 1012, in forward outputs = self.model.decoder( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 778, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 419, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 343, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1133, in __init__ self.model = StableLmModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in __init__ [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 688, in __init__ self.self_attn = ATTENTION_CLASSES[config._attn_implementation](config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 572, in __init__ super().__init__(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 339, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.use_qkv_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 14.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 190738 has 14.73 GiB memory in use. Of the allocated memory 14.54 GiB is allocated by PyTorch, and 78.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 743, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 368, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 175, in __init__ self.dense = nn.Linear(config.hidden_size, config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 42172 has 14.73 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 20.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking 
backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: CodeGenForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpxleuhnxy/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2048, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3008, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1178, in forward outputs = self.gpt_neox( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1007, in forward outputs = layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 758, in forward attention_layer_outputs = self.attention( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 438, in forward attn_weights = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,799.8464,8046.641152,0.0,7644.11904,7642.422272,s,1,10.8059189453125,10.8059189453125,0.0,10.8059189453125,10.8059189453125,10.8059189453125,10.8059189453125,[10.8059189453125],,kWh,7.0646694708254166e-06,7.715680940136139e-07,2.445279733996608e-06,1.0281517298835639e-05,,MB,1212.674048,8375.894016,0.0,7962.886144,7924.272128,s,10,7.128355773925781,0.7128355773925781,0.004382346843136146,0.7138260803222656,0.7171376586914062,0.7176173217773437,0.7180010522460937,"[0.7019710083007813, 0.7105671997070313, 0.7097369995117188, 0.7139281616210937, 0.7137239990234375, 0.715084228515625, 0.715468505859375, 0.7180969848632812, 0.7127476196289062, 0.7170310668945312]",tokens/s,359.12910090206367,kWh,2.0813442115278095e-05,2.293911531522424e-06,1.3820696241732822e-05,3.6928049888533344e-05,tokens/kWh,6932399.646684063,MB,1238.597632,8375.894016,0.0,7962.886144,7924.274688,s,10,24.784465576171872,2.478446557617187,0.01017907670704205,2.478732177734375,2.490047314453125,2.4920455078125,2.4936440625,"[2.479656494140625, 2.476998291015625, 2.494043701171875, 
2.489603271484375, 2.484344482421875, 2.477807861328125, 2.4860888671875, 2.461406982421875, 2.47116357421875, 2.46335205078125]",tokens/s,25.419148057228668,kWh,7.31932964243061e-05,8.074994322140187e-06,4.849996472586771e-05,0.00012976825547231403,tokens/kWh,485480.82711523393,,s,630,24.7817029037476,0.039336036355154855,0.0006747766386773948,0.03918395233154297,0.03984417457580566,0.04028822689056396,0.042018163642883306,"[0.040157470703125, 0.03941523361206055, 0.03913296127319336, 0.03910464096069336, 0.03916032028198242, 0.039064544677734375, 0.039297985076904296, 0.039288833618164064, 0.0390096321105957, 0.039623329162597656, 0.0390533447265625, 0.04061347198486328, 0.03924211120605469, 0.03916998291015625, 0.03897555160522461, 0.03899919891357422, 0.03900502395629883, 0.039106113433837894, 0.039139774322509764, 0.038880992889404296, 0.03902489471435547, 0.03898371124267578, 0.03895606231689453, 0.038859745025634766, 0.038803455352783206, 0.03896844863891601, 0.039066337585449216, 0.0391640625, 0.039114398956298826, 0.04039100646972656, 0.0415945930480957, 0.03919081497192383, 0.039346176147460936, 0.03931158447265625, 0.038763423919677735, 0.03925267028808594, 0.03927875137329102, 0.03889769744873047, 0.03914931106567383, 0.039226848602294924, 0.039099166870117184, 0.03935395050048828, 0.039454593658447265, 0.039448638916015626, 0.039381473541259766, 0.03913020706176758, 0.03964815902709961, 0.03951980972290039, 0.039247425079345706, 0.03945065689086914, 0.039119007110595704, 0.039543487548828124, 0.039501823425292966, 0.0393072624206543, 0.03982748794555664, 0.039530464172363285, 0.039583744049072264, 0.03936016082763672, 0.03934576034545898, 0.039895294189453125, 0.040317440032958986, 0.04011996841430664, 0.039413726806640625, 0.041088993072509766, 0.04015516662597656, 0.039809024810791016, 0.039585182189941406, 0.039666015625, 0.03955276870727539, 0.03966371154785156, 0.039786911010742186, 0.0396550407409668, 0.03975411224365234, 0.03907174301147461, 0.03904428863525391, 0.03913379287719727, 0.03932592010498047, 0.039118175506591794, 0.039035423278808594, 0.0392336654663086, 0.03879935836791992, 0.04081868743896484, 0.03899311828613281, 0.03908713531494141, 0.03927219009399414, 0.039057407379150394, 0.0398680648803711, 0.04203964614868164, 0.03918617630004883, 0.03942595291137695, 0.03916233444213867, 0.03882380676269531, 0.03960627365112305, 0.03943571090698242, 0.0391063346862793, 0.03899267196655273, 0.038989185333251956, 0.03877155303955078, 0.03885347366333008, 0.03902582550048828, 0.03931094360351563, 0.03882355117797852, 0.03889769744873047, 0.03933776092529297, 0.038984127044677734, 0.0395164794921875, 0.038921470642089846, 0.038855422973632814, 0.038944736480712894, 0.038822208404541016, 0.03920048141479492, 0.04008345413208008, 0.038846176147460935, 0.038846752166748044, 0.039397377014160156, 0.03916505432128906, 0.038881664276123044, 0.038814208984375, 0.03937212753295898, 0.03895977783203125, 0.039135040283203124, 0.03912928009033203, 0.03919257736206055, 0.038989601135253904, 0.03923580932617188, 0.03906969451904297, 0.0403087043762207, 0.03963078308105469, 0.03916419219970703, 0.03948540878295898, 0.03927676773071289, 0.03917372894287109, 0.03909872055053711, 0.0396416015625, 0.039397377014160156, 0.03919257736206055, 0.039102401733398434, 0.03914499282836914, 0.039449119567871095, 0.038967296600341796, 0.039145408630371095, 0.039110145568847655, 0.03896163177490235, 0.04890201568603516, 0.03927257537841797, 0.03928688049316406, 0.039208927154541016, 
0.03884239959716797, 0.03892355346679687, 0.0391421127319336, 0.03906268692016602, 0.039451488494873045, 0.03920896148681641, 0.03952025604248047, 0.03902054214477539, 0.03925823974609375, 0.03919011306762695, 0.03951792144775391, 0.039793216705322265, 0.03942569732666015, 0.03972745513916016, 0.039739391326904294, 0.040343551635742186, 0.04042924880981445, 0.03936492919921875, 0.03944038391113281, 0.03951433563232422, 0.039456607818603516, 0.03937216186523437, 0.03924828720092773, 0.03915792083740234, 0.03950166320800781, 0.03937203216552734, 0.040369056701660154, 0.03947315216064453, 0.03946086502075195, 0.03983564758300781, 0.040091487884521486, 0.03984400177001953, 0.03965971374511719, 0.039565120697021484, 0.03915087890625, 0.03939401626586914, 0.03997695922851562, 0.03939430236816406, 0.039375873565673826, 0.039403518676757815, 0.03953241729736328, 0.039280769348144534, 0.04021657562255859, 0.039731201171875, 0.039370750427246096, 0.039507518768310546, 0.0392872314453125, 0.03917391967773438, 0.0396778564453125, 0.039422271728515625, 0.04017366409301758, 0.040653919219970705, 0.03905532836914063, 0.03942697525024414, 0.03911407852172852, 0.03897516632080078, 0.039201663970947265, 0.03928067016601562, 0.039362655639648435, 0.03950991821289063, 0.03952640151977539, 0.039276702880859375, 0.04005257415771484, 0.0402405776977539, 0.03980563354492188, 0.03950783920288086, 0.03940496063232422, 0.039360416412353515, 0.03914617538452148, 0.039587329864501954, 0.03905177688598633, 0.03914307022094726, 0.03930707168579101, 0.040608287811279294, 0.04180275344848633, 0.03911167907714844, 0.03911203384399414, 0.039309024810791016, 0.03904560089111328, 0.03908201599121094, 0.039307712554931644, 0.039435489654541016, 0.042354560852050784, 0.03929180908203125, 0.039155616760253906, 0.03918380737304687, 0.039020351409912106, 0.039342369079589844, 0.039495361328125, 0.0394161262512207, 0.03915219116210938, 0.039032833099365234, 0.0389857292175293, 0.03927836990356445, 0.03984201431274414, 0.04026319885253906, 0.03900844955444336, 0.03978895950317383, 0.03974972915649414, 0.03917596817016602, 0.03923763275146484, 0.0392108154296875, 0.03936198425292969, 0.03914582443237305, 0.039459232330322266, 0.03981286239624023, 0.039069023132324215, 0.03932048034667969, 0.03921929550170898, 0.03906345748901367, 0.039052894592285156, 0.03908572769165039, 0.03916262435913086, 0.039255233764648435, 0.03904595184326172, 0.039041023254394534, 0.03896950531005859, 0.039008094787597654, 0.03889491271972656, 0.039064254760742184, 0.039213054656982424, 0.038833343505859375, 0.039041854858398437, 0.03969996643066406, 0.04018339157104492, 0.03963286590576172, 0.03969734573364258, 0.039409217834472654, 0.03945318222045899, 0.03922915267944336, 0.03921913528442383, 0.03966799926757812, 0.039346176147460936, 0.03935174560546875, 0.039843551635742186, 0.03942486572265625, 0.03929702377319336, 0.039272449493408204, 0.03988275146484375, 0.04196556854248047, 0.04045571136474609, 0.03962704086303711, 0.03929516983032227, 0.03945471954345703, 0.03905535888671875, 0.03939059066772461, 0.03922393417358398, 0.03922739028930664, 0.039144542694091795, 0.03905014419555664, 0.03918409729003906, 0.03902828979492187, 0.039102176666259765, 0.03955635070800781, 0.03997875213623047, 0.039962623596191404, 0.039395008087158206, 0.039164031982421875, 0.03924396896362305, 0.03887513732910156, 0.03918150329589844, 0.03932371139526367, 0.03895987319946289, 0.040896129608154294, 0.040204254150390625, 0.04018163299560547, 0.039787040710449216, 
0.039376895904541014, 0.039894847869873046, 0.03909036636352539, 0.039315647125244144, 0.03924153518676758, 0.03901136016845703, 0.03924272155761719, 0.03902054214477539, 0.039164031982421875, 0.03984572982788086, 0.03896934509277344, 0.039099422454833985, 0.03930828857421875, 0.03896249771118164, 0.0388287353515625, 0.03889152145385742, 0.038981632232666014, 0.039181377410888674, 0.0393135986328125, 0.03912160110473633, 0.03896662521362305, 0.03919340896606445, 0.03965327835083008, 0.039487327575683594, 0.040853374481201174, 0.03993999862670899, 0.039558815002441405, 0.03909641647338867, 0.039216991424560546, 0.04191721725463867, 0.03949574279785156, 0.039183391571044925, 0.03958262252807617, 0.039266273498535155, 0.03944963073730469, 0.03935075378417969, 0.03914601516723633, 0.039299072265625, 0.0391536636352539, 0.038860801696777345, 0.03902588653564453, 0.03956937789916992, 0.03933472061157227, 0.039330078125, 0.0392509765625, 0.03904787063598633, 0.039059104919433596, 0.03972275161743164, 0.03983388900756836, 0.039373119354248046, 0.039929855346679685, 0.03920281600952148, 0.039221248626708984, 0.03920048141479492, 0.039651103973388675, 0.03911660766601562, 0.038801151275634764, 0.03897187042236328, 0.038924190521240236, 0.039074142456054686, 0.038992095947265625, 0.03948262405395508, 0.03931961441040039, 0.038951614379882815, 0.04006707382202149, 0.039401248931884764, 0.03943670272827148, 0.03963679885864258, 0.03909222412109375, 0.03936240005493164, 0.039565471649169924, 0.03970361709594727, 0.03943420791625977, 0.03973603057861328, 0.039540992736816404, 0.039847934722900394, 0.03931955337524414, 0.03943423843383789, 0.03884646224975586, 0.039337982177734376, 0.04041680145263672, 0.04031740951538086, 0.039771137237548826, 0.03933695983886719, 0.03913312149047852, 0.039333953857421874, 0.042676223754882815, 0.03962607955932617, 0.03915753555297852, 0.03903372955322266, 0.039395328521728515, 0.03954441452026367, 0.039217567443847655, 0.03925955200195313, 0.03932940673828125, 0.039279678344726565, 0.03895238494873047, 0.039131454467773434, 0.03930742263793945, 0.03900995254516602, 0.03909619140625, 0.03895507049560547, 0.038863262176513674, 0.03892428970336914, 0.03926403045654297, 0.039250144958496096, 0.03917184066772461, 0.03905152130126953, 0.039237022399902344, 0.03906569671630859, 0.03908208084106445, 0.039152030944824216, 0.03899596786499023, 0.03945692825317383, 0.04296073532104492, 0.03943833541870117, 0.03925196838378906, 0.039273632049560546, 0.04001801681518555, 0.039364574432373046, 0.03997734451293945, 0.03940176010131836, 0.03907097625732422, 0.039119743347167966, 0.03924339294433594, 0.03894857788085938, 0.03918627166748047, 0.04045235061645508, 0.03962588882446289, 0.039344734191894534, 0.039610336303710934, 0.03916719818115234, 0.03907030487060547, 0.0395, 0.039618560791015625, 0.03883152008056641, 0.039129695892333984, 0.03895500946044922, 0.03894476699829102, 0.03878911972045898, 0.03901459121704102, 0.03895439910888672, 0.03908659362792969, 0.039468608856201175, 0.03882223892211914, 0.03886284637451172, 0.03954687881469727, 0.03876454544067383, 0.0388342399597168, 0.03868467330932617, 0.03867180633544922, 0.03877097702026367, 0.03893404769897461, 0.039000160217285154, 0.0389590072631836, 0.03879801559448242, 0.03887923049926758, 0.03868467330932617, 0.03879935836791992, 0.039032608032226565, 0.03868889617919922, 0.038755489349365235, 0.038529983520507814, 0.03879638290405273, 0.03881788635253906, 0.03884115219116211, 0.03858809661865235, 0.038844417572021485, 
0.038717342376708985, 0.03902032089233398, 0.03891878509521484, 0.03922739028930664, 0.042342464447021486, 0.03913926315307617, 0.03888742446899414, 0.03892019271850586, 0.039008255004882815, 0.0392374382019043, 0.03889075088500977, 0.03898463821411133, 0.03875145721435547, 0.03894147109985351, 0.03909235382080078, 0.03889491271972656, 0.03893100738525391, 0.039337982177734376, 0.03891798400878906, 0.03916563034057617, 0.03911644744873047, 0.03921184158325195, 0.04023561477661133, 0.039110206604003904, 0.03973756790161133, 0.04131350326538086, 0.039801185607910156, 0.041677631378173825, 0.04211820983886719, 0.03896556854248047, 0.03901599884033203, 0.03913593673706055, 0.03899820709228516, 0.039061054229736325, 0.03898822402954102, 0.038914337158203124, 0.03909580612182617, 0.03883030319213867, 0.03872972869873047, 0.038937599182128906, 0.03889254379272461, 0.03881167984008789, 0.03911027145385742, 0.0388570556640625, 0.03932707214355469, 0.03912303924560547, 0.03904975891113281, 0.039215137481689456, 0.038857791900634764, 0.039029056549072266, 0.03935891342163086, 0.03906083297729492, 0.03891491317749023, 0.038936416625976564, 0.03891532897949219, 0.03894160079956055, 0.038950912475585936, 0.03881769561767578, 0.038629470825195314, 0.03878620910644531, 0.03893939208984375, 0.03895260620117187, 0.04175423812866211, 0.03914425659179688, 0.03900368118286133, 0.03907984161376953, 0.039123390197753904, 0.03894489669799805, 0.039179454803466796, 0.03889209747314453, 0.03875455856323242, 0.03878297424316406, 0.03888102340698242, 0.03920684814453125, 0.038805824279785156, 0.03916620635986328, 0.038901504516601564, 0.03917824172973633, 0.0391833610534668, 0.039553760528564456, 0.03919203186035156, 0.03922822570800781, 0.03894681549072266, 0.03886633682250976, 0.038988670349121096, 0.03998294448852539, 0.03912825775146484, 0.03930620956420899, 0.0391038703918457, 0.03949023818969727, 0.039129024505615236, 0.03924582290649414, 0.03914092636108398, 0.039065601348876954, 0.03904966354370117, 0.038742015838623044, 0.03880681610107422, 0.03903667068481445, 0.03885564804077148, 0.038727199554443356, 0.038805473327636716, 0.03886540985107422, 0.03923353576660156, 0.03891129684448242, 0.03885065460205078, 0.03888291168212891, 0.038894718170166015, 0.03878895950317383, 0.03899395370483399, 0.03885260772705078, 0.03871868896484375, 0.03893119812011719, 0.03959414291381836, 0.03926595306396485, 0.03904534530639649, 0.039051136016845706, 0.03989465713500977, 0.039029247283935545, 0.03912704086303711, 0.03914547348022461, 0.03903692626953125, 0.03888886260986328, 0.038840831756591795, 0.03919801712036133, 0.03891279983520508, 0.03894451141357422, 0.03936486434936524, 0.039041023254394534, 0.03923369598388672, 0.03932153701782227, 0.0388955192565918, 0.03899184036254883, 0.039131168365478516, 0.03885990524291992, 0.03906649780273438, 0.03902054214477539, 0.03888332748413086, 0.03879052734375, 0.03907648086547852, 0.03868876647949219, 0.03877068710327149, 0.03923555374145508, 0.039546337127685544, 0.03908256149291992, 0.03879116821289062, 0.038975486755371096, 0.03996672058105469, 0.04082406234741211]",tokens/s,25.421981792249216,, 
float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,799.997952,8046.641152,0.0,7644.11904,7642.422272,s,1,9.52233203125,9.52233203125,0.0,9.52233203125,9.52233203125,9.52233203125,9.52233203125,[9.52233203125],,kWh,6.425510816664352e-06,6.90792374393327e-07,2.9305578999952453e-06,1.0046861091052925e-05,,MB,1333.98528,8377.991168,0.0,7962.886144,7924.272128,s,10,1.1137695007324218,0.11137695007324219,0.0029297192218080375,0.1117605094909668,0.11385457916259765,0.11391312942504882,0.11395996963500976,"[0.10338703918457032, 0.11021523284912109, 0.11265853118896485, 0.11150643157958984, 0.11177689361572266, 0.1139716796875, 0.11091072082519532, 0.11174412536621094, 0.11384156799316406, 0.11375727844238281]",tokens/s,2298.5007205858374,kWh,3.3828295606321704e-06,3.729990118166601e-07,2.248060546850612e-06,6.0038891192994425e-06,tokens/kWh,42639028.6218129,MB,1367.79776,8380.08832,0.0,7962.886144,7924.274688,s,10,25.381887695312493,2.53818876953125,0.010450145752360251,2.5356971435546876,2.5487151611328125,2.5550666870117187,2.5601479077148435,"[2.535957275390625, 2.519610595703125, 2.532236083984375, 2.5448359375, 2.561418212890625, 2.53621044921875, 2.53543701171875, 2.5345244140625, 2.5473037109375, 2.53435400390625]",tokens/s,24.820848928283127,kWh,7.411609871186834e-05,8.174199167025911e-06,4.9123702772750025e-05,0.00013141400065164427,tokens/kWh,479400.97468763683,,s,630,25.379004028320324,0.040284133378286215,0.0005839811221674359,0.04016435050964355,0.04077804565429687,0.041188852310180664,0.042781143531799314,"[0.040941566467285154, 0.04026572799682617, 0.04020579147338867, 0.04012406539916992, 0.0402215690612793, 0.04065193557739258, 0.04039561462402344, 0.040796161651611325, 0.04161494445800781, 0.04036886215209961, 0.039945919036865236, 0.039782398223876955, 0.042014049530029296, 0.03977283096313477, 0.03993804931640625, 0.04060160064697266, 0.04005039978027344, 0.03983388900756836, 0.03986016082763672, 0.039895233154296876, 0.03999321746826172, 0.039868160247802736, 0.04017996978759766, 0.040683521270751956, 0.04073267364501953, 0.03990911865234375, 0.040548606872558596, 0.04384928131103515, 0.04016582489013672, 0.039929855346679685, 0.04006911849975586, 0.040623966217041015, 0.04041129684448242, 0.04047990417480469, 0.040053600311279296, 0.03985120010375977, 0.03993881607055664, 0.03980047988891602, 0.03980534362792969, 0.03981926345825195, 0.03977830505371094, 0.03995651245117188, 0.039917537689208984, 0.039806846618652345, 0.03987673568725586, 0.04319027328491211, 0.03974044799804687, 0.039863262176513675, 0.039874561309814455, 0.03971686553955078, 0.03991551971435547, 0.03988479995727539, 0.039778144836425784, 0.039876766204833984, 0.03987033462524414, 0.03959961700439453, 0.039583518981933595, 0.04004131317138672, 0.040128288269042967, 0.04009187316894531, 0.03948953628540039, 0.03945676803588867, 0.042254337310791014, 0.04063619232177734, 
0.03976988983154297, 0.03988524627685547, 0.04007526397705078, 0.039708671569824217, 0.039723007202148435, 0.039669761657714846, 0.040030208587646485, 0.039962047576904296, 0.039784416198730466, 0.0403702392578125, 0.039825950622558594, 0.03984320068359375, 0.03983219146728516, 0.03982044982910156, 0.03991446304321289, 0.03999321746826172, 0.040349727630615236, 0.04057241439819336, 0.039835231781005856, 0.03967884826660156, 0.039766014099121096, 0.039806976318359374, 0.03993190383911133, 0.04011212921142578, 0.03986022567749024, 0.040052734375, 0.040202239990234374, 0.04023199844360351, 0.03998195266723633, 0.03996268844604492, 0.03989692687988281, 0.039764129638671875, 0.03976512145996094, 0.03992256164550781, 0.03999951934814453, 0.039884769439697265, 0.04007526397705078, 0.04008755111694336, 0.03994617462158203, 0.03995449447631836, 0.039927806854248044, 0.04059913635253906, 0.040274337768554686, 0.03998844909667969, 0.039815521240234374, 0.040024127960205075, 0.04000396728515625, 0.03997625732421875, 0.03993158340454102, 0.03983052825927735, 0.04003430557250977, 0.039755775451660154, 0.04010598373413086, 0.039897087097167966, 0.03970387268066406, 0.039813152313232424, 0.03964313507080078, 0.039672481536865235, 0.040158336639404296, 0.04123328018188477, 0.04039680099487305, 0.040062976837158204, 0.04090774536132812, 0.039808673858642576, 0.04058492660522461, 0.04094966506958008, 0.04004118347167969, 0.03991961669921875, 0.03983078384399414, 0.03983958435058594, 0.040190879821777346, 0.04101932907104492, 0.04013881683349609, 0.03986227035522461, 0.040113441467285155, 0.039798622131347654, 0.03993484878540039, 0.03992166519165039, 0.03995647811889649, 0.03997491073608399, 0.040209503173828126, 0.04026460647583008, 0.040291423797607424, 0.04048169708251953, 0.04036403274536133, 0.04029030227661133, 0.040103935241699216, 0.04020163345336914, 0.04018646240234375, 0.04069580841064453, 0.04391526412963867, 0.04013011169433594, 0.03996102523803711, 0.04001551818847656, 0.0397479362487793, 0.039739391326904294, 0.039814239501953126, 0.03987753677368164, 0.039794048309326174, 0.04003424072265625, 0.039871166229248044, 0.039757823944091795, 0.03974758529663086, 0.040226814270019534, 0.03991487884521484, 0.039915134429931644, 0.040487422943115234, 0.040159744262695314, 0.03996876907348633, 0.04011187362670898, 0.040039710998535157, 0.0406148796081543, 0.041198688507080077, 0.04029942321777344, 0.040742271423339846, 0.04052646255493164, 0.04020159912109375, 0.03998374557495117, 0.039779552459716795, 0.039733505249023436, 0.03984406280517578, 0.03982118225097656, 0.03998764801025391, 0.0402553596496582, 0.03982144165039062, 0.040974143981933595, 0.04028716659545899, 0.04019401550292969, 0.040232990264892576, 0.03993190383911133, 0.03994214248657227, 0.04003583908081055, 0.040339519500732425, 0.039997886657714844, 0.03990323257446289, 0.04002406311035156, 0.04077756881713867, 0.04068745422363281, 0.04026809692382813, 0.0402938232421875, 0.040176128387451174, 0.04003641510009766, 0.040210304260253904, 0.04045177459716797, 0.04017606353759766, 0.04025126266479492, 0.04060172653198242, 0.040406238555908205, 0.0400305290222168, 0.04045849609375, 0.0403109130859375, 0.04025920104980469, 0.03999177551269531, 0.04042860794067383, 0.04048915100097656, 0.040255329132080075, 0.040008609771728515, 0.039766014099121096, 0.03982521438598633, 0.04002016067504883, 0.04036399841308594, 0.0410747184753418, 0.04230348968505859, 0.04031488037109375, 0.04031203079223633, 0.04039350509643555, 0.04021593475341797, 
0.040051551818847654, 0.039870529174804686, 0.04027142333984375, 0.041056415557861325, 0.04044105529785156, 0.040280864715576174, 0.04039475250244141, 0.040153087615966795, 0.04050739288330078, 0.040509441375732425, 0.040750240325927736, 0.04059427261352539, 0.0409804801940918, 0.04042099380493164, 0.04045452880859375, 0.04015923309326172, 0.04043571090698242, 0.041758846282958985, 0.04045606231689453, 0.04096108627319336, 0.0409978256225586, 0.04145593643188476, 0.04062051010131836, 0.040478622436523434, 0.04029040145874024, 0.0403900146484375, 0.04027833557128906, 0.040386878967285156, 0.04030803298950195, 0.04146419143676758, 0.04207583999633789, 0.040727169036865234, 0.04036198425292969, 0.040738815307617186, 0.040359935760498046, 0.04025139236450195, 0.04048831939697266, 0.0414071044921875, 0.041226303100585934, 0.04163372802734375, 0.040288257598876956, 0.040195934295654295, 0.040605857849121095, 0.04015923309326172, 0.040217697143554686, 0.040145950317382814, 0.04027305603027344, 0.040083168029785156, 0.039956737518310546, 0.04011494445800781, 0.0405401611328125, 0.04149657440185547, 0.044523521423339846, 0.040676959991455076, 0.04022108840942383, 0.040048641204833986, 0.040361377716064455, 0.040261280059814455, 0.040368896484375, 0.04044819259643555, 0.040255199432373046, 0.04052406311035156, 0.04045209503173828, 0.04062822341918945, 0.04152115249633789, 0.04078387069702148, 0.0411278076171875, 0.04116083145141602, 0.042788864135742184, 0.04069580841064453, 0.040288257598876956, 0.040525825500488284, 0.04025548934936524, 0.04016537475585937, 0.04006083297729492, 0.04004671859741211, 0.040812191009521485, 0.04059782409667969, 0.040525089263916014, 0.04007190322875977, 0.040210430145263674, 0.04003839874267578, 0.04128287887573242, 0.04037907028198242, 0.04103168106079102, 0.040443904876708986, 0.04044595336914063, 0.04037017440795899, 0.040417057037353515, 0.04095395278930664, 0.04043993759155273, 0.04020537567138672, 0.03988780975341797, 0.04024934387207031, 0.040243198394775394, 0.04000937652587891, 0.04004489517211914, 0.039767585754394534, 0.04019862365722656, 0.04036723327636719, 0.040336254119873044, 0.040204288482666016, 0.04021657562255859, 0.0401162223815918, 0.040022014617919925, 0.04001190567016601, 0.040079231262207034, 0.03990300750732422, 0.03996444702148438, 0.039704544067382816, 0.03994416046142578, 0.04009830474853516, 0.04003430557250977, 0.04057478332519531, 0.04037263870239258, 0.04050307083129883, 0.040253440856933595, 0.0404186897277832, 0.040299137115478514, 0.04027801513671875, 0.040097793579101565, 0.03990512084960938, 0.039870624542236326, 0.04015683364868164, 0.04002143859863281, 0.04014172744750977, 0.041093120574951174, 0.04094976043701172, 0.04031078338623047, 0.041009151458740234, 0.040989856719970706, 0.04023353576660156, 0.04003644943237305, 0.04027609634399414, 0.040048702239990235, 0.040013568878173825, 0.04044825744628906, 0.0407256965637207, 0.04016006469726562, 0.040081409454345705, 0.04012607955932617, 0.040083839416503904, 0.04047872161865235, 0.040066913604736326, 0.03999964904785156, 0.040220672607421876, 0.039964672088623046, 0.041641311645507814, 0.04026777648925781, 0.04020905685424805, 0.040521728515625, 0.040196094512939456, 0.04276224136352539, 0.04012790298461914, 0.040142974853515624, 0.03997465515136719, 0.0399508171081543, 0.04005094528198242, 0.040194080352783206, 0.04086092758178711, 0.04199087905883789, 0.04010166549682617, 0.04052540969848633, 0.040135295867919925, 0.040457534790039065, 0.04029715347290039, 0.04021561431884765, 
0.04023820877075195, 0.04014060974121094, 0.04015718460083008, 0.04010400009155273, 0.040312767028808594, 0.04039680099487305, 0.03996672058105469, 0.039997440338134765, 0.040235008239746094, 0.04051968002319336, 0.0401162223815918, 0.0399251823425293, 0.03974921417236328, 0.04008649444580078, 0.04000972747802734, 0.04014659118652344, 0.04011452865600586, 0.04012579345703125, 0.040215198516845706, 0.03990937423706055, 0.04026367950439453, 0.04013875198364258, 0.03986598587036133, 0.04006028747558594, 0.03989811325073242, 0.03989820861816406, 0.04006800079345703, 0.040130561828613284, 0.04026777648925781, 0.039907329559326174, 0.04037222290039062, 0.040040321350097656, 0.03992300796508789, 0.04023583984375, 0.03992166519165039, 0.040187904357910156, 0.03995647811889649, 0.039974014282226564, 0.04024614334106445, 0.039839263916015624, 0.04000611114501953, 0.04054143905639648, 0.04032131195068359, 0.04117683029174805, 0.04025347137451172, 0.040200351715087894, 0.04032566452026367, 0.04006707382202149, 0.04001968002319336, 0.04006531143188476, 0.040290016174316406, 0.03992195129394531, 0.039997440338134765, 0.04004569625854492, 0.039908222198486325, 0.040036128997802733, 0.040419551849365236, 0.04002406311035156, 0.03982665634155273, 0.03988355255126953, 0.03988067245483398, 0.039851520538330076, 0.04029801559448242, 0.041335807800292966, 0.042897407531738284, 0.040010974884033206, 0.039932830810546875, 0.03992972946166992, 0.040254848480224606, 0.040124961853027344, 0.03987875366210938, 0.03980492782592773, 0.03986841583251953, 0.03974348831176758, 0.03978400039672852, 0.04002860641479492, 0.040088958740234375, 0.04141324615478516, 0.040049793243408204, 0.0400208625793457, 0.03994214248657227, 0.04010383987426758, 0.039882846832275394, 0.03982745742797852, 0.03966566467285156, 0.039823360443115234, 0.0396954231262207, 0.04081350326538086, 0.03998905563354492, 0.040045921325683596, 0.0410810546875, 0.04037686538696289, 0.04004463958740234, 0.040275966644287106, 0.04044787216186523, 0.04010111999511719, 0.039887744903564455, 0.04006092834472656, 0.04023295974731445, 0.04013667297363281, 0.03989686584472656, 0.04031635284423828, 0.04127008056640625, 0.04124176025390625, 0.04020105743408203, 0.041211265563964844, 0.041398239135742185, 0.040209503173828126, 0.040172447204589845, 0.04012988662719726, 0.04106492614746094, 0.040585407257080076, 0.04028153610229492, 0.04033388900756836, 0.04084143829345703, 0.040619361877441404, 0.040387008666992186, 0.040392704010009765, 0.04049273681640625, 0.039901504516601564, 0.04018175888061523, 0.04074496078491211, 0.040103935241699216, 0.04655513763427734, 0.040953857421875, 0.04019200134277344, 0.040237056732177735, 0.04016332626342774, 0.040017822265625, 0.04008518218994141, 0.04006758499145508, 0.04002191925048828, 0.04008550262451172, 0.03986758422851563, 0.04027065658569336, 0.03998310470581055, 0.04022880172729492, 0.040710208892822265, 0.04116275024414062, 0.0406769905090332, 0.04030835342407227, 0.04021120071411133, 0.04036179351806641, 0.04010412979125977, 0.04012236785888672, 0.04006707382202149, 0.039924991607666015, 0.040069889068603516, 0.04035347366333008, 0.04015052795410156, 0.040366817474365234, 0.04033340835571289, 0.040296382904052734, 0.040162849426269534, 0.040044670104980466, 0.04039107131958008, 0.04034099197387695, 0.040231422424316404, 0.04035174560546875, 0.040226814270019534, 0.04039267349243164, 0.04050048065185547, 0.04024163055419922, 0.04027014541625976, 0.040191200256347655, 0.04027881622314453, 0.040681472778320314, 
0.04030044937133789, 0.040615680694580075, 0.04109878540039062, 0.04045395278930664, 0.04038937759399414, 0.040153087615966795, 0.04036608123779297, 0.040101886749267575, 0.04016332626342774, 0.040323070526123043, 0.04040499114990234, 0.04007731246948242, 0.039976383209228514, 0.04016185760498047, 0.04023295974731445, 0.040003135681152345, 0.04020492935180664, 0.039875743865966796, 0.039919456481933596, 0.03989775848388672, 0.04023712158203125, 0.04013475036621094, 0.040322494506835935, 0.040213054656982425, 0.040013599395751956, 0.040057056427001955, 0.04063436889648438, 0.040120319366455076, 0.04016659164428711, 0.040426303863525394, 0.04046438217163086, 0.040474624633789064, 0.04057027053833008, 0.04005744171142578, 0.04033126449584961, 0.04071014404296875, 0.040002750396728515, 0.04000787353515625, 0.03985696029663086, 0.040183616638183595, 0.04018560028076172, 0.03998300933837891, 0.0401693115234375, 0.0407823371887207, 0.04048896026611328, 0.040038272857666014, 0.04048499298095703, 0.04017939376831055, 0.04018368148803711, 0.04048940658569336, 0.040269824981689455, 0.040310432434082034, 0.0401343994140625, 0.04056092834472656, 0.04014521789550781, 0.04025548934936524, 0.039952030181884764, 0.040290721893310545, 0.04010508728027344, 0.04026860809326172, 0.039929855346679685, 0.04000262451171875, 0.040018207550048826, 0.03995510482788086, 0.0401080322265625]",tokens/s,24.823669175393402,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,883.580928,3447.586816,0.0,3045.064704,2842.846208,s,1,7.4159697265625,7.4159697265625,0.0,7.4159697265625,7.4159697265625,7.4159697265625,7.4159697265625,[7.4159697265625],,kWh,6.189378475013048e-06,6.70126793552225e-07,1.8450014759863187e-06,8.704506744551592e-06,,MB,1307.598848,3623.747584,0.0,3208.64256,2982.452736,s,10,0.3403420181274414,0.03403420181274414,0.0013332341762685943,0.033708992004394533,0.03466038665771484,0.03624656181335449,0.037515501937866215,"[0.03783273696899414, 0.03341984176635742, 0.03409612655639648, 0.03300508880615234, 0.033621505737304686, 0.03292422485351563, 0.034307903289794925, 0.033796478271484375, 0.03343942260742187, 0.03389868927001953]",tokens/s,7521.845272250238,kWh,1.1796272121807028e-06,1.3009251465400757e-07,7.856581106047771e-07,2.0953778374394875e-06,tokens/kWh,122173669.79161486,MB,1340.616704,3623.747584,0.0,3208.64256,2982.455296,s,10,12.024080200195312,1.2024080200195315,0.006990480143182823,1.2019589233398438,1.2105512084960937,1.2113985290527345,1.212076385498047,"[1.212245849609375, 1.1979422607421875, 1.2079669189453126, 1.196303466796875, 1.2003634033203125, 1.203554443359375, 1.1907943115234374, 1.2103629150390625, 1.19519921875, 
1.209347412109375]",tokens/s,52.39486010661892,kWh,3.5070653710735914e-05,3.8658401161081896e-06,2.305163873699328e-05,6.198813256383738e-05,tokens/kWh,1016323.56701697,,s,630,12.02196740341187,0.019082487941923597,0.0003478571803843443,0.019001967430114747,0.019425436973571776,0.019584565067291258,0.0205242243385315,"[0.01960550308227539, 0.0191997127532959, 0.01923849678039551, 0.019204063415527342, 0.01940553665161133, 0.019335168838500977, 0.01926527976989746, 0.02037580871582031, 0.019200000762939453, 0.019167104721069337, 0.01913654327392578, 0.019048416137695312, 0.019464319229125976, 0.019076480865478515, 0.019591808319091797, 0.019143808364868165, 0.018964479446411133, 0.018946432113647462, 0.01914112091064453, 0.01935081672668457, 0.019346143722534178, 0.01905254364013672, 0.019459871292114257, 0.019134559631347657, 0.019139904022216797, 0.01955308723449707, 0.01973606491088867, 0.019511808395385744, 0.019412992477416992, 0.019188831329345703, 0.0194303035736084, 0.019763200759887696, 0.019322879791259767, 0.019161216735839842, 0.01915180778503418, 0.019211200714111327, 0.01923244857788086, 0.019044416427612305, 0.019499008178710937, 0.01899545669555664, 0.01927168083190918, 0.019238815307617188, 0.02004764747619629, 0.01902838325500488, 0.018976192474365234, 0.018954687118530274, 0.01913644790649414, 0.019178752899169923, 0.019213151931762696, 0.018948095321655273, 0.018962272644042967, 0.01954217529296875, 0.01888649559020996, 0.01894211196899414, 0.018859615325927736, 0.0189956169128418, 0.019023775100708008, 0.018999008178710936, 0.018901376724243163, 0.019304447174072266, 0.019451904296875, 0.019079168319702147, 0.018898944854736328, 0.019102176666259765, 0.01899728012084961, 0.01895625686645508, 0.018933088302612304, 0.018794975280761718, 0.01896681594848633, 0.019120031356811524, 0.01881292724609375, 0.018974720001220705, 0.01923686408996582, 0.019222496032714843, 0.019017759323120116, 0.018917375564575196, 0.018952287673950196, 0.018912511825561522, 0.018965152740478514, 0.018903039932250978, 0.018951839447021484, 0.018968927383422853, 0.018915327072143554, 0.020555776596069338, 0.02068217658996582, 0.018936384201049806, 0.01883750343322754, 0.01901955223083496, 0.01886636734008789, 0.01895747184753418, 0.01885795211791992, 0.018951072692871093, 0.01881497573852539, 0.01897881507873535, 0.019144767761230468, 0.019139711380004882, 0.01897145652770996, 0.01892483139038086, 0.01884566307067871, 0.01885875129699707, 0.01882931137084961, 0.018822208404541015, 0.019104703903198243, 0.019156991958618166, 0.01908940887451172, 0.01906483268737793, 0.0187675838470459, 0.018839168548583984, 0.018843711853027342, 0.018737760543823243, 0.018739200592041014, 0.019208063125610353, 0.019283775329589845, 0.01909494400024414, 0.019063648223876954, 0.019126176834106445, 0.01890934371948242, 0.018937856674194335, 0.018984960556030273, 0.018820255279541016, 0.01874006462097168, 0.018871551513671876, 0.01882713508605957, 0.01906915283203125, 0.018794368743896485, 0.01902057647705078, 0.019080255508422853, 0.019086111068725587, 0.018976160049438476, 0.019305120468139647, 0.019085119247436524, 0.018982751846313477, 0.019154815673828124, 0.018963008880615233, 0.018921056747436524, 0.018810432434082033, 0.018830047607421876, 0.019027328491210936, 0.0197578239440918, 0.018929664611816405, 0.01920614433288574, 0.018808511734008788, 0.019054912567138673, 0.0188272647857666, 0.019130367279052735, 0.019386240005493164, 0.019437696456909178, 0.019271072387695314, 0.01955081558227539, 
0.01921433639526367, 0.01924710464477539, 0.01927987289428711, 0.019117311477661134, 0.019091583251953124, 0.01910028839111328, 0.018905088424682616, 0.018927616119384767, 0.018993152618408202, 0.019109888076782225, 0.018997312545776367, 0.019085248947143554, 0.019462303161621095, 0.01971183967590332, 0.019901695251464843, 0.019323648452758788, 0.01907436752319336, 0.018987071990966796, 0.018989696502685546, 0.019116031646728517, 0.019021823883056642, 0.019277631759643556, 0.019424896240234375, 0.020260992050170897, 0.019301984786987306, 0.019475391387939453, 0.019160991668701173, 0.01906185531616211, 0.01908627128601074, 0.018958303451538087, 0.01894326400756836, 0.0189201602935791, 0.01908684730529785, 0.018914976119995118, 0.018938720703125, 0.019257408142089844, 0.019595199584960938, 0.019347455978393553, 0.01922831916809082, 0.01926780891418457, 0.019279647827148437, 0.01957088088989258, 0.01910483169555664, 0.01901388740539551, 0.019037952423095705, 0.0189752311706543, 0.018902912139892578, 0.018849952697753906, 0.01894438362121582, 0.01914019203186035, 0.01902351951599121, 0.018922496795654296, 0.01908051109313965, 0.01895439910888672, 0.018810976028442384, 0.018966047286987305, 0.018856639862060546, 0.018847808837890626, 0.01878006362915039, 0.018742624282836913, 0.01891600036621094, 0.018843679428100585, 0.018711679458618163, 0.018825599670410156, 0.018761663436889647, 0.019206111907958984, 0.018913888931274415, 0.019406848907470704, 0.019390464782714844, 0.0192423038482666, 0.019114688873291017, 0.018991104125976564, 0.018941280364990234, 0.018843679428100585, 0.01947097587585449, 0.01893574333190918, 0.018919488906860352, 0.018834943771362304, 0.018845407485961915, 0.01884035110473633, 0.01882111930847168, 0.018761728286743166, 0.01910163116455078, 0.019007007598876954, 0.01891587257385254, 0.01884160041809082, 0.01881292724609375, 0.018909183502197266, 0.018739200592041014, 0.019086368560791017, 0.01927471923828125, 0.01922047996520996, 0.018898784637451174, 0.018821279525756837, 0.01908940887451172, 0.018909183502197266, 0.018850847244262694, 0.01909267234802246, 0.01882499122619629, 0.01877743911743164, 0.019996639251708984, 0.018735584259033204, 0.018855583190917968, 0.01884979248046875, 0.018917375564575196, 0.01904844856262207, 0.019310272216796875, 0.019351264953613282, 0.01926313591003418, 0.019188671112060546, 0.019040063858032228, 0.018804927825927735, 0.0188939208984375, 0.01925212860107422, 0.018831615447998048, 0.018788095474243163, 0.01884569549560547, 0.01918976020812988, 0.01895225524902344, 0.018816959381103514, 0.019140640258789064, 0.018892480850219728, 0.018802976608276366, 0.018972671508789063, 0.01904844856262207, 0.018947135925292968, 0.01878112030029297, 0.018876415252685547, 0.01900339126586914, 0.018855072021484374, 0.018814815521240234, 0.01887948799133301, 0.018898752212524413, 0.01883359909057617, 0.01895212745666504, 0.018835519790649412, 0.018886655807495118, 0.018780160903930664, 0.018850847244262694, 0.0192521915435791, 0.01948988723754883, 0.019398847579956056, 0.01903436851501465, 0.018932191848754883, 0.01910335922241211, 0.01946771240234375, 0.019266080856323243, 0.019091712951660157, 0.0192325439453125, 0.018897279739379883, 0.01898428726196289, 0.01880303955078125, 0.01883734321594238, 0.018921951293945312, 0.01904844856262207, 0.019134464263916014, 0.01897881507873535, 0.01897382354736328, 0.019005727767944337, 0.01920790481567383, 0.020732864379882813, 0.019943359375, 0.019066879272460938, 0.01903615951538086, 0.019005311965942382, 
0.018917312622070314, 0.019060800552368164, 0.019309375762939455, 0.019523584365844726, 0.01947216033935547, 0.01926576042175293, 0.019359743118286133, 0.019383552551269532, 0.019219200134277345, 0.019254560470581054, 0.02060576057434082, 0.019947423934936523, 0.01918476867675781, 0.01904934310913086, 0.0192774715423584, 0.019122112274169923, 0.019022207260131836, 0.019003423690795898, 0.019070976257324217, 0.019126272201538085, 0.01901081657409668, 0.018840288162231444, 0.018837024688720703, 0.018745567321777342, 0.01873539161682129, 0.01898700714111328, 0.018871423721313476, 0.018748287200927735, 0.018904640197753907, 0.01903455924987793, 0.019199167251586914, 0.01901148796081543, 0.01879747200012207, 0.018869407653808595, 0.019571552276611327, 0.01880268859863281, 0.018902463912963866, 0.018737407684326173, 0.018690015792846678, 0.01867401695251465, 0.01884979248046875, 0.018875776290893556, 0.018760063171386718, 0.018753791809082033, 0.01878835105895996, 0.01874652862548828, 0.01993199920654297, 0.020790752410888673, 0.02061955261230469, 0.0189453125, 0.018820064544677734, 0.019320831298828126, 0.019025856018066407, 0.018808448791503906, 0.018657312393188477, 0.019140703201293945, 0.019220767974853517, 0.018808128356933594, 0.018772703170776367, 0.018919424057006837, 0.018948095321655273, 0.0187936954498291, 0.019041055679321288, 0.018780160903930664, 0.01893391990661621, 0.019070335388183594, 0.018839872360229493, 0.01890358352661133, 0.018859167098999024, 0.019891040802001953, 0.018882080078125, 0.01877244758605957, 0.018716672897338867, 0.018892799377441406, 0.018746559143066405, 0.018699071884155274, 0.01874483108520508, 0.01873094367980957, 0.018727487564086914, 0.018788000106811524, 0.018872671127319336, 0.01908527946472168, 0.01922870445251465, 0.018931711196899414, 0.018880512237548826, 0.018851743698120118, 0.018796640396118162, 0.01871574401855469, 0.018776960372924804, 0.018733055114746093, 0.0187857608795166, 0.019011199951171873, 0.018776224136352538, 0.018826112747192383, 0.018807968139648436, 0.01901798439025879, 0.019175615310668945, 0.018910783767700196, 0.01887708854675293, 0.01888060760498047, 0.018808832168579103, 0.018892799377441406, 0.018876352310180665, 0.018833471298217774, 0.018750816345214843, 0.018831903457641602, 0.01872435188293457, 0.0193686408996582, 0.01911596870422363, 0.018978208541870118, 0.018860063552856444, 0.01882374382019043, 0.018882560729980468, 0.018735103607177735, 0.018769792556762695, 0.018829471588134767, 0.018798559188842774, 0.01884569549560547, 0.01909884834289551, 0.019155744552612305, 0.0190382080078125, 0.018968095779418947, 0.018917856216430665, 0.018919424057006837, 0.01887846374511719, 0.018755168914794923, 0.018991424560546876, 0.01894607925415039, 0.018952287673950196, 0.019013504028320312, 0.018991104125976564, 0.018874368667602538, 0.01923232078552246, 0.02023468780517578, 0.019799968719482423, 0.019802207946777343, 0.01954368019104004, 0.019302783966064455, 0.019695295333862304, 0.019238304138183594, 0.019145631790161134, 0.018968128204345704, 0.01891539192199707, 0.018876800537109373, 0.019019775390625, 0.01904640007019043, 0.019062591552734376, 0.019046560287475586, 0.018931232452392577, 0.018864639282226564, 0.018917375564575196, 0.018851839065551757, 0.01883340835571289, 0.019283967971801756, 0.018884735107421877, 0.018796415328979493, 0.01873744010925293, 0.01879216003417969, 0.020078847885131836, 0.02044697570800781, 0.018873760223388672, 0.01890121650695801, 0.01914531135559082, 0.01957571220397949, 
0.019479423522949218, 0.019475679397583007, 0.019326847076416017, 0.01916102409362793, 0.019116928100585937, 0.019011199951171873, 0.018969184875488283, 0.01892134475708008, 0.018964351654052733, 0.019136640548706056, 0.019113536834716796, 0.023068832397460937, 0.019290399551391602, 0.019109888076782225, 0.018893823623657227, 0.01887718391418457, 0.018816831588745118, 0.018892927169799803, 0.018872095108032227, 0.01900588798522949, 0.019287263870239258, 0.019108320236206056, 0.01907084846496582, 0.018962976455688476, 0.019177471160888672, 0.019386016845703125, 0.01919171142578125, 0.019271488189697265, 0.019364288330078125, 0.019565664291381835, 0.019358240127563476, 0.019132640838623045, 0.019058048248291017, 0.019028608322143554, 0.019186784744262695, 0.019140512466430663, 0.01901798439025879, 0.01897756767272949, 0.01889254379272461, 0.018888128280639647, 0.018866975784301757, 0.018838752746582033, 0.01881167984008789, 0.018822336196899415, 0.018875200271606444, 0.01904800033569336, 0.01914703941345215, 0.018868383407592774, 0.018961759567260743, 0.01888435173034668, 0.018833919525146483, 0.01905289649963379, 0.018837152481079103, 0.01886867141723633, 0.0189716796875, 0.018758079528808595, 0.018893312454223633, 0.018883968353271486, 0.018619007110595703, 0.018765792846679688, 0.01889436721801758, 0.019027456283569336, 0.018893760681152345, 0.018834751129150392, 0.018774240493774415, 0.01872140884399414, 0.01880463981628418, 0.01887027168273926, 0.018747167587280275, 0.0188438720703125, 0.018860031127929687, 0.019019903182983397, 0.018918815612792968, 0.018874847412109375, 0.01887433624267578, 0.018702367782592773, 0.01937785530090332, 0.019013952255249024, 0.019039968490600585, 0.018876991271972655, 0.01895599937438965, 0.01906255912780762, 0.01898313522338867, 0.018751487731933594, 0.018920448303222655, 0.01887116813659668, 0.019737855911254883, 0.01908367919921875, 0.01897724723815918, 0.018888927459716796, 0.019251007080078125, 0.019093311309814454, 0.019001695632934572, 0.01890265655517578, 0.0189116153717041, 0.018925567626953126, 0.018910207748413087, 0.018882976531982423, 0.019274335861206054, 0.01925324821472168, 0.01888684844970703, 0.019080127716064453, 0.019002239227294923, 0.018925439834594725, 0.018864191055297852, 0.018853952407836914, 0.019171327590942384, 0.019150848388671874, 0.019316736221313476, 0.019490816116333007, 0.019532032012939453, 0.019536991119384766, 0.01943414306640625, 0.019355648040771483, 0.01929792022705078, 0.019123743057250977, 0.019170143127441405, 0.01902083206176758, 0.018995647430419923, 0.019157535552978517, 0.019125312805175782, 0.0190794563293457, 0.019384992599487304, 0.019128320693969726, 0.019171136856079102, 0.01910799980163574, 0.019719392776489257, 0.019294240951538085, 0.019159839630126952, 0.018997247695922852, 0.01923481559753418, 0.019144447326660156, 0.019286272048950195, 0.019146751403808594, 0.01917305564880371, 0.019206464767456053, 0.01931180763244629, 0.020247360229492188, 0.019457536697387694, 0.019355295181274414, 0.019505664825439452, 0.019229024887084962, 0.019214591979980468, 0.01920524787902832, 0.018994815826416017, 0.019112960815429687, 0.01915228843688965, 0.019034719467163085, 0.019134496688842772, 0.019401792526245118, 0.019554752349853516, 0.01920047950744629]",tokens/s,52.404068224407624,, 
float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,883.314688,3447.586816,0.0,3045.064704,2842.846208,s,1,7.27972607421875,7.27972607421875,0.0,7.27972607421875,7.27972607421875,7.27972607421875,7.27972607421875,[7.27972607421875],,kWh,5.9193053208370355e-06,6.2582366923507e-07,2.0352794060096713e-06,8.580408396081776e-06,,MB,1339.404288,3646.816256,0.0,3229.61408,2982.452736,s,10,0.43298176193237303,0.0432981761932373,0.0011073369945902936,0.04303446578979492,0.04362301368713379,0.0450688024520874,0.046225433464050295,"[0.04651459121704102, 0.04304624176025391, 0.04268492889404297, 0.042840065002441405, 0.042525184631347655, 0.04326047897338867, 0.043022689819335935, 0.04325619125366211, 0.0425296630859375, 0.04330172729492188]",tokens/s,5912.489220273079,kWh,1.4488039630364121e-06,1.5977669475319217e-07,9.565367938315837e-07,2.5651174516211882e-06,tokens/kWh,99800498.35074984,MB,1378.418688,3646.816256,0.0,3229.61408,2982.455296,s,10,14.71218017578125,1.471218017578125,0.006520559730111671,1.4740194091796874,1.476150732421875,1.4782819702148438,1.4799869604492188,"[1.4740574951171874, 1.4804132080078125, 1.4681180419921875, 1.474802490234375, 1.4563297119140626, 1.467184326171875, 1.4661009521484376, 1.4755155029296876, 1.4756771240234374, 1.4739813232421874]",tokens/s,42.82166153980952,kWh,4.241440238488713e-05,4.677934810174127e-06,2.6915763281769904e-05,7.400810047683117e-05,tokens/kWh,851258.1676072426,,s,630,14.709816682815553,0.023348915369548494,0.00040903208967215624,0.023234016418457033,0.02376182060241699,0.02399247236251831,0.025019298324584965,"[0.024095424652099608, 0.02422528076171875, 0.02356038475036621, 0.023521600723266603, 0.023207935333251953, 0.023742431640625, 0.023303199768066406, 0.023393184661865234, 0.023347103118896484, 0.023217599868774415, 0.023009632110595705, 0.023098943710327148, 0.022962303161621095, 0.02297916793823242, 0.02298271942138672, 0.022987936019897463, 0.02306550407409668, 0.02297222328186035, 0.023044288635253905, 0.023046112060546874, 0.023289920806884766, 0.02316080093383789, 0.023267328262329103, 0.023310335159301757, 0.024853759765625, 0.023407360076904297, 0.02327347183227539, 0.023166303634643556, 0.0231364803314209, 0.02513145637512207, 0.02504265594482422, 0.023132448196411134, 0.023191007614135742, 0.023061023712158204, 0.02304572868347168, 0.023062944412231445, 0.023084447860717772, 0.02314089584350586, 0.02375071907043457, 0.023994272232055663, 0.023384159088134765, 0.023351104736328124, 0.023215839385986328, 0.023275199890136718, 0.023769535064697266, 0.023206399917602538, 0.023225343704223633, 0.024037792205810548, 0.02312828826904297, 0.02350921630859375, 0.023783424377441405, 0.0234998722076416, 0.023403423309326172, 0.023871488571166992, 0.023664608001708984, 0.02326736068725586, 0.023142400741577147, 0.023076864242553712, 0.023250944137573244, 0.023193599700927735, 0.023209983825683594, 0.023105087280273436, 
0.023014944076538087, 0.024061952590942383, 0.02385856056213379, 0.023790079116821287, 0.023765119552612304, 0.02351103973388672, 0.023350431442260743, 0.02327987289428711, 0.023290239334106445, 0.023150815963745117, 0.02311404800415039, 0.023557823181152345, 0.024864511489868166, 0.023730016708374022, 0.023412607192993165, 0.023187999725341798, 0.02326016044616699, 0.023170047760009766, 0.02308639907836914, 0.023107423782348632, 0.023208032608032225, 0.02336844825744629, 0.023690528869628906, 0.024120031356811525, 0.023762943267822266, 0.02426470375061035, 0.0248907527923584, 0.023954240798950196, 0.023698463439941406, 0.0236592960357666, 0.023425024032592775, 0.023573951721191408, 0.02323308753967285, 0.023626815795898436, 0.023194656372070313, 0.023199647903442384, 0.023023616790771483, 0.0231911678314209, 0.022957759857177733, 0.02307551956176758, 0.023183263778686524, 0.023047807693481446, 0.023062944412231445, 0.023064640045166014, 0.02309939193725586, 0.023093248367309572, 0.023178367614746093, 0.023224735260009767, 0.0234454402923584, 0.02352387237548828, 0.023680320739746095, 0.02365305519104004, 0.0234716796875, 0.023353792190551757, 0.023138303756713868, 0.02462735939025879, 0.026070880889892577, 0.023389471054077147, 0.02323481559753418, 0.023585151672363282, 0.023037439346313478, 0.022937824249267578, 0.022959936141967775, 0.02347270393371582, 0.023922048568725585, 0.02360153579711914, 0.02388960075378418, 0.023599679946899415, 0.023302143096923827, 0.02329100799560547, 0.02363465690612793, 0.023621631622314454, 0.023834783554077147, 0.02365644836425781, 0.02348441505432129, 0.023347200393676756, 0.02326268768310547, 0.023181407928466798, 0.02316057586669922, 0.022901439666748048, 0.022977792739868164, 0.02293836784362793, 0.0230645751953125, 0.02296406364440918, 0.02343747138977051, 0.02339583969116211, 0.023223840713500976, 0.023098335266113282, 0.022994943618774414, 0.022927263259887695, 0.023011423110961913, 0.022953983306884765, 0.02299068832397461, 0.023036064147949217, 0.023408447265625, 0.023439552307128905, 0.023603071212768556, 0.023509120941162108, 0.02330316734313965, 0.023085376739501954, 0.023171775817871092, 0.023068672180175782, 0.023178560256958008, 0.022961919784545898, 0.02299728012084961, 0.023146144866943358, 0.023403135299682618, 0.02347660827636719, 0.02322256088256836, 0.023254175186157227, 0.023290399551391602, 0.023140384674072267, 0.023131776809692382, 0.023017568588256834, 0.02317750358581543, 0.023224159240722655, 0.023087263107299805, 0.02341481590270996, 0.023179040908813477, 0.02295801544189453, 0.023040191650390625, 0.023310400009155272, 0.023214080810546874, 0.02323187255859375, 0.02316703987121582, 0.02311635208129883, 0.026261503219604493, 0.024090400695800783, 0.023916543960571288, 0.023700544357299805, 0.023467775344848632, 0.023526464462280273, 0.02337887954711914, 0.02321414375305176, 0.023008703231811523, 0.02309119987487793, 0.02325315284729004, 0.023206239700317384, 0.023069856643676757, 0.022963039398193358, 0.02310553550720215, 0.023326784133911132, 0.02381590461730957, 0.023375520706176756, 0.023730527877807616, 0.023635520935058593, 0.023192512512207032, 0.023215904235839843, 0.023316415786743164, 0.02306662368774414, 0.023345151901245118, 0.023418880462646483, 0.024772607803344726, 0.023791648864746093, 0.02359209632873535, 0.023521408081054688, 0.023347007751464845, 0.023200639724731444, 0.0233482551574707, 0.02313520050048828, 0.023338464736938475, 0.02329654312133789, 0.023135616302490235, 0.023424863815307617, 
0.023366559982299806, 0.023553056716918944, 0.023598112106323243, 0.02347398376464844, 0.02322163200378418, 0.02340662384033203, 0.02323516845703125, 0.023076351165771485, 0.023060640335083007, 0.023104927062988282, 0.023034624099731445, 0.02307030487060547, 0.02365222358703613, 0.023675615310668946, 0.023605247497558594, 0.024393503189086913, 0.02336956787109375, 0.02323289680480957, 0.02308915138244629, 0.02324684715270996, 0.02354297637939453, 0.02405049514770508, 0.02328780746459961, 0.023102783203125, 0.023124895095825194, 0.02367980766296387, 0.023071008682250975, 0.02328780746459961, 0.023433216094970705, 0.023222335815429686, 0.023197696685791015, 0.023502784729003905, 0.024141632080078124, 0.023377695083618165, 0.023148672103881836, 0.023165216445922853, 0.023212032318115236, 0.02307891273498535, 0.023123327255249022, 0.02303241539001465, 0.023119903564453124, 0.023068672180175782, 0.02291302490234375, 0.022930816650390626, 0.02307904052734375, 0.022944095611572266, 0.023031103134155274, 0.023661407470703125, 0.022978464126586915, 0.022972543716430663, 0.022978368759155272, 0.022945951461791993, 0.02294169616699219, 0.023011327743530274, 0.022988800048828126, 0.022803808212280275, 0.022936223983764648, 0.02289459228515625, 0.023027711868286133, 0.023242944717407225, 0.02330124855041504, 0.02339023971557617, 0.02325974464416504, 0.02300716781616211, 0.0231507511138916, 0.023221216201782226, 0.023112415313720703, 0.023027999877929688, 0.023157920837402344, 0.022901599884033202, 0.023066463470458983, 0.022968000411987304, 0.02295814323425293, 0.022917215347290038, 0.023036224365234375, 0.022996063232421874, 0.023118623733520506, 0.023040031433105467, 0.023221439361572265, 0.02316713523864746, 0.023382783889770508, 0.023207935333251953, 0.02310348892211914, 0.02303401565551758, 0.023002912521362304, 0.022917184829711914, 0.0229453125, 0.023006879806518554, 0.023038272857666017, 0.023761695861816406, 0.023799808502197265, 0.023563743591308594, 0.0232042236328125, 0.02319491195678711, 0.023465919494628906, 0.02341164779663086, 0.02325299263000488, 0.023104799270629882, 0.023300895690917967, 0.023472063064575194, 0.023348608016967774, 0.02343302345275879, 0.02339036750793457, 0.02328848075866699, 0.023236608505249022, 0.023052288055419923, 0.02309939193725586, 0.023001087188720702, 0.02331443214416504, 0.023961599349975587, 0.023558143615722657, 0.02369331169128418, 0.02381110382080078, 0.023470495223999025, 0.023267904281616212, 0.023158784866333007, 0.023216127395629883, 0.02323404884338379, 0.023208255767822265, 0.02315430450439453, 0.023151168823242186, 0.02312518310546875, 0.023009599685668944, 0.023083072662353515, 0.023026111602783204, 0.023203840255737306, 0.023009248733520508, 0.023178848266601562, 0.023169471740722657, 0.023064191818237306, 0.0231200008392334, 0.023052032470703126, 0.023031679153442383, 0.02346691131591797, 0.023746271133422852, 0.023653535842895507, 0.023581375122070314, 0.023417087554931642, 0.02323961639404297, 0.02319171142578125, 0.023128095626831054, 0.02303670310974121, 0.02312396812438965, 0.023195840835571288, 0.02320595169067383, 0.023307327270507813, 0.023607551574707033, 0.023165376663208007, 0.023087167739868165, 0.022996831893920898, 0.022986879348754884, 0.023185375213623047, 0.02327756881713867, 0.02414985656738281, 0.02357907295227051, 0.023258207321166992, 0.02314847946166992, 0.02325551986694336, 0.023544031143188475, 0.02349056053161621, 0.02353971290588379, 0.023463935852050782, 0.023178976058959962, 0.023131711959838867, 
0.02337843132019043, 0.02328566360473633, 0.023269695281982423, 0.023135520935058593, 0.023096031188964843, 0.023048063278198243, 0.0232938232421875, 0.02319385528564453, 0.023830528259277343, 0.02311174392700195, 0.023046079635620116, 0.023250080108642577, 0.023425888061523438, 0.023233983993530275, 0.023259552001953124, 0.02317430305480957, 0.023126976013183594, 0.0229683837890625, 0.02296780776977539, 0.022912544250488283, 0.02309391975402832, 0.023359647750854494, 0.023568544387817383, 0.023371328353881837, 0.023458112716674806, 0.023354848861694335, 0.02325161552429199, 0.022982656478881838, 0.023012704849243164, 0.02295052719116211, 0.02325302314758301, 0.022962175369262695, 0.023334495544433592, 0.023085056304931642, 0.023087520599365235, 0.0229532470703125, 0.02324883270263672, 0.02292620849609375, 0.022959871292114256, 0.023324064254760742, 0.023226335525512697, 0.023918399810791014, 0.023786624908447265, 0.02361737632751465, 0.023541759490966797, 0.02315817642211914, 0.023179872512817383, 0.02320207977294922, 0.023299808502197265, 0.023183359146118163, 0.023154272079467773, 0.02388096046447754, 0.02373110389709473, 0.023416831970214845, 0.02332464027404785, 0.02347216033935547, 0.023584543228149416, 0.023586271286010742, 0.023424928665161132, 0.02330300712585449, 0.023195199966430664, 0.023196096420288085, 0.023180896759033204, 0.023028127670288084, 0.023990272521972656, 0.023642112731933593, 0.02334649658203125, 0.02320454406738281, 0.023183359146118163, 0.023118911743164064, 0.023124927520751952, 0.023488512039184572, 0.023287200927734376, 0.023598976135253906, 0.023104480743408203, 0.023131904602050782, 0.023101663589477538, 0.023371551513671877, 0.02369536018371582, 0.0234967041015625, 0.02351043128967285, 0.023714399337768553, 0.023749664306640626, 0.023651296615600587, 0.023604671478271486, 0.02350752067565918, 0.023937023162841797, 0.023436800003051757, 0.02322470474243164, 0.023173248291015625, 0.02327552032470703, 0.023390207290649414, 0.0236810245513916, 0.02355200004577637, 0.023371776580810546, 0.023184703826904296, 0.023202495574951174, 0.023185407638549805, 0.023159040451049804, 0.023265024185180665, 0.023205888748168944, 0.023635871887207033, 0.0243570556640625, 0.023749984741210938, 0.023616031646728517, 0.023506975173950194, 0.023228416442871092, 0.02340355110168457, 0.023450592041015624, 0.02327552032470703, 0.02317424011230469, 0.023149471282958984, 0.023173120498657225, 0.023389280319213866, 0.025873247146606444, 0.02342051124572754, 0.02335580825805664, 0.023152191162109374, 0.023144895553588868, 0.02326911926269531, 0.02309350395202637, 0.023355392456054686, 0.023375871658325196, 0.023556095123291015, 0.02496211242675781, 0.023401407241821288, 0.023463935852050782, 0.02538047981262207, 0.023159168243408204, 0.023087200164794923, 0.022951103210449218, 0.022949600219726564, 0.023006208419799806, 0.023147743225097658, 0.02309926414489746, 0.023139232635498046, 0.023183359146118163, 0.02294940757751465, 0.02304047966003418, 0.023015424728393553, 0.023060480117797853, 0.023093248367309572, 0.02332262420654297, 0.02450361633300781, 0.02466454315185547, 0.023793216705322265, 0.023499263763427734, 0.023568672180175783, 0.024200735092163087, 0.023154336929321288, 0.02303046417236328, 0.023203712463378906, 0.023083135604858397, 0.02329804801940918, 0.023334911346435547, 0.022999040603637694, 0.023551488876342775, 0.023031455993652344, 0.023100255966186523, 0.022984703063964843, 0.022990175247192383, 0.02385955238342285, 0.023344863891601564, 0.022937376022338866, 
0.02297734451293945, 0.02282921600341797, 0.023101280212402344, 0.02287808036804199, 0.02305241584777832, 0.02289206314086914, 0.023529151916503906, 0.02378566360473633, 0.02391110420227051, 0.02346499252319336, 0.02407244873046875, 0.023843456268310546, 0.023795711517333985, 0.02375040054321289, 0.023419584274291992, 0.023635135650634766, 0.02319215965270996, 0.023384511947631834, 0.023281728744506836, 0.023178976058959962, 0.023080543518066408, 0.023251359939575195, 0.023394304275512694, 0.023489824295043947, 0.023433952331542968, 0.023412384033203126, 0.023561824798583986, 0.02357529640197754, 0.02364409637451172, 0.023412799835205077, 0.02329167938232422, 0.023166271209716798, 0.023157024383544923, 0.02320400047302246, 0.023082592010498046, 0.02353651237487793, 0.024069664001464843, 0.02366716766357422, 0.023627775192260742, 0.02393497657775879, 0.023137792587280274, 0.02321241569519043, 0.023062463760375976, 0.02311315155029297, 0.022979328155517578, 0.023214080810546874, 0.02344960021972656, 0.02344723129272461, 0.023429439544677733, 0.023349248886108398, 0.023256607055664062, 0.025977279663085936, 0.023719968795776366, 0.02346518325805664, 0.023437952041625975, 0.02305878448486328, 0.023045120239257814, 0.023067264556884765, 0.0233822078704834, 0.023324512481689454, 0.023423040390014648, 0.023214176177978517, 0.0230645751953125, 0.02305433654785156, 0.023074623107910155, 0.02322643280029297, 0.0234333438873291, 0.02381932830810547, 0.02318841552734375, 0.023108896255493165, 0.02317180824279785, 0.023059648513793947, 0.02314463996887207, 0.023380544662475584, 0.02401257514953613, 0.023410240173339845]",tokens/s,42.8285418903952,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: Phi3ForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,885.383168,6089.998336,0.0,5687.476224,5685.679104,s,1,7.2154111328125,7.2154111328125,0.0,7.2154111328125,7.2154111328125,7.2154111328125,7.2154111328125,[7.2154111328125],,kWh,6.160286183376229e-06,6.490118287632258e-07,2.160001727979921e-06,8.969299740119376e-06,,MB,1185.0752,6324.87936,0.0,5911.871488,5850.451456,s,10,2.0647819976806643,0.2064781997680664,0.0035611014775419956,0.20754294586181643,0.20964447326660157,0.20971802673339843,0.20977686950683594,"[0.19737107849121094, 0.20962812805175782, 0.2097915802001953, 0.20860684204101562, 0.205314208984375, 0.20371104431152343, 0.20913235473632813, 0.206140869140625, 0.20749098205566407, 0.20759490966796876]",tokens/s,1239.8403332049613,kWh,5.818667245016424e-06,6.416971915618303e-07,3.87657172870538e-06,1.0336936165283636e-05,tokens/kWh,24765558.76002893,MB,1190.346752,6324.87936,0.0,5911.871488,5850.454016,s,10,16.424772827148438,1.6424772827148437,0.0017984575255992113,1.6431479492187502,1.6441755249023438,1.644232598876953,1.6442782580566406,"[1.6395599365234375, 1.63903662109375, 1.6421165771484374, 1.642842041015625, 1.6415816650390624, 1.6442896728515626, 
1.6438729248046875, 1.643856689453125, 1.643453857421875, 1.644162841796875]",tokens/s,38.35669489191812,kWh,4.8175107581231194e-05,5.313461340400184e-06,3.198365140389735e-05,8.547222032552873e-05,tokens/kWh,737081.5893170759,,s,630,16.422411247253407,0.02606731944008479,0.00038848892341905757,0.025981664657592773,0.026220637512207032,0.02637586679458618,0.028588566150665285,"[0.028468671798706054, 0.027068960189819337, 0.026368000030517577, 0.026068992614746093, 0.02588640022277832, 0.025976320266723633, 0.02580768013000488, 0.025823232650756835, 0.0258024959564209, 0.026196191787719727, 0.025781375885009766, 0.025818016052246092, 0.025825279235839844, 0.025866239547729493, 0.025870336532592773, 0.025851743698120117, 0.02588038444519043, 0.025903455734252928, 0.025827327728271485, 0.02580611228942871, 0.025866464614868166, 0.025907583236694336, 0.025895040512084962, 0.02582863998413086, 0.025948896408081054, 0.025896671295166016, 0.025925920486450194, 0.025858047485351563, 0.025917695999145507, 0.02593152046203613, 0.025984895706176757, 0.02587174415588379, 0.025901088714599608, 0.025905055999755858, 0.02587526321411133, 0.026025983810424806, 0.026206207275390626, 0.026265535354614258, 0.026219743728637696, 0.026180063247680664, 0.026198400497436523, 0.02607689666748047, 0.026077472686767578, 0.026019840240478515, 0.026074207305908204, 0.026026912689208984, 0.025990495681762694, 0.025941888809204103, 0.0259486083984375, 0.025964895248413087, 0.025948160171508788, 0.02632294464111328, 0.02591107177734375, 0.026087167739868165, 0.0260546875, 0.02593222427368164, 0.025961727142333985, 0.025903839111328125, 0.02600553512573242, 0.02587593650817871, 0.02592790412902832, 0.02586476707458496, 0.025915136337280275, 0.02855900764465332, 0.02722003173828125, 0.026458431243896484, 0.02609619140625, 0.02598297691345215, 0.025833471298217774, 0.02587811279296875, 0.02579088020324707, 0.025831008911132814, 0.025799072265625, 0.025767936706542968, 0.02585536003112793, 0.025868928909301758, 0.02585116767883301, 0.025843744277954103, 0.02584003257751465, 0.02584339141845703, 0.0258504638671875, 0.025793760299682618, 0.025937887191772462, 0.02585478401184082, 0.025886720657348632, 0.025860095977783205, 0.025887744903564453, 0.02585004806518555, 0.025870880126953124, 0.02586556816101074, 0.025901216506958008, 0.02591209602355957, 0.025826528549194337, 0.025834272384643555, 0.026064895629882814, 0.025860095977783205, 0.025921503067016603, 0.025942047119140624, 0.025994400024414062, 0.02618864059448242, 0.02625721549987793, 0.02631491279602051, 0.026191904067993165, 0.02615500831604004, 0.02611609649658203, 0.02603936004638672, 0.025967071533203125, 0.025950687408447266, 0.02597887992858887, 0.02596646308898926, 0.025929311752319335, 0.02591798400878906, 0.02596249580383301, 0.02593382453918457, 0.025947647094726564, 0.026083839416503905, 0.026076992034912108, 0.026043840408325195, 0.025965311050415038, 0.02588876724243164, 0.025890815734863282, 0.025998624801635742, 0.025909887313842774, 0.026003551483154298, 0.025960447311401368, 0.025938976287841798, 0.028807872772216796, 0.027283456802368163, 0.026464256286621093, 0.02614476776123047, 0.025976831436157227, 0.02590924835205078, 0.025888736724853516, 0.02583760070800781, 0.025891935348510742, 0.02585487937927246, 0.025827327728271485, 0.025855104446411134, 0.02598182487487793, 0.02597887992858887, 0.02598297691345215, 0.025892704010009766, 0.025927391052246094, 0.02594041633605957, 0.025939968109130858, 0.026029600143432616, 0.026000896453857423, 
0.026093759536743165, 0.025921760559082033, 0.025852479934692384, 0.025878528594970703, 0.025896575927734374, 0.025917823791503907, 0.025937471389770508, 0.0259400634765625, 0.02588627243041992, 0.02595305633544922, 0.026030080795288086, 0.02594144058227539, 0.025969215393066406, 0.025987071990966795, 0.026113407135009765, 0.02620275115966797, 0.026178688049316407, 0.026211391448974608, 0.026101631164550783, 0.026163135528564453, 0.02617308807373047, 0.026206560134887695, 0.026101760864257813, 0.026074911117553713, 0.026011871337890624, 0.026015743255615235, 0.026030080795288086, 0.026025983810424806, 0.02595155143737793, 0.02594476890563965, 0.02594611167907715, 0.02593382453918457, 0.02595430374145508, 0.026044416427612304, 0.026038015365600586, 0.025949472427368163, 0.025954431533813476, 0.026053472518920897, 0.025991167068481445, 0.025998464584350588, 0.025932287216186522, 0.025964576721191405, 0.028925952911376954, 0.027355167388916017, 0.026633920669555663, 0.02620649528503418, 0.02608742332458496, 0.025886720657348632, 0.025878143310546876, 0.025829120635986327, 0.02586595153808594, 0.02582406425476074, 0.025892000198364257, 0.025874975204467774, 0.025853439331054686, 0.025834175109863283, 0.025823455810546875, 0.025939968109130858, 0.025829376220703124, 0.025821184158325194, 0.025814815521240233, 0.025886751174926757, 0.025864383697509766, 0.02591062355041504, 0.025897504806518555, 0.02589411163330078, 0.025927648544311524, 0.025973567962646483, 0.025954431533813476, 0.026046464920043946, 0.025952255249023438, 0.02595020866394043, 0.025910400390625, 0.025873279571533202, 0.02588057518005371, 0.025968416213989258, 0.025953792572021486, 0.026139360427856445, 0.02619539260864258, 0.026315296173095703, 0.02628438377380371, 0.0262260799407959, 0.026181119918823242, 0.02616329574584961, 0.026233535766601562, 0.026085119247436523, 0.026007232666015626, 0.02603424072265625, 0.026198528289794923, 0.026038272857666016, 0.02605606460571289, 0.02601433563232422, 0.02614067268371582, 0.025991167068481445, 0.02604377555847168, 0.026030271530151368, 0.026028480529785156, 0.02595840072631836, 0.026032127380371094, 0.026218496322631835, 0.026011648178100585, 0.025931776046752928, 0.02599839973449707, 0.025990079879760743, 0.026008832931518556, 0.028617439270019532, 0.027092992782592775, 0.026558464050292968, 0.026220544815063477, 0.02607855987548828, 0.02597750473022461, 0.025923583984375, 0.02586134338378906, 0.02588751983642578, 0.025862144470214843, 0.025882623672485353, 0.025894912719726562, 0.025956352233886718, 0.02590496063232422, 0.025899200439453124, 0.025863775253295897, 0.026018207550048827, 0.02595840072631836, 0.025993215560913087, 0.02597478485107422, 0.02599238395690918, 0.02590582466125488, 0.025915552139282226, 0.02591939163208008, 0.025913408279418945, 0.025976863861083985, 0.0259420166015625, 0.025943424224853514, 0.02592950439453125, 0.025934688568115233, 0.02591334342956543, 0.025911296844482422, 0.025924800872802734, 0.02594633674621582, 0.025908992767333983, 0.02617024040222168, 0.026252832412719727, 0.026292671203613283, 0.026203264236450197, 0.026183904647827147, 0.02614748764038086, 0.026054655075073242, 0.02604243278503418, 0.026066879272460937, 0.026015743255615235, 0.025941823959350584, 0.02594220733642578, 0.025925247192382813, 0.02600998306274414, 0.025939071655273437, 0.025946847915649413, 0.02593401527404785, 0.02598851203918457, 0.025985599517822266, 0.025977951049804687, 0.026049440383911132, 0.02599344062805176, 0.0259520320892334, 0.02595840072631836, 
0.025980928421020507, 0.026025983810424806, 0.02599283218383789, 0.025991552352905272, 0.02876486396789551, 0.02719705581665039, 0.026491167068481446, 0.02612233543395996, 0.02595020866394043, 0.025816127777099608, 0.02579654312133789, 0.02580143928527832, 0.02580918312072754, 0.025915391921997072, 0.0259420166015625, 0.025847808837890625, 0.025870336532592773, 0.026032127380371094, 0.025947456359863282, 0.02591814422607422, 0.025985023498535157, 0.026003456115722655, 0.026003456115722655, 0.02598297691345215, 0.02601913642883301, 0.026014400482177735, 0.026035295486450196, 0.02599193572998047, 0.025992864608764647, 0.025990976333618163, 0.02604307174682617, 0.0261200008392334, 0.02612348747253418, 0.026081632614135743, 0.026092159271240235, 0.025974176406860353, 0.02598358345031738, 0.025944063186645508, 0.025980064392089844, 0.026104543685913088, 0.02615100860595703, 0.02628611183166504, 0.026290176391601562, 0.026286079406738282, 0.026382303237915038, 0.02620419120788574, 0.026185728073120116, 0.026092672348022462, 0.026071935653686523, 0.02606675148010254, 0.026031808853149416, 0.02596284866333008, 0.025989055633544922, 0.025972320556640626, 0.02596518325805664, 0.02607913589477539, 0.02601705551147461, 0.02606572723388672, 0.02604431915283203, 0.026034271240234375, 0.026034175872802736, 0.026039648056030273, 0.026000032424926756, 0.025976192474365233, 0.02603481674194336, 0.02605398368835449, 0.026040992736816405, 0.028721216201782227, 0.02728009605407715, 0.02653343963623047, 0.026174047470092773, 0.0260133113861084, 0.025984895706176757, 0.02608736038208008, 0.025981504440307616, 0.02596246337890625, 0.025935520172119142, 0.025915199279785157, 0.02593235206604004, 0.02587648010253906, 0.02587648010253906, 0.025886463165283202, 0.02588697624206543, 0.025963647842407227, 0.026065792083740234, 0.026093568801879883, 0.026001407623291017, 0.02600281524658203, 0.025979103088378905, 0.025964351654052736, 0.025933792114257812, 0.02591529655456543, 0.025975519180297852, 0.02597385597229004, 0.026028959274291993, 0.025916576385498047, 0.025946975708007813, 0.025991167068481445, 0.026005504608154296, 0.02597887992858887, 0.025993215560913087, 0.02593951988220215, 0.026077152252197266, 0.026150880813598634, 0.0262108154296875, 0.026249216079711913, 0.026177536010742186, 0.026224288940429687, 0.026149215698242186, 0.026133920669555662, 0.026165855407714843, 0.026108928680419922, 0.026063808441162108, 0.02607315254211426, 0.026023935317993165, 0.02612428855895996, 0.026028032302856444, 0.02599283218383789, 0.025988704681396486, 0.025987871170043947, 0.025953535079956055, 0.0260184326171875, 0.026005023956298827, 0.02600601577758789, 0.025964191436767578, 0.026012096405029297, 0.025931392669677734, 0.025976383209228515, 0.026026815414428712, 0.026055904388427736, 0.028876224517822267, 0.02728607940673828, 0.02652569580078125, 0.026202112197875976, 0.026038272857666016, 0.02595430374145508, 0.02586796760559082, 0.025835840225219727, 0.02585545539855957, 0.025881120681762695, 0.025894912719726562, 0.025894912719726562, 0.025899007797241212, 0.02590105628967285, 0.02596646308898926, 0.025991296768188475, 0.026085376739501953, 0.026042015075683593, 0.026011999130249024, 0.026017791748046876, 0.026030080795288086, 0.02597020721435547, 0.026015552520751953, 0.025946144104003907, 0.025936511993408202, 0.02592153549194336, 0.025921503067016603, 0.025968671798706055, 0.026000864028930665, 0.02596713638305664, 0.025989343643188476, 0.026013471603393554, 0.026043743133544923, 0.02600927925109863, 
0.026057695388793944, 0.026187776565551758, 0.026181407928466797, 0.026314687728881837, 0.026277952194213867, 0.026250560760498046, 0.026221471786499022, 0.026168352127075196, 0.02612937545776367, 0.02611609649658203, 0.026068992614746093, 0.026027456283569336, 0.026004032135009767, 0.026003456115722655, 0.02594713592529297, 0.025994239807128908, 0.02598428726196289, 0.025968704223632812, 0.026016544342041016, 0.026056575775146484, 0.026013599395751954, 0.026029951095581056, 0.025974176406860353, 0.025983808517456054, 0.02595840072631836, 0.02597478485107422, 0.02592972755432129, 0.02602774429321289, 0.02600281524658203, 0.02850201606750488, 0.027076576232910158, 0.02641836738586426, 0.02610780715942383, 0.025961376190185546, 0.025899007797241212, 0.025946079254150392, 0.0258621768951416, 0.025927328109741212, 0.02588230323791504, 0.025928031921386718, 0.0258460807800293, 0.02587398338317871, 0.02593631935119629, 0.025927263259887694, 0.025907615661621093, 0.025972671508789062, 0.025944128036499023, 0.02588371276855469, 0.025895584106445314, 0.025913568496704103, 0.025920703887939454, 0.02596486473083496, 0.0259303035736084, 0.025927135467529297, 0.026003007888793946, 0.025942943572998048, 0.025972799301147462, 0.025933664321899415, 0.025974399566650392, 0.025893184661865236, 0.025946048736572264, 0.025930015563964844, 0.02597887992858887, 0.025960447311401368, 0.02613043212890625, 0.026251264572143555, 0.026428800582885742, 0.026300863265991212, 0.026275583267211914, 0.026316768646240236, 0.026208736419677733, 0.026206207275390626, 0.02610585594177246, 0.0261345272064209, 0.026045568466186525, 0.02608627128601074, 0.02603385543823242, 0.026048992156982424, 0.025972576141357423, 0.026003456115722655, 0.025949920654296875, 0.026023712158203125, 0.026010112762451174, 0.025990528106689455, 0.02604198455810547, 0.026137599945068358, 0.02606070327758789, 0.026212448120117186, 0.0261345272064209, 0.02610380744934082, 0.02606460762023926, 0.026025888442993163, 0.02860063934326172, 0.02732441520690918, 0.026566656112670898, 0.02617078399658203, 0.026058944702148437, 0.025975072860717773, 0.02592576026916504, 0.025896160125732422, 0.025897760391235352, 0.02592323112487793, 0.02592585563659668, 0.02591663932800293, 0.02589481544494629, 0.025867263793945314, 0.026393695831298827, 0.02590979194641113, 0.026052127838134764, 0.025966880798339843, 0.025952831268310547, 0.025943071365356445, 0.025942527770996093, 0.025907680511474608, 0.025900192260742187, 0.02592425537109375, 0.025961952209472658, 0.026005823135375975, 0.02598134422302246, 0.026062847137451172, 0.02597478485107422, 0.025952255249023438, 0.025953983306884764, 0.025932096481323243, 0.026056320190429687, 0.02603865623474121, 0.026062463760375975, 0.026239360809326172, 0.02632499122619629, 0.02631884765625, 0.02627084732055664, 0.026243967056274416, 0.02625718307495117, 0.02615113639831543, 0.02632499122619629, 0.02611814308166504, 0.026042367935180662, 0.026005504608154296, 0.025989120483398437, 0.025985023498535157, 0.02599692726135254, 0.025992895126342775, 0.02598080062866211, 0.025961280822753906, 0.02595625686645508, 0.02596873664855957, 0.025966207504272462, 0.025940351486206055, 0.026039520263671876, 0.026161472320556642, 0.026014080047607423, 0.025974880218505858, 0.02600873565673828, 0.025959264755249022, 0.025933055877685546]",tokens/s,38.36221067143018,, 
float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1243, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1121, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 842, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 530, in forward attn_output = self._flash_attention_forward( File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 628, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,883.617792,3447.586816,0.0,3045.064704,2842.846208,s,1,7.31207080078125,7.31207080078125,0.0,7.31207080078125,7.31207080078125,7.31207080078125,7.31207080078125,[7.31207080078125],,kWh,5.85538782503742e-06,6.349197328465118e-07,2.39111302402506e-06,8.881420581908993e-06,,MB,1209.339904,3621.650432,0.0,3208.64256,2982.452736,s,10,2.440714004516602,0.2440714004516602,0.002982791952260248,0.24365643310546875,0.24921473541259764,0.24931415481567382,0.24939369033813474,"[0.244029541015625, 0.2426337890625, 0.24349565124511718, 0.24182231140136717, 0.24381721496582032, 0.24919264221191406, 0.2415570526123047, 0.24941357421875, 0.24507904052734375, 0.23967318725585937]",tokens/s,1048.8734014975357,kWh,7.358720484791849e-06,8.115317455554202e-07,4.871212230300736e-06,1.3041464460648006e-05,tokens/kWh,19629697.322142597,MB,1233.989632,3621.650432,0.0,3208.64256,2982.455296,s,10,12.786667846679686,1.2786667846679687,0.0073945896239175635,1.2776575927734375,1.28396220703125,1.2901543212890625,1.2951080126953125,"[1.2819935302734375, 1.2655325927734375, 1.27572216796875, 1.277006103515625, 1.296346435546875, 1.275377197265625, 1.27872119140625, 1.2750733642578125, 1.282586181640625, 1.27830908203125]",tokens/s,49.27006844582986,kWh,3.69526574972887e-05,4.075608335763504e-06,2.4110227621498836e-05,6.513849345455105e-05,tokens/kWh,967170.0504396354,,s,630,12.784264642715456,0.0202924835598658,0.00045080526153967396,0.020203487396240234,0.020631770133972167,0.02087374200820923,0.02160998861312867,"[0.020982751846313475, 0.02023423957824707, 0.020092287063598634, 0.019978879928588867, 0.019916799545288084, 0.01988540840148926, 0.01989699172973633, 0.019965951919555663, 0.020361215591430663, 0.02041651153564453, 0.020934656143188478, 0.02047590446472168, 0.02044220733642578, 0.02062553596496582, 0.02058540725708008, 0.02046316719055176, 0.02035740852355957, 0.020227392196655272, 0.02021811294555664, 0.020570720672607422, 0.020453216552734375, 0.02016860771179199, 0.019994720458984375, 0.020385791778564453, 0.020458816528320312, 0.020372159957885744, 0.02031167984008789, 0.020148544311523436, 0.020266592025756838, 0.020151039123535157, 0.020106847763061524, 0.020117631912231447, 0.020313631057739256, 0.020314271926879884, 0.02026233673095703, 0.020425312042236327, 0.0206582088470459, 0.0205230712890625, 0.020533632278442383, 0.0206682243347168, 0.020353023529052734, 0.020361215591430663, 0.020475711822509766, 0.020879199981689453, 0.020996448516845703, 0.020502431869506836, 0.020635744094848633, 0.020255903244018554, 0.020257631301879884, 0.02028163146972656, 0.020375263214111327, 0.020709280014038087, 0.020065504074096678, 0.020287904739379883, 0.020060640335083008, 0.0202193603515625, 0.020310207366943358, 0.020451040267944337, 0.020361183166503906, 
0.020208288192749023, 0.02018854331970215, 0.020275840759277342, 0.02001299285888672, 0.020916223526000977, 0.02030134391784668, 0.020115392684936523, 0.02011395263671875, 0.019889951705932617, 0.020231967926025392, 0.02049065589904785, 0.020088863372802735, 0.02004582405090332, 0.0200130558013916, 0.0200130558013916, 0.019996768951416017, 0.019955263137817383, 0.020074335098266602, 0.01995008087158203, 0.019912704467773438, 0.02008406448364258, 0.020286111831665038, 0.020258432388305665, 0.020128128051757812, 0.020286815643310547, 0.020178943634033202, 0.020122272491455078, 0.020230144500732423, 0.020022943496704103, 0.019992063522338867, 0.01993404769897461, 0.019953792572021484, 0.020006975173950194, 0.01996988868713379, 0.019986400604248045, 0.020037248611450197, 0.020085119247436525, 0.020121088027954103, 0.02053548812866211, 0.020177215576171876, 0.020066240310668945, 0.019959871292114257, 0.019936864852905273, 0.020007328033447267, 0.01995689582824707, 0.019948320388793947, 0.02015545654296875, 0.020517728805541993, 0.020551488876342772, 0.020304224014282227, 0.02014521598815918, 0.020017951965332032, 0.020016704559326172, 0.01998908805847168, 0.01996575927734375, 0.019939519882202147, 0.019984031677246095, 0.020050207138061524, 0.02000009536743164, 0.01998451232910156, 0.01997494316101074, 0.019973728179931642, 0.01992678451538086, 0.019886272430419922, 0.019846879959106445, 0.01980828857421875, 0.019908672332763673, 0.021132160186767578, 0.02045747184753418, 0.020184703826904297, 0.020181055068969726, 0.01998396873474121, 0.01995859146118164, 0.0199781436920166, 0.02002124786376953, 0.020086687088012697, 0.01998886489868164, 0.020040800094604492, 0.020004575729370117, 0.019964000701904298, 0.020023935317993163, 0.019994016647338866, 0.020021280288696288, 0.020165376663208008, 0.020193119049072266, 0.020201631546020508, 0.020271104812622072, 0.02000249671936035, 0.020097503662109373, 0.0205883846282959, 0.020764671325683593, 0.02040729522705078, 0.020329471588134765, 0.02022809600830078, 0.020503583908081054, 0.02062169647216797, 0.020508928298950194, 0.020296031951904298, 0.02030745506286621, 0.020209632873535156, 0.02010166358947754, 0.0200130558013916, 0.02004172706604004, 0.019957727432250976, 0.020070528030395506, 0.02002092742919922, 0.019974143981933593, 0.019940576553344726, 0.01990777587890625, 0.019955520629882813, 0.01999667167663574, 0.02086092758178711, 0.020211456298828125, 0.020099327087402342, 0.02047590446472168, 0.02032975959777832, 0.02027494430541992, 0.02033148765563965, 0.020262912750244142, 0.020338111877441407, 0.020406848907470704, 0.020504575729370117, 0.020600831985473633, 0.020396032333374024, 0.020428800582885744, 0.020924224853515624, 0.020565696716308594, 0.020291936874389647, 0.02025596809387207, 0.020237119674682617, 0.02106777572631836, 0.020423776626586915, 0.020147104263305664, 0.02004787254333496, 0.020008096694946288, 0.02004774475097656, 0.01998681640625, 0.020015552520751954, 0.019994783401489257, 0.01997772789001465, 0.01996441650390625, 0.02001049613952637, 0.019943967819213867, 0.020012672424316407, 0.02001692771911621, 0.019933759689331056, 0.019998304367065428, 0.020040096282958983, 0.019961856842041017, 0.02022604751586914, 0.020471519470214843, 0.020314592361450196, 0.020164064407348633, 0.020283744812011718, 0.02035526466369629, 0.020221759796142578, 0.020393087387084962, 0.02206604766845703, 0.02020115280151367, 0.020366687774658204, 0.02055471992492676, 0.020384992599487305, 0.020287296295166016, 0.02019740867614746, 
0.020147104263305664, 0.020279327392578126, 0.020256736755371093, 0.02009609603881836, 0.020031488418579102, 0.020063167572021486, 0.019990047454833983, 0.019966432571411133, 0.02071286392211914, 0.021479711532592774, 0.020258975982666017, 0.02029689598083496, 0.020214752197265626, 0.020148223876953125, 0.02008678436279297, 0.020358175277709962, 0.020409183502197267, 0.02021798324584961, 0.020176895141601564, 0.020119104385375976, 0.02041427230834961, 0.020441728591918944, 0.02022809600830078, 0.02045238494873047, 0.02030691146850586, 0.020459711074829103, 0.02031977653503418, 0.020200960159301756, 0.02057823944091797, 0.021063455581665037, 0.02113148880004883, 0.027246559143066406, 0.021867839813232422, 0.02084022331237793, 0.021210176467895508, 0.02110246467590332, 0.020604896545410156, 0.02079913520812988, 0.02063132858276367, 0.02062553596496582, 0.02081635284423828, 0.020537343978881836, 0.02056368064880371, 0.02046326446533203, 0.020349567413330077, 0.021174272537231444, 0.020598495483398437, 0.020574495315551757, 0.02050361633300781, 0.02067955207824707, 0.020557119369506837, 0.02055392074584961, 0.020568639755249023, 0.020537567138671876, 0.020448991775512695, 0.020465791702270506, 0.020567392349243162, 0.02037615966796875, 0.02044268798828125, 0.0203917121887207, 0.020506591796875, 0.020310976028442382, 0.020259584426879883, 0.020271871566772463, 0.020371328353881835, 0.02047043228149414, 0.02023516845703125, 0.020177728652954103, 0.020185056686401366, 0.020150304794311524, 0.02016396713256836, 0.02048873519897461, 0.020219135284423827, 0.020462432861328126, 0.020227680206298827, 0.020179359436035157, 0.020135936737060548, 0.02015977668762207, 0.020401887893676758, 0.02065939140319824, 0.020334400177001954, 0.02027248001098633, 0.0202159366607666, 0.020081184387207032, 0.020262655258178712, 0.02027136039733887, 0.020065792083740236, 0.02008291244506836, 0.020070688247680664, 0.02006425666809082, 0.020051424026489257, 0.02002998352050781, 0.021023391723632812, 0.020187231063842775, 0.020169919967651367, 0.020294336318969725, 0.02029737663269043, 0.020353504180908203, 0.02012348747253418, 0.0201395206451416, 0.02008950424194336, 0.0202359676361084, 0.020101184844970702, 0.020070655822753906, 0.02008064079284668, 0.019992576599121094, 0.020017152786254884, 0.019990528106689453, 0.02012508773803711, 0.019964096069335937, 0.020267583847045897, 0.019963935852050783, 0.01994483184814453, 0.019980735778808593, 0.01990800094604492, 0.02000339126586914, 0.01996598434448242, 0.020121055603027342, 0.019964448928833006, 0.020084735870361328, 0.020045440673828126, 0.020588768005371093, 0.020076671600341798, 0.020285472869873047, 0.02030748748779297, 0.020261119842529297, 0.02034627151489258, 0.021084991455078125, 0.020115360260009766, 0.020047584533691407, 0.020160959243774413, 0.020215744018554686, 0.020149696350097657, 0.01990713691711426, 0.019941375732421874, 0.019969152450561523, 0.019968864440917968, 0.01997439956665039, 0.020100896835327148, 0.02045756721496582, 0.020212959289550782, 0.020046464920043944, 0.01995372772216797, 0.020054208755493165, 0.019945280075073242, 0.020899839401245117, 0.020520736694335937, 0.02063587188720703, 0.020347936630249024, 0.020184032440185545, 0.02058393669128418, 0.02015056037902832, 0.02009110450744629, 0.020291040420532227, 0.023632415771484373, 0.0214270076751709, 0.020867071151733398, 0.02068070411682129, 0.02052479934692383, 0.020486400604248046, 0.020463584899902344, 0.02032374382019043, 0.020372159957885744, 0.020260000228881837, 
0.020400928497314452, 0.02023401641845703, 0.02006435203552246, 0.02000089645385742, 0.02004377555847168, 0.02007263946533203, 0.02003852844238281, 0.020155616760253906, 0.020145856857299804, 0.020243520736694335, 0.020655359268188477, 0.020233951568603515, 0.020407520294189452, 0.02034899139404297, 0.020192192077636718, 0.02020534324645996, 0.02038374328613281, 0.02044108772277832, 0.02020147132873535, 0.020230144500732423, 0.020262912750244142, 0.0200130558013916, 0.020151424407958984, 0.02042310333251953, 0.02047635269165039, 0.020551679611206054, 0.020414464950561522, 0.020352384567260743, 0.020356832504272462, 0.020296607971191406, 0.020170976638793945, 0.020215583801269532, 0.02007651138305664, 0.020101152420043945, 0.020055328369140625, 0.020275072097778322, 0.02014224052429199, 0.02005276870727539, 0.020147680282592773, 0.020133760452270506, 0.020268831253051758, 0.020084768295288085, 0.020216543197631835, 0.02066640090942383, 0.020508672714233397, 0.020437023162841798, 0.02030940818786621, 0.020183679580688476, 0.02018284797668457, 0.020123424530029296, 0.020416704177856446, 0.02009718322753906, 0.020107295989990233, 0.020141151428222655, 0.021075328826904296, 0.02377996826171875, 0.02055072021484375, 0.020313024520874023, 0.020195232391357423, 0.020172895431518553, 0.020068351745605468, 0.020063711166381837, 0.020046432495117186, 0.019957696914672852, 0.019998720169067383, 0.020041215896606446, 0.020007423400878906, 0.02002934455871582, 0.020156511306762694, 0.020100927352905272, 0.020180608749389647, 0.020122175216674806, 0.019982336044311523, 0.020017152786254884, 0.019967615127563478, 0.01995404815673828, 0.019935199737548828, 0.020025375366210938, 0.019903839111328123, 0.020091552734375, 0.02004355239868164, 0.02006675148010254, 0.020187999725341795, 0.020316352844238283, 0.020236959457397462, 0.020137887954711914, 0.020137887954711914, 0.02016102409362793, 0.02014361572265625, 0.020004159927368165, 0.019989728927612305, 0.020026527404785156, 0.020038047790527345, 0.020037824630737305, 0.020125696182250977, 0.020245824813842773, 0.02012406349182129, 0.02019158363342285, 0.02010675239562988, 0.02003945541381836, 0.020005535125732422, 0.020106847763061524, 0.02013430404663086, 0.02058243179321289, 0.020447200775146484, 0.020166496276855468, 0.020025503158569335, 0.02011484718322754, 0.020748895645141603, 0.020346527099609376, 0.020522464752197267, 0.020404863357543945, 0.020434911727905274, 0.02034502410888672, 0.020230239868164062, 0.02044460868835449, 0.020627071380615234, 0.021202943801879884, 0.020936479568481447, 0.020840543746948242, 0.020803327560424804, 0.02067728042602539, 0.0205166072845459, 0.0205250244140625, 0.020682016372680665, 0.02075721549987793, 0.02067046356201172, 0.020639711380004883, 0.02072719955444336, 0.02056982421875, 0.020560800552368166, 0.02065420722961426, 0.020541311264038086, 0.020527103424072265, 0.02041152000427246, 0.02028163146972656, 0.02024448013305664, 0.020188831329345704, 0.020181791305541992, 0.020277088165283202, 0.020311487197875976, 0.020446304321289063, 0.020566015243530272, 0.020350624084472656, 0.02022617530822754, 0.02027248001098633, 0.02024457550048828, 0.020117919921875, 0.020180255889892577, 0.020185855865478514, 0.020664447784423827, 0.02025817680358887, 0.020121599197387697, 0.020158367156982424, 0.020193056106567384, 0.02029257583618164, 0.020191200256347658, 0.020170751571655272, 0.02001625633239746, 0.020068832397460937, 0.020074111938476562, 0.020062047958374022, 0.02000787162780762, 0.02004319953918457, 
0.020127967834472658, 0.02009440040588379, 0.020024223327636717, 0.020026432037353516, 0.02004262351989746, 0.02000230407714844, 0.020126335144042967, 0.020465248107910155, 0.020325727462768554, 0.020388864517211915, 0.02020966339111328, 0.020199392318725588, 0.020137792587280275, 0.020246751785278322, 0.02090291213989258, 0.02038502311706543, 0.020905376434326172, 0.02038435173034668, 0.020370559692382814, 0.02049849510192871, 0.020369792938232423, 0.020213567733764648, 0.02009779167175293, 0.020229440689086914, 0.02012175941467285, 0.02003785514831543, 0.02003740882873535, 0.021663200378417968, 0.02022060775756836, 0.020037887573242187, 0.021786495208740233, 0.02113523292541504, 0.020045440673828126, 0.0200914249420166, 0.019964096069335937, 0.02004777526855469, 0.020084287643432615, 0.021340063095092773, 0.020185728073120118, 0.02003971290588379, 0.020414560317993165, 0.020227872848510742, 0.02014364814758301, 0.020283872604370118, 0.020699136734008788, 0.020131263732910156, 0.02004025650024414, 0.02021171188354492, 0.02021785545349121, 0.020201631546020508, 0.020565887451171876, 0.02030518341064453, 0.0200231990814209, 0.02000694465637207, 0.020024063110351563, 0.02011894416809082, 0.0201582088470459, 0.020400991439819337, 0.020225536346435546, 0.020124160766601562, 0.01999667167663574, 0.020064384460449218, 0.020045183181762696, 0.020015615463256836, 0.020000448226928712, 0.019976608276367186, 0.019938560485839845, 0.0200546875, 0.01992288017272949, 0.020142143249511718, 0.01997558403015137, 0.020073055267333984, 0.020463327407836913, 0.02036147117614746, 0.02017830467224121, 0.02034550476074219, 0.020610464096069335, 0.02069878387451172, 0.02074831962585449]",tokens/s,49.279330302269486,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in 
run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: Phi3ForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File 
""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1243, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1121, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 842, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 530, in forward attn_output = self._flash_attention_forward( File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 628, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File 
""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,868.429824,2698.903552,0.0,2296.38144,2202.20672,s,1,7.24753955078125,7.24753955078125,0.0,7.24753955078125,7.24753955078125,7.24753955078125,7.24753955078125,[7.24753955078125],,kWh,5.310936195822554e-06,5.631233561172464e-07,1.9725015780180932e-06,7.846561129957894e-06,,MB,1337.07776,2797.469696,0.0,2382.364672,2267.889152,s,10,0.33901145553588863,0.03390114555358886,0.0004934655246154013,0.03384880065917969,0.03409556617736816,0.034682134819030756,0.03515138973236084,"[0.03526870346069336, 0.03383347320556641, 0.03393219375610351, 0.03390780639648437, 0.03396521759033203, 0.03370889663696289, 0.03351254272460937, 0.033680511474609376, 0.03333798217773438, 0.03386412811279297]",tokens/s,7551.367242010475,kWh,1.1599132491895672e-06,1.2789866780850397e-07,7.649025078650581e-07,2.052714424863129e-06,tokens/kWh,124712915.2010853,MB,1393.31584,2799.566848,0.0,2382.364672,2267.891712,s,10,14.279466674804686,1.427946667480469,0.007438894382254479,1.427375,1.4374607543945312,1.4378158874511717,1.4380999938964842,"[1.4373818359375, 1.432779052734375, 1.4138319091796876, 1.4342960205078126, 1.4252979736328124, 1.4294520263671875, 1.424212158203125, 1.4381710205078124, 1.4207003173828125, 1.4233443603515625]",tokens/s,44.11929481313187,kWh,4.157835801414538e-05,4.585726926528528e-06,2.401208953593618e-05,7.017617447661008e-05,tokens/kWh,897740.5860303496,,s,630,14.275095460891713,0.022658881683955117,0.0004133282191809532,0.02256270408630371,0.023061339950561522,0.02321803970336914,0.024053648796081545,"[0.022609695434570313, 0.022956287384033203, 0.022810592651367187, 0.02270796775817871, 0.022781503677368163, 0.022710687637329103, 0.022641151428222657, 0.022691839218139647, 0.02301683235168457, 0.02286031913757324, 0.022653024673461915, 0.02262835121154785, 0.022738176345825194, 0.022675392150878906, 0.023957504272460937, 0.026112831115722657, 0.023195648193359376, 0.023143680572509765, 0.023227136611938478, 0.023053855895996095, 0.02259312057495117, 0.022504320144653322, 0.022781087875366212, 0.02250428771972656, 0.022347776412963868, 0.023536928176879884, 0.022630687713623046, 0.022581472396850585, 0.022812896728515625, 0.024053760528564453, 0.022777856826782225, 0.022689792633056642, 0.02249932861328125, 0.02223094367980957, 0.02229574394226074, 0.022322080612182618, 0.022451391220092775, 
0.022532928466796876, 0.022372127532958985, 0.022560991287231446, 0.022597312927246094, 0.022878528594970703, 0.022689792633056642, 0.02238627243041992, 0.022450592041015623, 0.02272870445251465, 0.02253004837036133, 0.022460384368896483, 0.023042144775390624, 0.023594783782958983, 0.022528160095214845, 0.02225177574157715, 0.02228348731994629, 0.022286016464233397, 0.022350080490112306, 0.022339456558227538, 0.022282976150512696, 0.022280191421508787, 0.022285919189453125, 0.026027616500854493, 0.023362367630004884, 0.022625951766967772, 0.022440288543701174, 0.02233580780029297, 0.02226790428161621, 0.022527999877929687, 0.022275360107421874, 0.022297311782836914, 0.022421503067016603, 0.022179840087890625, 0.02227609634399414, 0.02234163284301758, 0.022410720825195313, 0.022595872879028322, 0.022533376693725585, 0.02250444793701172, 0.022637823104858398, 0.022562591552734376, 0.022354175567626953, 0.022315744400024415, 0.022514848709106444, 0.022985311508178712, 0.023042303085327148, 0.022757375717163086, 0.022978368759155272, 0.022931648254394532, 0.022824960708618162, 0.022603200912475585, 0.022807104110717773, 0.023008991241455078, 0.02296246337890625, 0.02289641571044922, 0.0227412166595459, 0.02286809539794922, 0.02298860740661621, 0.022685760498046874, 0.022683647155761717, 0.02287820816040039, 0.023244384765625, 0.023030176162719726, 0.02287615966796875, 0.022982656478881838, 0.022662912368774414, 0.02309280014038086, 0.02298931121826172, 0.02289811134338379, 0.02286591911315918, 0.022755744934082032, 0.02289289665222168, 0.02263039970397949, 0.02258527946472168, 0.02255264091491699, 0.02291302490234375, 0.022966272354125978, 0.02283027267456055, 0.02290940856933594, 0.022521472930908202, 0.022422239303588866, 0.022865055084228515, 0.022670175552368162, 0.022691328048706053, 0.022690303802490236, 0.022982656478881838, 0.023266592025756837, 0.023155424118041994, 0.023240095138549806, 0.02291257667541504, 0.023134624481201172, 0.023119903564453124, 0.0229171199798584, 0.022544160842895507, 0.02249135971069336, 0.022346847534179686, 0.022535072326660157, 0.022386144638061524, 0.022321407318115234, 0.02236240005493164, 0.022294464111328124, 0.022441503524780273, 0.0224400634765625, 0.0223604793548584, 0.022278144836425783, 0.022214656829833986, 0.022414880752563475, 0.022340063095092774, 0.022253568649291993, 0.022312959671020507, 0.022331392288208008, 0.022517759323120116, 0.022771711349487304, 0.022378496170043945, 0.022308704376220703, 0.02238070487976074, 0.02253209686279297, 0.02241535949707031, 0.022949663162231446, 0.023048351287841797, 0.022456384658813475, 0.02249728012084961, 0.022368032455444335, 0.022364032745361327, 0.022267744064331053, 0.022307327270507812, 0.022237184524536133, 0.02213248062133789, 0.02222719955444336, 0.022369983673095704, 0.022275903701782226, 0.02227577590942383, 0.022264287948608397, 0.022346080780029295, 0.022271999359130858, 0.022355039596557616, 0.022337919235229493, 0.022420000076293946, 0.02231705665588379, 0.022502719879150392, 0.02239967918395996, 0.0224768009185791, 0.022460351943969725, 0.022485055923461915, 0.022288383483886717, 0.022304767608642577, 0.022450176239013672, 0.0224168643951416, 0.022305311203002928, 0.022510591506958007, 0.022333984375, 0.022411359786987304, 0.02226076889038086, 0.0222807674407959, 0.022194591522216797, 0.022364160537719727, 0.02228428840637207, 0.02222489547729492, 0.022335424423217773, 0.022172895431518555, 0.022263935089111328, 0.02256355285644531, 0.022437887191772463, 0.02234940719604492, 
0.022408960342407226, 0.022403743743896483, 0.02267136001586914, 0.02307472038269043, 0.02275542449951172, 0.022413312911987306, 0.022350976943969727, 0.02503353691101074, 0.02286169624328613, 0.022505184173583985, 0.022306432723999025, 0.022408031463623048, 0.02221670341491699, 0.02234163284301758, 0.022419456481933595, 0.022404735565185546, 0.02304857635498047, 0.023173120498657225, 0.02287001609802246, 0.023215488433837892, 0.02401318359375, 0.023488704681396484, 0.023142463684082033, 0.02304204750061035, 0.0231026554107666, 0.023031744003295898, 0.023155391693115233, 0.022843584060668946, 0.022691839218139647, 0.022611967086791994, 0.02267568016052246, 0.02288412857055664, 0.022916223526000975, 0.022807424545288085, 0.022666816711425782, 0.02311622428894043, 0.022726655960083008, 0.02274870491027832, 0.02257148742675781, 0.02245347213745117, 0.022588191986083986, 0.02267683219909668, 0.022935615539550782, 0.023056800842285157, 0.023223743438720704, 0.022727424621582032, 0.022750879287719728, 0.02306083106994629, 0.023185407638549805, 0.02302284812927246, 0.02318172836303711, 0.02259529685974121, 0.022628639221191408, 0.023065919876098632, 0.022573759078979492, 0.022372032165527345, 0.022394975662231444, 0.022617311477661134, 0.022617088317871094, 0.022573055267333983, 0.022417407989501953, 0.0224849910736084, 0.02244918441772461, 0.022502592086791992, 0.022548255920410157, 0.022581247329711913, 0.023017120361328126, 0.023007232666015624, 0.02287808036804199, 0.022677791595458983, 0.022587583541870116, 0.022425600051879883, 0.022662879943847657, 0.022944032669067384, 0.02285523223876953, 0.02272505569458008, 0.022540288925170897, 0.022386688232421875, 0.022512992858886718, 0.022339712142944335, 0.022432031631469725, 0.02266726493835449, 0.0226060791015625, 0.02235539245605469, 0.022766143798828124, 0.022321151733398437, 0.022355775833129882, 0.022550880432128908, 0.023066335678100586, 0.0229683837890625, 0.02299228858947754, 0.022975135803222656, 0.022835039138793947, 0.02281078338623047, 0.022755008697509765, 0.022890079498291017, 0.022772287368774412, 0.022743328094482422, 0.02247443199157715, 0.022469823837280273, 0.02235699272155762, 0.022452224731445314, 0.022715808868408204, 0.022504032135009764, 0.022443552017211914, 0.022482912063598634, 0.02238924789428711, 0.022388736724853517, 0.022431072235107423, 0.022805152893066408, 0.022749088287353517, 0.022468704223632813, 0.02248294448852539, 0.022486400604248048, 0.022183935165405275, 0.022331392288208008, 0.022218719482421875, 0.022331167221069335, 0.02225152015686035, 0.022286048889160158, 0.02387158393859863, 0.02300729560852051, 0.02283558464050293, 0.022602783203125, 0.02262015914916992, 0.02294268798828125, 0.022587392807006838, 0.022763103485107423, 0.022382911682128907, 0.02269603157043457, 0.022195711135864257, 0.022325759887695314, 0.02233344078063965, 0.022206464767456056, 0.022341279983520507, 0.022284639358520507, 0.02235580825805664, 0.02235753631591797, 0.02326905632019043, 0.024503231048583984, 0.022890495300292968, 0.022757375717163086, 0.02278134346008301, 0.02258390426635742, 0.02243929672241211, 0.022477439880371094, 0.022562816619873048, 0.022525056838989258, 0.02309209632873535, 0.02293475151062012, 0.022763647079467774, 0.022761600494384766, 0.022706720352172853, 0.022548479080200197, 0.022250591278076173, 0.0224736328125, 0.0225948486328125, 0.022786272048950194, 0.022915584564208984, 0.02282700729370117, 0.02248255920410156, 0.02241779136657715, 0.022511615753173828, 0.02290812873840332, 0.022819551467895507, 
0.02299091148376465, 0.0229171199798584, 0.022765567779541016, 0.022910623550415038, 0.022836767196655273, 0.022946624755859374, 0.023123584747314452, 0.02282534408569336, 0.022888383865356444, 0.022810304641723633, 0.022660991668701173, 0.022507680892944335, 0.022395135879516602, 0.02224742317199707, 0.022529119491577147, 0.022315935134887697, 0.02244528007507324, 0.02248784065246582, 0.02253209686279297, 0.02251491165161133, 0.02272275161743164, 0.023057216644287108, 0.023123615264892577, 0.023025791168212892, 0.022957088470458985, 0.023012319564819337, 0.023065919876098632, 0.022876544952392577, 0.022825279235839845, 0.022665216445922853, 0.02279596710205078, 0.024129856109619142, 0.023145727157592774, 0.022744863510131837, 0.022576095581054688, 0.022474336624145507, 0.022253984451293944, 0.022291807174682616, 0.02253481674194336, 0.02246451187133789, 0.02246451187133789, 0.02253107261657715, 0.02334422492980957, 0.022704032897949217, 0.022599679946899414, 0.0225501766204834, 0.02252150344848633, 0.02239967918395996, 0.022808576583862306, 0.02244534492492676, 0.022479583740234375, 0.02308412742614746, 0.022501983642578126, 0.022361408233642577, 0.022323999404907226, 0.0224050235748291, 0.022331520080566405, 0.022268096923828126, 0.022188032150268554, 0.022271007537841798, 0.02256175994873047, 0.022525951385498046, 0.022456127166748045, 0.022312223434448244, 0.02245315170288086, 0.02248819160461426, 0.022317951202392577, 0.022388736724853517, 0.023005184173583985, 0.02248636817932129, 0.022340255737304686, 0.0223191032409668, 0.022374399185180666, 0.022404415130615234, 0.022616767883300783, 0.022569696426391603, 0.022611967086791994, 0.022591487884521484, 0.02251276779174805, 0.023710016250610352, 0.023859071731567382, 0.022597919464111327, 0.022415775299072266, 0.0223191032409668, 0.022198272705078126, 0.02226348876953125, 0.02249123191833496, 0.022649055480957032, 0.022560768127441407, 0.022427263259887694, 0.02241766357421875, 0.022535903930664063, 0.022390560150146486, 0.02281100845336914, 0.025470912933349608, 0.023007551193237306, 0.0227775993347168, 0.022675712585449218, 0.022502880096435546, 0.022609439849853516, 0.022521951675415038, 0.022467487335205077, 0.02249283218383789, 0.022346080780029295, 0.022421056747436524, 0.022409664154052735, 0.022304767608642577, 0.02249513626098633, 0.023375200271606444, 0.022418176651000977, 0.022392736434936524, 0.022367359161376953, 0.022402015686035157, 0.022510976791381837, 0.022954624176025392, 0.02266428756713867, 0.022585792541503905, 0.02280291175842285, 0.023044095993041993, 0.023181312561035155, 0.023021184921264648, 0.023455583572387695, 0.022683168411254884, 0.022481536865234374, 0.024053375244140626, 0.024017663955688478, 0.023221471786499023, 0.023014175415039063, 0.023033504486083985, 0.02268297576904297, 0.022565439224243165, 0.022780128479003906, 0.022915456771850588, 0.02328329658508301, 0.023169279098510742, 0.023507040023803712, 0.023192895889282226, 0.023370336532592774, 0.022794240951538085, 0.022959999084472656, 0.022999168395996094, 0.022936767578125, 0.022923519134521484, 0.022778432846069337, 0.022675455093383787, 0.022386304855346678, 0.022808448791503906, 0.023007328033447266, 0.022689695358276366, 0.022668928146362306, 0.022643583297729492, 0.022566911697387695, 0.02275065612792969, 0.022964736938476563, 0.023066335678100586, 0.02292156791687012, 0.02286591911315918, 0.022687744140625, 0.022773759841918945, 0.022593536376953126, 0.02257475280761719, 0.022493696212768553, 0.022322656631469727, 0.022554208755493164, 
0.02267407989501953, 0.0226776008605957, 0.02241539192199707, 0.022437887191772463, 0.02239673614501953, 0.02233977508544922, 0.022351200103759766, 0.022432416915893556, 0.022389984130859374, 0.022428447723388673, 0.02226380729675293, 0.022372159957885742, 0.022446271896362304, 0.022681503295898436, 0.023115455627441408, 0.022519615173339842, 0.022395551681518553, 0.022214591979980467, 0.022373472213745117, 0.022385568618774415, 0.022237184524536133, 0.022222848892211915, 0.02213644790649414, 0.022355583190917967, 0.02230143928527832, 0.022238592147827148, 0.022272640228271485, 0.02224127960205078, 0.022331392288208008, 0.022447744369506837, 0.022466943740844725, 0.02247065544128418, 0.022382591247558595, 0.02240883255004883, 0.022351936340332033, 0.022391103744506837, 0.022386688232421875, 0.022252704620361326, 0.02206185531616211, 0.022286399841308594, 0.02231705665588379, 0.02231705665588379, 0.02233344078063965, 0.022417152404785156, 0.022395135879516602, 0.022305919647216798, 0.022419872283935546, 0.022290367126464844, 0.02236582374572754, 0.02233353614807129, 0.022532159805297852, 0.022385120391845703, 0.022357503890991212, 0.022335712432861327, 0.022368831634521483, 0.022228992462158204, 0.02226095962524414, 0.022573055267333983, 0.02230131149291992, 0.022625663757324218, 0.022348575592041016, 0.02235955238342285, 0.022483455657958985, 0.02271177673339844, 0.02269811248779297, 0.02271843147277832, 0.022852031707763672, 0.02303385543823242, 0.022810111999511717, 0.022511104583740234, 0.022428031921386718, 0.022479488372802735, 0.02269593620300293, 0.023193439483642577, 0.022914304733276367, 0.022975391387939453, 0.023209823608398437, 0.02322012710571289, 0.023179519653320314, 0.02299456024169922, 0.02286630439758301, 0.02278153610229492, 0.02257961654663086, 0.022583295822143554, 0.022605344772338866, 0.022583776473999024, 0.02242355155944824, 0.02243174362182617, 0.022425600051879883, 0.02248908805847168, 0.022403072357177735, 0.02249728012084961, 0.022703231811523436, 0.02289344024658203, 0.022826080322265626, 0.022814752578735352, 0.022764415740966798, 0.022775360107421875, 0.022865440368652342, 0.022731359481811524]",tokens/s,44.13280469654006,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,885.055488,6089.998336,0.0,5687.476224,5685.679104,s,1,7.51331103515625,7.51331103515625,0.0,7.51331103515625,7.51331103515625,7.51331103515625,7.51331103515625,[7.51331103515625],,kWh,6.143444679173626e-06,6.631059226378801e-07,1.9638904599772466e-06,8.770441061788752e-06,,MB,1207.72608,6324.87936,0.0,5911.871488,5850.451456,s,10,2.101728271484375,0.2101728271484375,0.0036978395990788573,0.2117917938232422,0.21283652191162108,0.21290685501098633,0.21296312149047852,"[0.2002953338623047, 0.21245590209960938, 0.21193440246582032, 0.20767922973632813, 0.20976205444335938, 0.21277725219726562, 0.21164918518066406, 0.2093768310546875, 0.21297718811035157, 
0.21282089233398438]",tokens/s,1218.0451844005336,kWh,6.029830263690071e-06,6.649794397504468e-07,4.001556489224392e-06,1.069636619266491e-05,tokens/kWh,23933361.609810382,MB,1219.559424,6324.87936,0.0,5911.871488,5850.454016,s,10,15.718735229492188,1.5718735229492187,0.0012470025189164056,1.5718263549804687,1.573653173828125,1.573788195800781,1.5738962133789063,"[1.571714111328125, 1.5721929931640626, 1.5712301025390625, 1.5695126953125, 1.5716595458984375, 1.5705223388671874, 1.5739232177734375, 1.57241845703125, 1.5719385986328125, 1.5736231689453124]",tokens/s,40.07956052456219,kWh,4.60255180754731e-05,5.076394852478721e-06,3.0517360014776125e-05,8.161927294272794e-05,tokens/kWh,771876.5155407223,,s,630,15.716359773635855,0.02494660281529502,0.0003109259840188629,0.02489622402191162,0.025104640579223632,0.02520702714920044,0.026850826263427736,"[0.02717900848388672, 0.02582841682434082, 0.025195520401000978, 0.024876991271972657, 0.024821664810180662, 0.02477680015563965, 0.024741344451904297, 0.024721952438354493, 0.024713216781616212, 0.024721248626708985, 0.024696992874145507, 0.024659040451049805, 0.024646560668945314, 0.024694143295288087, 0.024699296951293945, 0.02469811248779297, 0.024720352172851564, 0.02471731185913086, 0.02474393653869629, 0.024715263366699217, 0.02470297622680664, 0.02474575996398926, 0.02475222396850586, 0.02477020835876465, 0.024756704330444336, 0.02479705619812012, 0.024759744644165037, 0.024779359817504884, 0.0251044807434082, 0.0249487361907959, 0.024862720489501954, 0.024875104904174803, 0.02485766410827637, 0.02483305549621582, 0.024781728744506838, 0.024836799621582032, 0.024959199905395506, 0.02496329689025879, 0.024942272186279296, 0.02536079978942871, 0.025255615234375, 0.025069503784179686, 0.025047008514404296, 0.02509833526611328, 0.025189567565917968, 0.02515635108947754, 0.025012287139892578, 0.024962560653686523, 0.024909791946411134, 0.0249451847076416, 0.02493440055847168, 0.025003711700439454, 0.024936767578125, 0.02493235206604004, 0.024911712646484375, 0.024920223236083984, 0.024995840072631836, 0.025093664169311525, 0.024981983184814455, 0.0249487361907959, 0.025032255172729494, 0.025121152877807616, 0.025057184219360353, 0.027023103713989256, 0.025613824844360353, 0.025109247207641603, 0.024893440246582032, 0.024917247772216797, 0.024805280685424806, 0.02478371238708496, 0.024729215621948242, 0.024759807586669923, 0.024781696319580077, 0.024821184158325196, 0.024758560180664062, 0.02478108787536621, 0.024838144302368165, 0.024774656295776368, 0.0248658561706543, 0.02479404830932617, 0.024791040420532227, 0.024798559188842773, 0.024793344497680662, 0.024922527313232423, 0.02483558464050293, 0.02496588706970215, 0.02479283142089844, 0.024854528427124024, 0.024838239669799804, 0.024813472747802736, 0.024784448623657227, 0.024755935668945312, 0.024814048767089845, 0.02484864044189453, 0.02481376075744629, 0.02488710403442383, 0.02489695930480957, 0.02534204864501953, 0.02514374351501465, 0.024907808303833007, 0.025055103302001953, 0.0251060791015625, 0.025035455703735353, 0.02501420783996582, 0.02498348808288574, 0.02506048011779785, 0.024990495681762696, 0.024923711776733398, 0.02493075180053711, 0.024988895416259767, 0.02492464065551758, 0.02493062400817871, 0.024909664154052734, 0.02500035285949707, 0.024925792694091797, 0.024907327651977538, 0.0248734073638916, 0.02496329689025879, 0.02492972755432129, 0.024969728469848632, 0.024946687698364257, 0.025004032135009766, 0.0250283203125, 0.024991008758544923, 0.024961824417114257, 
0.02495715141296387, 0.02669603157043457, 0.025518239974975584, 0.025087615966796876, 0.024892736434936523, 0.024726463317871095, 0.024700384140014648, 0.02472502326965332, 0.024683456420898437, 0.024639551162719726, 0.024653823852539062, 0.024653888702392577, 0.024678367614746094, 0.024700864791870118, 0.02467580795288086, 0.02475040054321289, 0.024738048553466795, 0.02475801658630371, 0.024843711853027344, 0.024746496200561522, 0.02546886444091797, 0.024727935791015624, 0.02475212860107422, 0.02474095916748047, 0.024755104064941406, 0.02479462432861328, 0.024828191757202148, 0.024811456680297852, 0.024844575881958007, 0.024795007705688477, 0.02481177520751953, 0.024856447219848633, 0.024825567245483397, 0.024819936752319336, 0.024789119720458986, 0.024790975570678712, 0.02489129638671875, 0.02496726417541504, 0.025126752853393556, 0.02508576011657715, 0.02503891181945801, 0.025140960693359374, 0.0250982723236084, 0.025092639923095704, 0.025021663665771486, 0.024984352111816405, 0.024940799713134766, 0.024937503814697264, 0.024926687240600588, 0.02495318412780762, 0.024949792861938477, 0.024949087142944335, 0.025072160720825194, 0.02501215934753418, 0.025011999130249023, 0.025090175628662108, 0.025028831481933595, 0.025091264724731447, 0.025000543594360353, 0.02504924774169922, 0.025001440048217773, 0.025088544845581054, 0.025071487426757813, 0.02507542419433594, 0.026910272598266602, 0.025631135940551757, 0.025217023849487305, 0.024942720413208008, 0.0248092155456543, 0.024776031494140625, 0.02480175971984863, 0.02474015998840332, 0.02473103904724121, 0.024748640060424806, 0.024745439529418944, 0.02474220848083496, 0.024728960037231445, 0.024703840255737304, 0.02477187156677246, 0.024756959915161133, 0.0247459831237793, 0.024766464233398438, 0.02472755241394043, 0.02470911979675293, 0.02471731185913086, 0.024707071304321288, 0.02482585525512695, 0.024938335418701173, 0.02480348777770996, 0.02475539207458496, 0.024740480422973634, 0.02486895942687988, 0.024780895233154295, 0.02475382423400879, 0.024742240905761718, 0.024757375717163087, 0.024816511154174804, 0.02482329559326172, 0.02478950309753418, 0.02478879928588867, 0.02485878372192383, 0.024938751220703125, 0.024977344512939453, 0.02499772834777832, 0.02508185577392578, 0.02509228706359863, 0.025038400650024415, 0.024981216430664064, 0.02496678352355957, 0.02502134323120117, 0.024927295684814454, 0.024883392333984376, 0.024909791946411134, 0.02494054412841797, 0.024965951919555664, 0.024905567169189454, 0.024915359497070313, 0.024904415130615233, 0.02497331237792969, 0.024895488739013674, 0.024928255081176756, 0.0249487361907959, 0.02498953628540039, 0.02494435119628906, 0.02495350456237793, 0.02497420883178711, 0.02503696060180664, 0.0268690242767334, 0.025638624191284178, 0.025177087783813477, 0.025018463134765623, 0.024842144012451172, 0.024833887100219727, 0.024762527465820312, 0.0247459831237793, 0.024707136154174806, 0.024719295501708986, 0.024663904190063476, 0.024709280014038087, 0.024766239166259765, 0.024895328521728516, 0.024774816513061522, 0.02476486396789551, 0.024754175186157225, 0.024770431518554688, 0.024775840759277343, 0.024705728530883788, 0.024694175720214845, 0.02474176025390625, 0.024826656341552733, 0.02492313575744629, 0.024980480194091798, 0.024811519622802734, 0.024799232482910157, 0.024823520660400392, 0.02483363151550293, 0.02478540802001953, 0.024807071685791014, 0.02478748893737793, 0.02484783935546875, 0.024916576385498046, 0.024848320007324218, 0.024821535110473632, 0.024858272552490235, 
0.025023040771484376, 0.0250830078125, 0.02511267280578613, 0.02510723114013672, 0.02514739227294922, 0.02504911994934082, 0.025047008514404296, 0.02504207992553711, 0.025102655410766603, 0.025031488418579103, 0.02501193618774414, 0.025064800262451174, 0.025053855895996093, 0.02495692825317383, 0.024893024444580077, 0.02497171211242676, 0.024997856140136717, 0.024936447143554686, 0.024958976745605467, 0.024999935150146483, 0.025040895462036132, 0.02495852851867676, 0.024905920028686523, 0.024959232330322264, 0.025010175704956054, 0.02497331237792969, 0.026970111846923828, 0.025660863876342775, 0.025251903533935548, 0.024955392837524414, 0.024852703094482422, 0.024731424331665038, 0.024710176467895507, 0.024689151763916017, 0.024681055068969726, 0.024670080184936525, 0.024669567108154298, 0.024690847396850586, 0.024770240783691406, 0.02475212860107422, 0.02472137641906738, 0.024681440353393556, 0.02468979263305664, 0.024691360473632813, 0.024748191833496094, 0.024704927444458007, 0.024739839553833007, 0.02474393653869629, 0.024762367248535155, 0.024795135498046874, 0.02476464080810547, 0.02473347282409668, 0.02480441665649414, 0.024759231567382814, 0.024806720733642578, 0.024810176849365234, 0.024786815643310547, 0.02477267265319824, 0.024784191131591797, 0.024888063430786134, 0.02487295913696289, 0.02484223937988281, 0.02485638427734375, 0.024881343841552734, 0.025048576354980468, 0.02508435249328613, 0.02509235191345215, 0.025058975219726564, 0.025102783203125, 0.025006912231445313, 0.024994720458984376, 0.02493235206604004, 0.025095327377319336, 0.025016895294189455, 0.024973920822143555, 0.025011104583740236, 0.025049888610839843, 0.024987648010253907, 0.02497952079772949, 0.024967103958129882, 0.025028608322143556, 0.024976768493652345, 0.025010271072387694, 0.025004352569580078, 0.025028831481933595, 0.024991392135620117, 0.025063776016235353, 0.025079296112060546, 0.025024991989135742, 0.026806272506713868, 0.025763744354248046, 0.025210655212402344, 0.02502454376220703, 0.024894975662231447, 0.024791711807250975, 0.024748159408569337, 0.024725376129150392, 0.024720703125, 0.024728384017944336, 0.024674400329589844, 0.024685760498046876, 0.024675039291381835, 0.02468454360961914, 0.024756223678588866, 0.02476851272583008, 0.02474553680419922, 0.024791168212890624, 0.02475004768371582, 0.02475872039794922, 0.024769792556762694, 0.02476304054260254, 0.02482585525512695, 0.024961023330688475, 0.02492969512939453, 0.024926816940307617, 0.024886495590209962, 0.02485327911376953, 0.024813087463378906, 0.0248668155670166, 0.02480175971984863, 0.024821760177612305, 0.024798847198486327, 0.024811904907226564, 0.024841567993164063, 0.024849056243896484, 0.024944095611572265, 0.025090688705444335, 0.025147296905517577, 0.02512214469909668, 0.02511529541015625, 0.0251494083404541, 0.025117919921875, 0.02501433563232422, 0.025033472061157226, 0.025097408294677735, 0.02506015968322754, 0.024992895126342774, 0.024990591049194336, 0.025010208129882812, 0.025014080047607423, 0.02498067283630371, 0.025087039947509767, 0.025061279296875, 0.02507321548461914, 0.02493075180053711, 0.024948287963867187, 0.02499612808227539, 0.02498316764831543, 0.024924415588378906, 0.024936031341552735, 0.026639039993286134, 0.02499286460876465, 0.026998783111572267, 0.025712608337402344, 0.025171648025512694, 0.024891712188720702, 0.02479088020324707, 0.02477004814147949, 0.024762367248535155, 0.024813568115234375, 0.02470979118347168, 0.024716928482055665, 0.024862464904785157, 0.0247589111328125, 0.02469638442993164, 
0.024686431884765624, 0.024726112365722655, 0.024754175186157225, 0.02472470474243164, 0.024742687225341797, 0.024764415740966796, 0.024799039840698242, 0.024770496368408203, 0.024774911880493165, 0.024777952194213866, 0.024781600952148437, 0.02476780891418457, 0.024783103942871094, 0.024834495544433594, 0.02478220748901367, 0.024817855834960937, 0.02480940818786621, 0.0248951358795166, 0.024867679595947264, 0.024833536148071288, 0.024891904830932617, 0.024985599517822265, 0.02499180793762207, 0.024993728637695313, 0.024995071411132812, 0.025173952102661133, 0.025202592849731444, 0.025172704696655272, 0.025114112854003907, 0.025161535263061523, 0.025015167236328125, 0.025014272689819338, 0.02497331237792969, 0.024956544876098632, 0.024892864227294923, 0.0249496955871582, 0.024944799423217773, 0.024941471099853514, 0.025002944946289064, 0.024982816696166994, 0.02496076774597168, 0.02493129539489746, 0.02496009635925293, 0.024992671966552735, 0.025051136016845704, 0.025029695510864258, 0.025084863662719725, 0.025159679412841796, 0.02514463996887207, 0.025147743225097656, 0.026806079864501953, 0.02559791946411133, 0.025178304672241213, 0.024991968154907226, 0.024774112701416016, 0.0247108154296875, 0.024713375091552733, 0.02481407928466797, 0.024786527633666993, 0.02471779251098633, 0.024706207275390624, 0.024690784454345704, 0.024789695739746095, 0.024784063339233397, 0.024798240661621094, 0.024768287658691407, 0.02484223937988281, 0.024851743698120116, 0.024865184783935547, 0.024909343719482422, 0.024802047729492186, 0.0248154239654541, 0.024795103073120116, 0.024889631271362303, 0.024895456314086913, 0.02500998306274414, 0.024867008209228516, 0.024879104614257814, 0.02488115119934082, 0.024836095809936523, 0.02481952095031738, 0.02480940818786621, 0.024852767944335937, 0.0248723201751709, 0.02484489631652832, 0.024815616607666017, 0.02491587257385254, 0.025065568923950194, 0.025046592712402345, 0.02508025550842285, 0.025092096328735353, 0.02509823989868164, 0.025026399612426756, 0.025014432907104492, 0.0249835205078125, 0.025026336669921875, 0.025017663955688475, 0.02494937515258789, 0.02493276786804199, 0.024965024948120116, 0.024900672912597656, 0.024882112503051758, 0.024887296676635744, 0.025008127212524413, 0.024971263885498047, 0.024979455947875977, 0.024977407455444335, 0.02505523109436035, 0.025003711700439454, 0.024996255874633787, 0.025001888275146485, 0.02504649543762207, 0.025023103713989258, 0.027023359298706053, 0.025647071838378905, 0.02533990478515625, 0.024985408782958983, 0.024826047897338867, 0.02474415969848633, 0.02475379180908203, 0.02481558418273926, 0.02472979164123535, 0.024716447830200196, 0.024748895645141603, 0.02476851272583008, 0.024774112701416016, 0.024751903533935547, 0.02480953598022461, 0.024828607559204102, 0.024792959213256836, 0.024873088836669922, 0.024856544494628905, 0.0248436164855957, 0.02481590461730957, 0.024835615158081054, 0.024887775421142577, 0.024839712142944337, 0.024865663528442383, 0.02486457633972168, 0.024848575592041015, 0.024811296463012694, 0.024793312072753905, 0.0247825927734375, 0.024840448379516603, 0.024886783599853517, 0.024867424011230467, 0.024860576629638673, 0.024854528427124024, 0.024841567993164063, 0.024999839782714844, 0.025018335342407227, 0.02507366371154785, 0.02515433692932129, 0.025249471664428712, 0.025221439361572267, 0.025179616928100584, 0.02514496040344238, 0.025053375244140624, 0.024990175247192385, 0.024987520217895506, 0.024975744247436524, 0.024975360870361327, 0.024950687408447265, 0.024932096481323242, 
0.024975168228149415, 0.02497110366821289, 0.024996799468994142, 0.02498966407775879, 0.02509391975402832, 0.02529689598083496, 0.025010175704956054, 0.02498150444030762, 0.025007328033447265, 0.024986528396606447, 0.02499577522277832, 0.025014047622680665]",tokens/s,40.08561836671764,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1206, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1011, in forward 
layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 751, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 550, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,868.380672,2698.903552,0.0,2296.38144,2202.20672,s,1,7.29626220703125,7.29626220703125,0.0,7.29626220703125,7.29626220703125,7.29626220703125,7.29626220703125,[7.29626220703125],,kWh,4.935269266585844e-06,5.347519941443314e-07,9.266674079744242e-07,6.396688668704599e-06,,MB,1193.074688,2799.566848,0.0,2386.558976,2267.495936,s,10,2.0040520782470703,0.20040520782470703,0.0010294885050289014,0.20062445068359375,0.2013868698120117,0.20153730697631836,0.20165765670776367,"[0.20110902404785155, 0.2004202880859375, 0.1984439697265625, 0.20005290222167968, 0.201687744140625, 0.1986623077392578, 0.20056195068359375, 0.20107350158691406, 0.20068695068359374, 0.20135343933105468]",tokens/s,1277.4119134864068,kWh,6.072289156546027e-06,6.696652717528203e-07,4.015144935469592e-06,1.075709936376844e-05,tokens/kWh,23798236.99149301,MB,1241.423872,2799.566848,0.0,2386.558976,2267.498496,s,10,13.698938476562502,1.3698938476562499,0.006363824259220363,1.3717947998046875,1.37600439453125,1.37707666015625,1.3779344726562501,"[1.3573558349609376, 1.3711552734375, 1.3737548828125, 1.372434326171875, 1.368803466796875, 1.3665272216796875, 1.3604898681640625, 1.3745025634765624, 1.37814892578125, 1.37576611328125]",tokens/s,45.98896484409113,kWh,3.975353427262645e-05,4.384528206361284e-06,2.324657131833177e-05,6.73846337973195e-05,tokens/kWh,934931.2513813215,,s,630,13.694597980499262,0.021737457111903596,0.0005535640063114479,0.021635440826416018,0.022145173835754393,0.022337561798095703,0.023140980796813967,"[0.022306367874145507, 0.02171900749206543, 0.021530784606933594, 0.02131590461730957, 0.02144041633605957, 0.021298559188842773, 0.021612447738647463, 0.021524639129638673, 0.021922143936157226, 0.022409536361694335, 0.021760000228881835, 0.021547008514404296, 0.02146303939819336, 0.02142617607116699, 0.02171254348754883, 0.02195471954345703, 0.02174380874633789, 0.02152038383483887, 0.02225712013244629, 0.021580320358276367, 0.021462591171264648, 0.02161631965637207, 0.021711231231689453, 0.021430656433105467, 0.021331968307495116, 0.02179657554626465, 0.02137116813659668, 0.021403135299682616, 0.021739839553833008, 0.02177862358093262, 0.021581823348999024, 0.02153267288208008, 0.021751775741577148, 0.021568607330322266, 0.02150495910644531, 0.02138857650756836, 0.021311616897583006, 0.02139401626586914, 0.021387231826782226, 0.021710880279541017, 0.021243648529052736, 0.021408000946044923, 0.021378175735473633, 0.02138368034362793, 0.021259904861450196, 0.021295360565185547, 0.0214021110534668, 0.021329919815063478, 0.02136284828186035, 0.021222623825073242, 0.021398111343383788, 0.021298240661621094, 0.021486560821533204, 0.021458911895751952, 0.02138051223754883, 0.021502080917358397, 0.021473823547363283, 0.021526079177856445, 
0.0213221435546875, 0.0212807674407959, 0.021272607803344726, 0.021312704086303712, 0.021961408615112303, 0.022466144561767577, 0.02177235221862793, 0.02162723159790039, 0.02130534362792969, 0.021547008514404296, 0.02181030464172363, 0.021978015899658202, 0.021772415161132812, 0.021910879135131837, 0.021608959197998046, 0.021432319641113282, 0.021423648834228516, 0.02163871955871582, 0.021730207443237306, 0.021555200576782226, 0.02186854362487793, 0.02210736083984375, 0.021886751174926757, 0.023056543350219727, 0.021665824890136718, 0.021941055297851564, 0.021825536727905274, 0.021800512313842772, 0.021629375457763673, 0.021591232299804686, 0.02191257667541504, 0.021804800033569337, 0.02156159973144531, 0.021595808029174806, 0.02194144058227539, 0.021834720611572267, 0.02145894432067871, 0.021428224563598632, 0.021292255401611327, 0.021692928314208985, 0.02165577507019043, 0.02177235221862793, 0.02202115249633789, 0.022037471771240234, 0.021970720291137696, 0.021687999725341797, 0.02162719917297363, 0.02169059181213379, 0.02147270393371582, 0.021647712707519532, 0.021717216491699217, 0.021925888061523437, 0.02209129524230957, 0.02205094337463379, 0.021813600540161134, 0.021802303314208984, 0.022022848129272462, 0.022273279190063475, 0.021821664810180663, 0.02180713653564453, 0.021534751892089844, 0.021552736282348633, 0.021735872268676758, 0.02154489517211914, 0.021551359176635743, 0.021498144149780272, 0.021450719833374023, 0.021561344146728514, 0.02234553527832031, 0.02189091110229492, 0.02148796844482422, 0.021539936065673827, 0.021521312713623047, 0.02126848030090332, 0.02134982490539551, 0.021291616439819337, 0.021337696075439453, 0.02138559913635254, 0.02144588851928711, 0.021295040130615235, 0.021762527465820313, 0.022542688369750978, 0.021557247161865235, 0.021409791946411134, 0.021618688583374023, 0.021315040588378905, 0.021215456008911133, 0.021584192276000978, 0.02145484733581543, 0.021436416625976562, 0.021301248550415038, 0.02126348876953125, 0.021358879089355468, 0.021230079650878905, 0.021330015182495117, 0.02127667236328125, 0.0245614070892334, 0.028383359909057618, 0.02162220764160156, 0.021661535263061523, 0.021277311325073243, 0.021227712631225585, 0.021298912048339842, 0.02129680061340332, 0.021305023193359376, 0.02124652862548828, 0.021247488021850586, 0.021293983459472657, 0.021370880126953123, 0.021149696350097655, 0.02122774314880371, 0.021217056274414062, 0.022576831817626954, 0.028219711303710936, 0.02190729522705078, 0.021446271896362303, 0.021824031829833983, 0.021995519638061522, 0.022181024551391603, 0.021873504638671874, 0.021850112915039063, 0.021640447616577147, 0.02170732879638672, 0.021592256546020507, 0.021493759155273438, 0.021439647674560545, 0.021563776016235353, 0.02187516784667969, 0.021391359329223633, 0.021743616104125976, 0.021855520248413085, 0.02233129692077637, 0.02195465660095215, 0.02192755126953125, 0.022198623657226562, 0.022168991088867187, 0.021897247314453125, 0.022461023330688477, 0.02207744026184082, 0.02199737548828125, 0.02164975929260254, 0.022091583251953126, 0.022200351715087892, 0.022394208908081054, 0.022325536727905274, 0.022130752563476564, 0.02213715171813965, 0.022050592422485353, 0.02189743995666504, 0.021950464248657226, 0.02175699234008789, 0.02178758430480957, 0.021535808563232423, 0.02154787254333496, 0.021588064193725585, 0.02156662368774414, 0.021797407150268556, 0.021521856307983398, 0.021544960021972655, 0.02154537582397461, 0.021535200119018556, 0.021786048889160158, 0.022118976593017578, 0.021737472534179687, 
0.021503423690795897, 0.021770816802978515, 0.02210201644897461, 0.021636383056640625, 0.021615327835083006, 0.021743743896484376, 0.021496896743774415, 0.021459199905395507, 0.021432863235473634, 0.021975008010864258, 0.02194643211364746, 0.02166537666320801, 0.021211551666259765, 0.021589471817016603, 0.02142457580566406, 0.021375072479248046, 0.021317632675170898, 0.021405696868896484, 0.02160755157470703, 0.021979007720947265, 0.021914623260498048, 0.02174959945678711, 0.021654943466186523, 0.021588096618652342, 0.021563743591308592, 0.02146291160583496, 0.021483999252319336, 0.02168832015991211, 0.021900896072387696, 0.021606271743774413, 0.022360288619995117, 0.021778911590576173, 0.021675392150878905, 0.02184012794494629, 0.02165705680847168, 0.02179574394226074, 0.022122047424316407, 0.02191404724121094, 0.02189017677307129, 0.021820032119750976, 0.02183193588256836, 0.021523935317993164, 0.021410335540771486, 0.021400768280029295, 0.021438880920410155, 0.02175619125366211, 0.02200992012023926, 0.021882335662841798, 0.022035167694091796, 0.021819263458251952, 0.021635072708129883, 0.02149177551269531, 0.0214138240814209, 0.021382848739624025, 0.021767520904541017, 0.02187772750854492, 0.02163039970397949, 0.02137875175476074, 0.02120921516418457, 0.021354848861694337, 0.021391775131225584, 0.02182691192626953, 0.021844640731811523, 0.021798912048339843, 0.02184601593017578, 0.021790271759033204, 0.021659391403198242, 0.021512895584106444, 0.021464607238769532, 0.021703136444091796, 0.021917695999145507, 0.02185215950012207, 0.02205695915222168, 0.022140256881713866, 0.022043296813964844, 0.021984575271606445, 0.021893760681152345, 0.02212460708618164, 0.022068992614746093, 0.022103391647338867, 0.021922719955444335, 0.021831647872924805, 0.02152556800842285, 0.021455839157104493, 0.021465087890625, 0.0212541446685791, 0.02136457633972168, 0.021386560440063478, 0.021358848571777344, 0.02133462333679199, 0.02146099281311035, 0.02147942352294922, 0.022191551208496092, 0.02474380874633789, 0.021902719497680665, 0.021549312591552735, 0.021454751968383787, 0.021219968795776367, 0.021209087371826172, 0.0212807674407959, 0.02121673583984375, 0.02123119926452637, 0.021280960083007814, 0.021364543914794924, 0.021918527603149413, 0.022114112854003908, 0.022190143585205078, 0.022165760040283203, 0.02214473533630371, 0.022343711853027345, 0.022070623397827147, 0.021793695449829103, 0.021841920852661133, 0.02176323127746582, 0.021480287551879883, 0.021544960021972655, 0.021458080291748047, 0.02140604782104492, 0.02189299201965332, 0.021885568618774415, 0.02155683135986328, 0.021318048477172852, 0.02130240058898926, 0.021572320938110352, 0.02132156753540039, 0.021432096481323243, 0.02167862319946289, 0.022066560745239258, 0.022095903396606446, 0.02225971221923828, 0.021959264755249022, 0.022091167449951172, 0.0219835205078125, 0.021811519622802734, 0.021807104110717773, 0.021761056900024413, 0.021506271362304687, 0.021979551315307617, 0.021448223114013672, 0.021238048553466796, 0.02128700828552246, 0.021330368041992186, 0.021207040786743164, 0.021248191833496095, 0.02134534454345703, 0.02149772834777832, 0.022139263153076173, 0.022063392639160156, 0.021569503784179687, 0.021456256866455078, 0.02135491180419922, 0.02136300849914551, 0.021341503143310545, 0.021439104080200194, 0.021450496673583983, 0.021459199905395507, 0.022214656829833986, 0.02176470375061035, 0.021412191390991212, 0.021395456314086913, 0.021382944107055664, 0.021276895523071288, 0.021346303939819337, 0.021581823348999024, 
0.02187884712219238, 0.021669984817504883, 0.02184115219116211, 0.021690975189208983, 0.021661119461059572, 0.021593759536743164, 0.021479520797729492, 0.021436960220336913, 0.021536191940307616, 0.021588191986083985, 0.021554943084716796, 0.021375871658325194, 0.021212575912475586, 0.021337696075439453, 0.021205184936523437, 0.021236223220825197, 0.021336383819580078, 0.021338111877441408, 0.02125993537902832, 0.021348703384399415, 0.021218656539916992, 0.021246463775634765, 0.021349952697753905, 0.02121993637084961, 0.02126950454711914, 0.02126950454711914, 0.02146099281311035, 0.021327871322631836, 0.021526527404785157, 0.021298912048339842, 0.02121084785461426, 0.021344831466674805, 0.021313535690307618, 0.021436063766479493, 0.021475360870361327, 0.02126265525817871, 0.02123980712890625, 0.021345760345458983, 0.0214083194732666, 0.021321088790893554, 0.02129280090332031, 0.02133078384399414, 0.02198294448852539, 0.021886751174926757, 0.02159872055053711, 0.02151379203796387, 0.02169491195678711, 0.02293350410461426, 0.025462783813476563, 0.021987327575683592, 0.022214656829833986, 0.022984256744384767, 0.02165331268310547, 0.02153071975708008, 0.02152911949157715, 0.02307891273498535, 0.022342687606811525, 0.0216844482421875, 0.021717504501342775, 0.02144211196899414, 0.0214715518951416, 0.02143270492553711, 0.02162019157409668, 0.021654048919677735, 0.02147737693786621, 0.021935583114624024, 0.021819456100463867, 0.021586048126220704, 0.021600608825683595, 0.021602304458618164, 0.02173513603210449, 0.021647615432739256, 0.02165353584289551, 0.021599456787109374, 0.02147203254699707, 0.021906911849975588, 0.021651327133178713, 0.021408416748046874, 0.02152454376220703, 0.02199135971069336, 0.02224742317199707, 0.02200371170043945, 0.0220897274017334, 0.022185983657836913, 0.022411264419555665, 0.021950464248657226, 0.02190540885925293, 0.02181033515930176, 0.0217476806640625, 0.021812095642089843, 0.02205244827270508, 0.02181942367553711, 0.021939647674560546, 0.02184828758239746, 0.021932960510253906, 0.02200739288330078, 0.02172336006164551, 0.021582880020141602, 0.021467775344848634, 0.02149193572998047, 0.022532224655151367, 0.021577695846557617, 0.02196646308898926, 0.021467552185058594, 0.021415456771850586, 0.021371360778808593, 0.021426048278808594, 0.0214814395904541, 0.02178268814086914, 0.021995328903198243, 0.022003904342651367, 0.02249241638183594, 0.022057567596435547, 0.02183302307128906, 0.021807199478149415, 0.021918176651000976, 0.021982528686523437, 0.021806047439575194, 0.022248960494995116, 0.021711360931396483, 0.02172854423522949, 0.02184614372253418, 0.021846912384033204, 0.0220218563079834, 0.02206915283203125, 0.021991455078125, 0.022038719177246095, 0.02194371223449707, 0.02206889533996582, 0.02186057662963867, 0.02197715187072754, 0.0220861759185791, 0.022149120330810547, 0.022197248458862305, 0.022047967910766603, 0.02161964797973633, 0.021564128875732422, 0.021586111068725586, 0.021860288619995116, 0.022091392517089845, 0.02140582466125488, 0.021452447891235352, 0.022510175704956056, 0.022302175521850588, 0.021623327255249025, 0.02184752082824707, 0.02182143974304199, 0.023022111892700196, 0.021618688583374023, 0.021590015411376954, 0.021630783081054688, 0.02152899169921875, 0.021657440185546876, 0.02179475212097168, 0.021936031341552736, 0.021416032791137695, 0.02185932731628418, 0.022957056045532227, 0.022255071640014647, 0.021624383926391603, 0.022858976364135742, 0.022269695281982423, 0.02169036865234375, 0.021495712280273437, 0.02141788864135742, 
0.02126620864868164, 0.021567903518676757, 0.021446399688720703, 0.02216985511779785, 0.022197343826293944, 0.02223356819152832, 0.021928096771240236, 0.021623071670532228, 0.021393407821655275, 0.02264374351501465, 0.021576671600341796, 0.02149580764770508, 0.021648895263671874, 0.02137548828125, 0.021380287170410156, 0.021593088150024413, 0.02314854431152344, 0.02199923133850098, 0.021637504577636718, 0.021800960540771484, 0.021579296112060546, 0.021461536407470703, 0.021469120025634766, 0.021397632598876955, 0.02146940803527832, 0.0214749755859375, 0.021406944274902345, 0.021459743499755858, 0.021856096267700194, 0.02181727981567383, 0.02158582305908203, 0.021592384338378907, 0.02166783905029297, 0.02202934455871582, 0.02194892883300781, 0.022163423538208007, 0.021989248275756837, 0.02312246322631836, 0.022155616760253908, 0.022482688903808595, 0.021977247238159178, 0.021804895401000977, 0.02172697639465332, 0.021419519424438475, 0.02134448051452637, 0.02142185592651367, 0.02135078430175781, 0.02174310493469238, 0.02153152084350586, 0.022034431457519533, 0.021720544815063476, 0.02155388832092285, 0.02166697692871094, 0.021633855819702147, 0.021921375274658202, 0.02199760055541992, 0.021588191986083985, 0.021608448028564452, 0.022015359878540038, 0.022057600021362304, 0.022384639739990234, 0.02219824028015137, 0.022118080139160157, 0.0222043514251709, 0.022289920806884765, 0.02204355239868164, 0.021903455734252928, 0.02330953598022461, 0.02174857521057129, 0.021647199630737305, 0.02172719955444336, 0.021673215866088866, 0.021655424118041992, 0.02163580894470215, 0.021634815216064453, 0.021623231887817382, 0.021573631286621094, 0.02162073516845703, 0.02155628776550293]",tokens/s,46.003541023774694,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,867.856384,4804.44416,0.0,4401.922048,4400.206336,s,1,7.37841748046875,7.37841748046875,0.0,7.37841748046875,7.37841748046875,7.37841748046875,7.37841748046875,[7.37841748046875],,kWh,4.949717208334429e-06,5.388015072771084e-07,2.105279461928067e-06,7.5937981775396046e-06,,MB,1183.21152,4903.010304,0.0,4490.002432,4455.927296,s,10,1.586316879272461,0.15863168792724608,0.006245534243113243,0.16039868927001955,0.1633203369140625,0.16408578186035155,0.16469813781738282,"[0.14963296508789062, 0.16276019287109375, 0.16485122680664063, 0.15991238403320313, 0.16315023803710937, 0.15953146362304688, 0.16088499450683594, 0.15961251831054687, 0.14402359008789062, 0.16195730590820312]",tokens/s,1613.801147456808,kWh,4.466229564647132e-06,4.925415332828462e-07,2.973899685515031e-06,7.932670783445009e-06,tokens/kWh,32271602.71598011,MB,1212.035072,4911.398912,0.0,4498.39104,4455.929856,s,10,13.548433959960937,1.3548433959960937,0.0025823938091832036,1.354104553222656,1.3582045776367189,1.358208367919922,1.3582114001464844,"[1.352126220703125, 1.352859130859375, 1.3521011962890626, 1.3529039306640624, 1.352084716796875, 
1.35530517578125, 1.3581453857421875, 1.3564923095703125, 1.358212158203125, 1.3582037353515626]",tokens/s,46.499839159404694,kWh,3.974894703535349e-05,4.3836588282026825e-06,2.6239568129688365e-05,7.037217399324453e-05,tokens/kWh,895240.2125028532,,s,630,13.546216690063476,0.02150193125406901,0.00046422688745873707,0.021388095855712892,0.02165515537261963,0.022057723236083984,0.02428231872558594,"[0.024194591522216796, 0.022776447296142577, 0.022095712661743164, 0.021735424041748046, 0.02142207908630371, 0.021370880126953123, 0.021344255447387696, 0.02126028823852539, 0.02131551933288574, 0.021262399673461913, 0.021257951736450197, 0.021223712921142578, 0.02122956848144531, 0.021325151443481447, 0.021456607818603514, 0.021386175155639647, 0.021448415756225588, 0.021450847625732423, 0.02143836784362793, 0.021420320510864257, 0.021376256942749024, 0.021314304351806642, 0.021301248550415038, 0.02132489585876465, 0.021376991271972658, 0.021334976196289063, 0.02134182357788086, 0.021264768600463866, 0.021256223678588867, 0.021348320007324218, 0.021334016799926758, 0.021319679260253906, 0.021403648376464843, 0.021305215835571288, 0.021338336944580077, 0.021364639282226563, 0.02148761558532715, 0.02139094352722168, 0.021408159255981444, 0.021290048599243164, 0.021322015762329102, 0.02133468818664551, 0.02129007911682129, 0.021385951995849608, 0.021509855270385743, 0.021524831771850585, 0.021528703689575195, 0.021604352951049805, 0.021603328704833984, 0.021559776306152342, 0.02149839973449707, 0.02144256019592285, 0.021436063766479493, 0.021379423141479493, 0.021372928619384765, 0.021351423263549805, 0.021318592071533204, 0.021263904571533203, 0.02126902389526367, 0.021820991516113282, 0.021305791854858397, 0.0212541446685791, 0.021243904113769533, 0.024647359848022462, 0.02273516845703125, 0.022038240432739258, 0.021670175552368165, 0.021395231246948244, 0.02131337547302246, 0.021324159622192383, 0.0212807674407959, 0.021307392120361326, 0.021338111877441408, 0.021223039627075196, 0.021168512344360353, 0.02126028823852539, 0.021325567245483398, 0.021350656509399414, 0.021364736557006835, 0.021327840805053712, 0.021283935546875, 0.02137388801574707, 0.021280799865722656, 0.021278688430786133, 0.021336063385009766, 0.021309120178222656, 0.0213602237701416, 0.021539072036743163, 0.02140617561340332, 0.021385215759277345, 0.021346303939819337, 0.021694047927856445, 0.021340576171875, 0.021370271682739257, 0.02139401626586914, 0.021385215759277345, 0.021377279281616212, 0.021323392868041992, 0.02140943908691406, 0.021346784591674803, 0.021333728790283203, 0.021286304473876954, 0.02126646423339844, 0.021299936294555663, 0.02128873634338379, 0.02130575942993164, 0.021278207778930663, 0.021332416534423828, 0.021514240264892577, 0.021550559997558595, 0.021573247909545897, 0.02165443229675293, 0.02160963249206543, 0.02159292793273926, 0.021602304458618164, 0.021544416427612303, 0.0214385929107666, 0.021404064178466797, 0.021432319641113282, 0.02135558319091797, 0.021370847702026366, 0.02138598442077637, 0.02130905532836914, 0.02136841583251953, 0.021519359588623048, 0.021396703720092773, 0.024307296752929686, 0.022756895065307616, 0.022073663711547852, 0.021667776107788087, 0.02151078414916992, 0.021390655517578124, 0.021363456726074218, 0.02130732727050781, 0.021340160369873046, 0.021337568283081056, 0.021340703964233397, 0.02126643180847168, 0.021221023559570312, 0.021280160903930666, 0.02129190444946289, 0.02123958396911621, 0.021430559158325195, 0.021316768646240235, 0.021350400924682617, 
0.021286943435668945, 0.021332767486572264, 0.021273887634277344, 0.02128767967224121, 0.021391359329223633, 0.021372415542602538, 0.021388832092285155, 0.021406688690185548, 0.02142416000366211, 0.021442527770996093, 0.02147532844543457, 0.021397407531738282, 0.02133616065979004, 0.021349536895751954, 0.021295839309692383, 0.021313663482666015, 0.02130860710144043, 0.02132009506225586, 0.021383583068847658, 0.02130659294128418, 0.02128156852722168, 0.021344192504882814, 0.02131564712524414, 0.021352447509765626, 0.02159596824645996, 0.021551296234130858, 0.02153267288208008, 0.021594112396240234, 0.021493759155273438, 0.02147327995300293, 0.021458688735961913, 0.02143667221069336, 0.02146873664855957, 0.021397056579589843, 0.02134899139404297, 0.021367040634155274, 0.021292160034179688, 0.021326047897338867, 0.021319520950317382, 0.02130121612548828, 0.021439327239990234, 0.02149580764770508, 0.021389312744140625, 0.02141926383972168, 0.024277183532714845, 0.022794687271118164, 0.02200217628479004, 0.021629951477050782, 0.02135536003112793, 0.021309440612792968, 0.02123366355895996, 0.02115718460083008, 0.02121561622619629, 0.021236032485961915, 0.02125827217102051, 0.021266016006469726, 0.021221248626708985, 0.021312000274658204, 0.021334016799926758, 0.02124185562133789, 0.021313535690307618, 0.02144278335571289, 0.021413759231567384, 0.021319135665893555, 0.02134796714782715, 0.021281600952148438, 0.02128691291809082, 0.021280832290649414, 0.021266368865966796, 0.02126582336425781, 0.02138492774963379, 0.021336959838867187, 0.02133616065979004, 0.021952415466308595, 0.021413888931274414, 0.02137398338317871, 0.02137392044067383, 0.021259584426879884, 0.021300960540771484, 0.021338623046875, 0.021311967849731446, 0.021432319641113282, 0.0213637752532959, 0.021428415298461914, 0.021420799255371093, 0.02138528060913086, 0.021310752868652343, 0.02151081657409668, 0.021606399536132814, 0.021622783660888673, 0.021725183486938478, 0.021617952346801757, 0.02160041618347168, 0.021574207305908203, 0.02147871971130371, 0.021460927963256837, 0.021394176483154295, 0.021399616241455078, 0.021464384078979493, 0.021371519088745117, 0.021440511703491212, 0.021383167266845703, 0.021366464614868165, 0.021446624755859376, 0.021545312881469728, 0.02143846321105957, 0.021456895828247072, 0.02447577667236328, 0.02279654312133789, 0.022013952255249023, 0.021577375411987305, 0.021365087509155275, 0.02129100799560547, 0.021216640472412108, 0.021219968795776367, 0.02131372833251953, 0.021471040725708008, 0.021397504806518555, 0.02130534362792969, 0.02124799919128418, 0.02129715156555176, 0.021331968307495116, 0.021271968841552736, 0.021324384689331056, 0.021301248550415038, 0.021307392120361326, 0.021265472412109375, 0.021263296127319337, 0.021198848724365234, 0.021231327056884765, 0.021301536560058593, 0.02124799919128418, 0.021351903915405274, 0.021360960006713867, 0.021387487411499023, 0.021313535690307618, 0.021327871322631836, 0.021284383773803713, 0.02127244758605957, 0.021396064758300783, 0.021300640106201172, 0.021340639114379882, 0.021940351486206055, 0.02124595260620117, 0.021291263580322267, 0.021282560348510744, 0.02123289680480957, 0.021289312362670898, 0.021372671127319335, 0.0213789119720459, 0.021486400604248047, 0.021639360427856445, 0.021701887130737306, 0.021713184356689452, 0.02158812713623047, 0.021532575607299806, 0.021516511917114258, 0.021372928619384765, 0.021391359329223633, 0.02140176010131836, 0.021344095230102538, 0.021364736557006835, 0.02143846321105957, 0.021437599182128907, 
0.021439327239990234, 0.021529823303222655, 0.02145929527282715, 0.021428672790527344, 0.02134809684753418, 0.021340063095092773, 0.024528831481933595, 0.022758975982666015, 0.021981855392456055, 0.021612543106079102, 0.021403648376464843, 0.02131878471374512, 0.021236608505249024, 0.021161983489990235, 0.02124720001220703, 0.021247808456420898, 0.021313791275024415, 0.021295839309692383, 0.021331968307495116, 0.021374496459960937, 0.021391839981079103, 0.0212992000579834, 0.021384544372558593, 0.021646080017089845, 0.021475231170654297, 0.021409120559692383, 0.021418880462646485, 0.0213885440826416, 0.021428096771240236, 0.021299871444702148, 0.021373151779174804, 0.021331743240356447, 0.021397504806518555, 0.02136400032043457, 0.021418720245361327, 0.021384672164916994, 0.021371583938598632, 0.02135024070739746, 0.021884927749633788, 0.021823488235473632, 0.021381023406982422, 0.021345375061035156, 0.02132252883911133, 0.02145699119567871, 0.021305599212646485, 0.021305215835571288, 0.021323776245117186, 0.021352703094482423, 0.02144041633605957, 0.02156937599182129, 0.02152774429321289, 0.021701440811157227, 0.021671392440795897, 0.021772735595703124, 0.02154876708984375, 0.021502336502075194, 0.021501951217651367, 0.021454208374023436, 0.02139743995666504, 0.02135420799255371, 0.02133705520629883, 0.021340160369873046, 0.021352447509765626, 0.021489664077758788, 0.02151219177246094, 0.021497856140136717, 0.021661663055419923, 0.021520416259765626, 0.02147030448913574, 0.024429344177246095, 0.02263039970397949, 0.021925888061523437, 0.021590272903442384, 0.021302719116210938, 0.02135024070739746, 0.021287647247314453, 0.02126972770690918, 0.021367263793945313, 0.02136070442199707, 0.023580671310424805, 0.02127180862426758, 0.021385183334350587, 0.021375392913818358, 0.02140403175354004, 0.021415935516357423, 0.0214234561920166, 0.02134227180480957, 0.02138153648376465, 0.021319711685180664, 0.02135862350463867, 0.021428352355957032, 0.021389312744140625, 0.022313983917236328, 0.021399999618530275, 0.021406272888183593, 0.02139308738708496, 0.02128518486022949, 0.021342144012451172, 0.021364959716796875, 0.021387199401855468, 0.021464384078979493, 0.02137353515625, 0.02130499267578125, 0.021278911590576172, 0.021325983047485352, 0.021270431518554688, 0.021380672454833983, 0.021445152282714843, 0.021427679061889648, 0.021457439422607423, 0.021438175201416016, 0.02139779281616211, 0.02156879997253418, 0.02156822395324707, 0.02162483215332031, 0.02167193603515625, 0.021529951095581056, 0.02146371269226074, 0.021421503067016602, 0.021369407653808594, 0.02143846321105957, 0.02142207908630371, 0.02142361640930176, 0.021465599060058595, 0.021425600051879882, 0.02150662422180176, 0.02145894432067871, 0.02302566337585449, 0.021336063385009766, 0.021360832214355467, 0.02141779136657715, 0.021387264251708983, 0.02417203140258789, 0.02266160011291504, 0.021930240631103517, 0.02156723213195801, 0.021487360000610353, 0.021309696197509765, 0.021311487197875977, 0.02131283187866211, 0.021318336486816407, 0.021288095474243166, 0.021313791275024415, 0.021209695816040038, 0.021210304260253908, 0.021326528549194337, 0.02130748748779297, 0.021466144561767576, 0.021504735946655272, 0.0213404483795166, 0.021350400924682617, 0.021323776245117186, 0.02135180854797363, 0.021327871322631836, 0.02135308837890625, 0.021340160369873046, 0.02141168022155762, 0.021440671920776366, 0.021413888931274414, 0.021364736557006835, 0.021405696868896484, 0.021411264419555664, 0.0214451847076416, 0.021376575469970703, 
0.02209049606323242, 0.021399072647094727, 0.02139561653137207, 0.021411327362060546, 0.0232260799407959, 0.021403520584106446, 0.021351327896118166, 0.02133795166015625, 0.021508256912231447, 0.021458112716674804, 0.021445568084716798, 0.02153868865966797, 0.021601280212402343, 0.021627904891967774, 0.021532800674438475, 0.021458816528320313, 0.021482816696166994, 0.021456895828247072, 0.0214368953704834, 0.021467424392700194, 0.02152342414855957, 0.02146575927734375, 0.021444223403930665, 0.02145964813232422, 0.021481472015380858, 0.021419776916503906, 0.021467103958129882, 0.021524768829345703, 0.021511327743530272, 0.021514272689819335, 0.021457727432250977, 0.024707071304321288, 0.02294988822937012, 0.022079488754272462, 0.021786624908447266, 0.021474815368652343, 0.02143449592590332, 0.021348735809326173, 0.021237312316894533, 0.02127097511291504, 0.021245407104492187, 0.021285280227661133, 0.02121513557434082, 0.021247648239135743, 0.021348928451538084, 0.02331443214416504, 0.02125823974609375, 0.021394975662231447, 0.021373088836669923, 0.021426496505737306, 0.021407743453979493, 0.021358591079711914, 0.021275999069213868, 0.021338783264160156, 0.02132086372375488, 0.02132259178161621, 0.021374624252319337, 0.021315200805664063, 0.021295455932617186, 0.021338687896728516, 0.021327680587768554, 0.021331968307495116, 0.021376991271972658, 0.021358335494995117, 0.021379072189331053, 0.021474687576293946, 0.021449535369873048, 0.02146108818054199, 0.02160367965698242, 0.021432607650756837, 0.021424448013305664, 0.021460447311401367, 0.021440576553344727, 0.02138764762878418, 0.02145430374145508, 0.021482175827026367, 0.021541887283325196, 0.02147020721435547, 0.021436416625976562, 0.021474687576293946, 0.021520896911621092, 0.02149510383605957, 0.021541696548461914, 0.021499168395996093, 0.021490400314331054, 0.021438144683837892, 0.021570943832397462, 0.021674848556518553, 0.0215184326171875, 0.02147737693786621, 0.021417152404785155, 0.02137376022338867, 0.021371999740600587, 0.023073631286621092, 0.02428441619873047, 0.02291551971435547, 0.022102304458618164, 0.021739519119262696, 0.021399551391601563, 0.02130534362792969, 0.0212992000579834, 0.02128895950317383, 0.021518335342407227, 0.021346303939819337, 0.021347904205322267, 0.021208576202392578, 0.021291967391967773, 0.021409631729125977, 0.021557407379150392, 0.02151628875732422, 0.021546304702758787, 0.021535423278808592, 0.02151628875732422, 0.02150399971008301, 0.021486656188964844, 0.021440671920776366, 0.021467935562133788, 0.021381120681762695, 0.02135481643676758, 0.021345983505249022, 0.021366783142089844, 0.021311487197875977, 0.021368032455444337, 0.021336095809936523, 0.02135865592956543, 0.02138591957092285, 0.021429664611816408, 0.021367456436157225, 0.021514175415039062, 0.021393407821655275, 0.021444255828857423, 0.021399904251098632, 0.021295103073120117, 0.021364736557006835, 0.022722560882568358, 0.021329631805419923, 0.021341663360595703, 0.02160867118835449, 0.02176880073547363, 0.021819391250610352, 0.021833471298217774, 0.021735679626464843, 0.02166374397277832, 0.021647647857666017, 0.021564992904663086, 0.021516447067260743, 0.02149760055541992, 0.021381471633911135, 0.02148521614074707, 0.02141209602355957, 0.021405376434326173, 0.021456480026245117, 0.021471296310424805, 0.021475391387939455, 0.021494367599487304, 0.021450687408447265, 0.021448640823364257]",tokens/s,46.50745033940896,, 
float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: Phi3ForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,882.167808,3447.586816,0.0,3045.064704,2842.846208,s,1,7.24684375,7.24684375,0.0,7.24684375,7.24684375,7.24684375,7.24684375,[7.24684375],,kWh,5.644512066601237e-06,6.147419892112981e-07,2.052779420003148e-06,8.312033475815684e-06,,MB,1214.705664,3642.621952,0.0,3229.61408,2982.452736,s,10,2.4826019592285156,0.24826019592285156,0.002192780847353011,0.24785032653808592,0.2508522247314453,0.2511474411010742,0.25138361419677735,"[0.2514426574707031, 0.24397113037109375, 0.24741127014160155, 0.24952365112304686, 0.24706668090820313, 0.24812602233886719, 0.24757463073730468, 0.25078662109375, 0.24623654174804688, 0.25046275329589845]",tokens/s,1031.1761780754964,kWh,7.483775386113211e-06,8.2512768431257e-07,4.934412779435162e-06,1.3243315849860945e-05,tokens/kWh,19330506.264614087,MB,1246.162944,3642.621952,0.0,3229.61408,2982.455296,s,10,13.804732666015628,1.3804732666015627,0.0049082723871483,1.380882568359375,1.3869291381835938,1.3869294128417968,1.3869296325683593,"[1.38528125, 1.38108740234375, 1.3779637451171876, 1.3869296875, 1.377422119140625, 1.382515380859375, 1.37380908203125, 1.380677734375, 1.3721171875, 1.3869290771484375]",tokens/s,45.63652301293227,kWh,4.045775356389158e-05,4.4616313161530956e-06,2.506975011276471e-05,6.998913499280938e-05,tokens/kWh,900139.7146353154,,s,630,13.802404392242446,0.021908578400384814,0.00034643937838156554,0.02183065605163574,0.022272461509704588,0.02253029909133911,0.02307008056640625,"[0.02304614448547363, 0.02224675178527832, 0.021957279205322266, 0.021690143585205077, 0.02171107292175293, 0.021901344299316405, 0.02193404769897461, 0.02188822364807129, 0.02205731201171875, 0.022004159927368164, 0.02259347152709961, 0.021940288543701173, 0.022188032150268554, 0.02212646484375, 0.02202432060241699, 0.02205695915222168, 0.02205504035949707, 0.02204863929748535, 0.021816896438598632, 0.02182793617248535, 0.021929248809814453, 0.021917728424072264, 0.022003583908081055, 0.021841920852661133, 0.021795839309692384, 0.022104127883911133, 0.02182905578613281, 0.02173993682861328, 0.02182963180541992, 0.02191564750671387, 0.021989376068115234, 0.02175200080871582, 0.021966751098632813, 0.021798112869262695, 0.02174460792541504, 0.02217897605895996, 0.022389312744140626, 0.022257600784301758, 0.022265920639038084, 0.022673408508300782, 0.021925439834594728, 0.022176191329956053, 0.021819391250610352, 0.02188467216491699, 0.022225183486938478, 0.02197859191894531, 0.022061567306518554, 0.02184601593017578, 0.021838943481445314, 0.021626911163330077, 0.021697439193725587, 0.022218719482421875, 0.021813247680664064, 0.021735424041748046, 0.02253932762145996, 0.021812095642089843, 0.021696575164794923, 
0.021780479431152345, 0.021972991943359374, 0.021719039916992186, 0.021795040130615236, 0.021759775161743163, 0.022112255096435548, 0.02272096061706543, 0.022237184524536133, 0.022009855270385743, 0.021992927551269532, 0.021776927947998046, 0.021720991134643555, 0.02173753547668457, 0.02188496017456055, 0.02187059211730957, 0.02230681610107422, 0.022050207138061523, 0.021967456817626952, 0.021735424041748046, 0.021882720947265625, 0.021724735260009766, 0.021752351760864257, 0.02228540802001953, 0.022037408828735353, 0.021913663864135742, 0.021872352600097657, 0.02169481658935547, 0.02174355125427246, 0.021956192016601563, 0.021820959091186524, 0.02184281539916992, 0.021682079315185548, 0.022349727630615233, 0.021885120391845703, 0.021720800399780273, 0.021516576766967773, 0.02156572723388672, 0.02155254364013672, 0.02169068717956543, 0.022001663208007814, 0.021850112915039063, 0.021975040435791016, 0.022008960723876952, 0.022176639556884765, 0.023216127395629883, 0.021958656311035156, 0.021839616775512695, 0.021692352294921877, 0.021870784759521485, 0.02171696090698242, 0.02171116828918457, 0.02172502326965332, 0.021630975723266603, 0.021981184005737304, 0.022159263610839842, 0.021954208374023437, 0.021760448455810547, 0.021712480545043947, 0.02170921516418457, 0.02165760040283203, 0.02156662368774414, 0.02174038314819336, 0.021932031631469725, 0.021835775375366212, 0.02225302314758301, 0.02211484718322754, 0.022210559844970702, 0.02207744026184082, 0.022317247390747072, 0.02274643135070801, 0.02187593650817871, 0.021654367446899414, 0.021657920837402343, 0.021940832138061524, 0.02178428840637207, 0.021690656661987304, 0.0217262077331543, 0.02163587188720703, 0.021729375839233397, 0.021673215866088866, 0.021880992889404295, 0.022251808166503906, 0.022757823944091798, 0.022304767608642577, 0.022204416275024414, 0.022055072784423826, 0.022060895919799806, 0.021917119979858398, 0.021882463455200195, 0.021906400680541994, 0.021825536727905274, 0.02179190444946289, 0.021703519821166993, 0.021632896423339844, 0.021694080352783203, 0.021686784744262694, 0.021636159896850585, 0.021863359451293946, 0.021626880645751953, 0.022015520095825195, 0.0216396484375, 0.02179804801940918, 0.021769056320190428, 0.022519264221191406, 0.022335296630859376, 0.022110944747924806, 0.021993471145629884, 0.021879871368408202, 0.021812255859375, 0.022261663436889647, 0.022091455459594726, 0.022052896499633788, 0.022157407760620116, 0.022018304824829103, 0.021688255310058593, 0.021755903244018555, 0.021735424041748046, 0.02155500793457031, 0.02153696060180664, 0.021551103591918946, 0.021524383544921876, 0.021624927520751954, 0.02169856071472168, 0.021689823150634766, 0.021678112030029298, 0.02153487968444824, 0.02150982475280762, 0.021544704437255858, 0.021688352584838866, 0.021600128173828125, 0.022944768905639647, 0.021651456832885742, 0.022834495544433595, 0.022852800369262696, 0.02264031982421875, 0.02214512062072754, 0.022131744384765624, 0.022128864288330077, 0.022456544876098633, 0.021878368377685548, 0.02185116767883301, 0.021769983291625977, 0.02170675277709961, 0.021639167785644533, 0.02155731201171875, 0.021798336029052734, 0.02172800064086914, 0.021612287521362305, 0.021569696426391602, 0.021706367492675783, 0.021520608901977538, 0.021530624389648437, 0.021614591598510743, 0.021769567489624022, 0.0218438720703125, 0.021496576309204103, 0.02150399971008301, 0.02164240074157715, 0.021822303771972657, 0.021927871704101563, 0.021876735687255858, 0.021741632461547852, 0.021833728790283204, 0.021802207946777345, 
0.02162892723083496, 0.02156787109375, 0.021737728118896484, 0.021993631362915038, 0.022261760711669923, 0.022130687713623046, 0.02207744026184082, 0.02186854362487793, 0.021954336166381837, 0.022498592376708985, 0.024343488693237304, 0.022164960861206055, 0.022060831069946288, 0.022004032135009767, 0.02202454376220703, 0.02186835289001465, 0.02190979194641113, 0.02192793655395508, 0.021934112548828124, 0.02174527931213379, 0.022274303436279296, 0.0217326717376709, 0.021664447784423828, 0.021680383682250975, 0.024119136810302734, 0.023140031814575194, 0.022020511627197266, 0.022005279541015624, 0.02228006362915039, 0.021973503112792968, 0.022089248657226564, 0.022755136489868166, 0.021909311294555665, 0.02177471923828125, 0.02166988754272461, 0.02162387275695801, 0.021634176254272462, 0.02160211181640625, 0.021596000671386718, 0.021784736633300782, 0.021753759384155275, 0.02164892768859863, 0.02168275260925293, 0.02170038414001465, 0.023070655822753906, 0.02246028709411621, 0.022497695922851564, 0.021871904373168945, 0.022514432907104493, 0.022699712753295898, 0.022265504837036133, 0.02215385627746582, 0.021808992385864256, 0.022001823425292968, 0.02229043197631836, 0.02214297676086426, 0.021886592864990236, 0.021821823120117188, 0.022668607711791994, 0.021746368408203126, 0.02154035186767578, 0.02162451171875, 0.021823488235473632, 0.021658048629760743, 0.02159654426574707, 0.021607936859130858, 0.021570047378540038, 0.021656736373901368, 0.021629535675048828, 0.02165171241760254, 0.02147942352294922, 0.02148259162902832, 0.021578655242919922, 0.02166579246520996, 0.021547008514404296, 0.021552160263061525, 0.021685216903686525, 0.02228540802001953, 0.022492063522338866, 0.02229043197631836, 0.02189468765258789, 0.02165193557739258, 0.02164240074157715, 0.021590591430664063, 0.021868799209594728, 0.021847648620605467, 0.02163142395019531, 0.02163302421569824, 0.02155465507507324, 0.02162508773803711, 0.02153091239929199, 0.02164124870300293, 0.021690336227416993, 0.02190540885925293, 0.022818784713745117, 0.021953727722167967, 0.021650272369384764, 0.021989376068115234, 0.02162646484375, 0.021620351791381835, 0.02163167953491211, 0.021826656341552734, 0.02184294319152832, 0.021784576416015625, 0.021711872100830077, 0.02187775993347168, 0.02199113655090332, 0.021944608688354492, 0.021768415451049804, 0.021655328750610353, 0.021620512008666992, 0.021749984741210936, 0.021921472549438478, 0.0219703369140625, 0.021781408309936523, 0.021978303909301757, 0.021768415451049804, 0.02178441619873047, 0.021756256103515625, 0.021901727676391602, 0.02164735984802246, 0.021679584503173827, 0.02238108825683594, 0.023706911087036132, 0.022448863983154297, 0.022149120330810547, 0.021876735687255858, 0.021790719985961913, 0.02173651123046875, 0.021992063522338866, 0.021868640899658204, 0.02223535919189453, 0.022245376586914063, 0.0225599365234375, 0.02225040054321289, 0.022279775619506836, 0.022771839141845704, 0.022192319869995116, 0.02231705665588379, 0.021931039810180665, 0.02174665641784668, 0.021643264770507813, 0.021807104110717773, 0.021761407852172853, 0.021741247177124022, 0.021905887603759767, 0.021960384368896486, 0.02166147232055664, 0.02169139289855957, 0.021673824310302733, 0.021622943878173827, 0.021731327056884766, 0.021659231185913085, 0.021747648239135744, 0.021854240417480467, 0.02196659278869629, 0.02213270378112793, 0.022827775955200195, 0.021985279083251954, 0.021767711639404295, 0.021674463272094727, 0.02171708869934082, 0.021761247634887695, 0.02163167953491211, 
0.021702655792236326, 0.021722431182861327, 0.021893375396728514, 0.02204204750061035, 0.02185113525390625, 0.021841920852661133, 0.021809152603149414, 0.021737472534179687, 0.02203647994995117, 0.021823488235473632, 0.021685983657836912, 0.021659936904907227, 0.02161257553100586, 0.022007776260375977, 0.021639167785644533, 0.021723392486572266, 0.02157481575012207, 0.021604959487915038, 0.022193567276000976, 0.021673952102661133, 0.021654144287109375, 0.021589088439941406, 0.021699296951293946, 0.021604543685913087, 0.021581823348999024, 0.021964799880981444, 0.02204876708984375, 0.021745664596557617, 0.021999616622924805, 0.022006944656372072, 0.021848928451538085, 0.021809152603149414, 0.021561344146728514, 0.02163520050048828, 0.021614463806152343, 0.021706655502319337, 0.02170899200439453, 0.02164521598815918, 0.02228428840637207, 0.022040576934814454, 0.021960704803466798, 0.021716352462768554, 0.021670528411865234, 0.021659648895263672, 0.02163711929321289, 0.02276710319519043, 0.021977664947509766, 0.02183363151550293, 0.02171404838562012, 0.021742496490478515, 0.02167193603515625, 0.02166374397277832, 0.021534719467163087, 0.02167136001586914, 0.021743167877197267, 0.02168115234375, 0.023068672180175782, 0.022401023864746093, 0.0221265926361084, 0.021991424560546875, 0.021796031951904295, 0.021907808303833008, 0.021967327117919922, 0.0219238395690918, 0.022003616333007812, 0.02213657569885254, 0.021967199325561522, 0.02190947151184082, 0.021723007202148436, 0.022050559997558592, 0.022004127502441406, 0.0218787841796875, 0.021727231979370116, 0.02172083282470703, 0.021632543563842775, 0.02196672058105469, 0.022272256851196288, 0.022070880889892577, 0.021905792236328124, 0.021797504425048828, 0.021841920852661133, 0.021929344177246093, 0.02203059196472168, 0.021823295593261717, 0.021964607238769532, 0.021779455184936524, 0.021890815734863282, 0.021893119812011717, 0.021995519638061522, 0.02213395118713379, 0.02199852752685547, 0.022021312713623047, 0.022024288177490234, 0.02200432014465332, 0.021763071060180664, 0.021732351303100587, 0.021798015594482422, 0.021947456359863282, 0.021845983505249023, 0.0219769287109375, 0.02186649513244629, 0.021995008468627928, 0.021869056701660155, 0.021932031631469725, 0.021651456832885742, 0.021790271759033204, 0.02162940788269043, 0.02176406478881836, 0.021776384353637695, 0.02188697624206543, 0.02183590316772461, 0.021726560592651368, 0.02172982406616211, 0.02171664047241211, 0.02169878387451172, 0.021895456314086913, 0.02180284881591797, 0.021831680297851562, 0.02170163154602051, 0.022518400192260743, 0.021986528396606444, 0.021803871154785156, 0.021673919677734375, 0.022034143447875975, 0.021737823486328123, 0.021712608337402343, 0.02187411117553711, 0.021744480133056642, 0.022146335601806642, 0.021643999099731446, 0.02168012809753418, 0.02163711929321289, 0.021790719985961913, 0.021573663711547852, 0.0215982723236084, 0.021563232421875, 0.021622175216674804, 0.02177289581298828, 0.02156755256652832, 0.02160207939147949, 0.021729503631591797, 0.021688255310058593, 0.02155107116699219, 0.021593183517456056, 0.021541023254394533, 0.021668256759643553, 0.021801408767700196, 0.021667264938354493, 0.021568031311035157, 0.021567039489746094, 0.021722848892211915, 0.021803359985351562, 0.02168809509277344, 0.02163702392578125, 0.021668256759643553, 0.021766143798828123, 0.02157513618469238, 0.02167612838745117, 0.021708959579467772, 0.021690975189208983, 0.021624448776245118, 0.021602624893188475, 0.0215982723236084, 0.021612543106079102, 
0.021614591598510743, 0.021676191329956053, 0.02180816078186035, 0.022106752395629883, 0.021810400009155274, 0.022245376586914063, 0.02241139221191406, 0.0218955192565918, 0.021875200271606447, 0.02187264060974121, 0.022680704116821288, 0.021978271484375, 0.02204617691040039, 0.021942176818847657, 0.02171120071411133, 0.02167193603515625, 0.02159587287902832, 0.02192207908630371, 0.022980159759521484, 0.02205904006958008, 0.02196067237854004, 0.022135295867919923, 0.021858688354492187, 0.02189936065673828, 0.021837535858154296, 0.021819391250610352, 0.021902496337890626, 0.022005727767944336, 0.02219664001464844, 0.022497087478637694, 0.022694623947143555, 0.022318143844604493, 0.021982303619384767, 0.022077215194702147, 0.02208483123779297, 0.02207619285583496, 0.021952512741088868, 0.02179478454589844, 0.02186038398742676, 0.022364160537719727, 0.02243715286254883, 0.022094560623168946, 0.021942272186279296, 0.021800703048706054, 0.021862655639648437, 0.021812799453735352, 0.02194272041320801, 0.022132736206054687, 0.021937503814697265, 0.02193020820617676, 0.02203481674194336, 0.02192595291137695, 0.02204364776611328, 0.021934207916259767, 0.022097951889038087, 0.021861215591430665, 0.0218603515625, 0.021753856658935547, 0.021743616104125976, 0.02191564750671387, 0.021751392364501954, 0.021746240615844726, 0.022605663299560548, 0.02174332809448242, 0.021616928100585936, 0.02161039924621582, 0.021632928848266602, 0.021670080184936522, 0.02160576057434082, 0.021768287658691408, 0.024789535522460937, 0.022837247848510742, 0.022003103256225585, 0.02190332794189453, 0.021883455276489258, 0.021804607391357422, 0.021889503479003907, 0.021634111404418944, 0.021553951263427733, 0.02175814437866211, 0.02146303939819336]",tokens/s,45.64422126003554,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,867.708928,2698.903552,0.0,2296.38144,2202.20672,s,1,7.20566845703125,7.20566845703125,0.0,7.20566845703125,7.20566845703125,7.20566845703125,7.20566845703125,[7.20566845703125],,kWh,4.989219870837284e-06,5.342583400424964e-07,1.901668187975769e-06,7.42514639885555e-06,,MB,1305.509888,2797.469696,0.0,2382.364672,2267.889152,s,10,0.2868409271240234,0.028684092712402343,0.001110909784972339,0.028358511924743653,0.0290583740234375,0.030486594963073727,0.03162917171478272,"[0.03191481590270996, 0.027990751266479492, 0.028483007431030275, 0.028036703109741212, 0.028185855865478514, 0.028639039993286132, 0.02797305679321289, 0.028642688751220703, 0.028740991592407228, 0.02823401641845703]",tokens/s,8924.807298831225,kWh,1.0150304529369058e-06,1.1194008284612027e-07,6.682933587084322e-07,1.7952638944914586e-06,tokens/kWh,142597420.23749477,MB,1349.44768,2797.469696,0.0,2382.364672,2267.891712,s,10,12.7222490234375,1.27222490234375,0.010027180159853778,1.2703740234375,1.2882837646484375,1.2893047241210938,1.2901214916992187,"[1.288056884765625, 1.26739697265625, 1.27260009765625, 
1.266229248046875, 1.2698204345703126, 1.266865478515625, 1.2543466796875, 1.275679931640625, 1.29032568359375, 1.2709276123046875]",tokens/s,49.51954633488039,kWh,3.6564277659565476e-05,4.032167338520448e-06,2.1983446984890415e-05,6.257989198297633e-05,tokens/kWh,1006713.147046306,,s,630,12.717896696090696,0.02018713761284238,0.0005291124876017078,0.020095312118530273,0.020585785484313963,0.020703671646118166,0.02270597204208375,"[0.019918495178222657, 0.019978591918945313, 0.020141759872436524, 0.01979132843017578, 0.019795808792114258, 0.019969247817993165, 0.020289312362670897, 0.02066419219970703, 0.02059222412109375, 0.02094038391113281, 0.020587455749511718, 0.02032640075683594, 0.02027231979370117, 0.02055865669250488, 0.02003068733215332, 0.020005664825439452, 0.020227167129516603, 0.02021228790283203, 0.020341087341308593, 0.019969120025634765, 0.019816511154174803, 0.019997535705566408, 0.020199424743652345, 0.020452959060668945, 0.02044963264465332, 0.020377119064331053, 0.020418079376220703, 0.020564992904663085, 0.02347430419921875, 0.0210512638092041, 0.0208175048828125, 0.020894176483154298, 0.020545215606689454, 0.0204736328125, 0.02055740737915039, 0.020435840606689452, 0.020407487869262695, 0.020138816833496095, 0.020049823760986327, 0.020183456420898437, 0.020184320449829103, 0.020312511444091796, 0.020262912750244142, 0.02038991928100586, 0.024432607650756836, 0.020479583740234376, 0.020220319747924806, 0.020031488418579102, 0.020060447692871092, 0.02021347236633301, 0.0205614070892334, 0.020676704406738283, 0.020844959259033204, 0.02051215934753418, 0.020464223861694338, 0.02033459281921387, 0.020129791259765627, 0.02028351974487305, 0.020350303649902344, 0.020523168563842772, 0.020238719940185546, 0.019990495681762695, 0.02022198486328125, 0.01972831916809082, 0.02001113510131836, 0.020182720184326174, 0.019792160034179686, 0.019790143966674806, 0.0196997127532959, 0.02026678466796875, 0.019965375900268555, 0.019877664566040038, 0.020015871047973633, 0.020285696029663087, 0.020668415069580077, 0.020527103424072265, 0.02247443199157715, 0.021506368637084963, 0.021065216064453125, 0.020554208755493165, 0.02037763214111328, 0.02097267150878906, 0.020284095764160157, 0.020015296936035157, 0.02006412887573242, 0.020017248153686523, 0.020043807983398436, 0.01990355110168457, 0.019851200103759764, 0.019903488159179687, 0.019759103775024413, 0.01999612808227539, 0.019878431320190428, 0.019828351974487304, 0.019819904327392578, 0.019845727920532227, 0.019853216171264648, 0.019873855590820312, 0.020404672622680663, 0.020604991912841798, 0.02058559989929199, 0.020477792739868165, 0.020416671752929688, 0.02040291213989258, 0.019955360412597656, 0.01993494415283203, 0.01989651107788086, 0.019824384689331054, 0.01984182357788086, 0.02005401611328125, 0.020139392852783204, 0.01983875274658203, 0.01990934371948242, 0.019908031463623046, 0.019947488784790038, 0.019997184753417968, 0.01985763168334961, 0.019800064086914062, 0.01986528015136719, 0.019920991897583007, 0.01988764762878418, 0.01979871940612793, 0.019814016342163086, 0.019700096130371093, 0.019779552459716798, 0.01977961540222168, 0.019700319290161132, 0.019755008697509766, 0.020463615417480468, 0.020231744766235352, 0.019849664688110353, 0.01986774444580078, 0.0202589111328125, 0.02032211112976074, 0.02049228858947754, 0.020476032257080078, 0.020352895736694337, 0.020302911758422852, 0.019884544372558592, 0.019757503509521483, 0.019916160583496094, 0.01980847930908203, 0.01988444709777832, 0.020028831481933594, 
0.020044384002685548, 0.019860576629638672, 0.01976825523376465, 0.020381664276123045, 0.019959007263183594, 0.020169504165649416, 0.020489599227905272, 0.020244768142700195, 0.01995974349975586, 0.020021663665771485, 0.020208639144897463, 0.020050912857055663, 0.019912736892700195, 0.020477888107299804, 0.020404287338256836, 0.02090188789367676, 0.020637439727783202, 0.020615423202514648, 0.020578176498413085, 0.020424928665161133, 0.02041436767578125, 0.020363359451293944, 0.020195232391357423, 0.020118879318237304, 0.020058528900146484, 0.020099327087402342, 0.02015817642211914, 0.020141376495361327, 0.020085151672363282, 0.020015520095825197, 0.020010751724243166, 0.020004575729370117, 0.019847551345825196, 0.019964223861694337, 0.020104448318481447, 0.020431615829467772, 0.020233983993530272, 0.02022985649108887, 0.020415008544921873, 0.020407712936401368, 0.020433504104614256, 0.02036735916137695, 0.020389888763427736, 0.020422815322875976, 0.020615039825439455, 0.020813823699951172, 0.020342784881591795, 0.02002943992614746, 0.020012447357177734, 0.020306528091430662, 0.020279296875, 0.02050662422180176, 0.02019327926635742, 0.020190431594848634, 0.02031612777709961, 0.02011628723144531, 0.020170816421508787, 0.020072383880615233, 0.02020783996582031, 0.020346656799316406, 0.020460575103759766, 0.020466655731201173, 0.020561471939086914, 0.021002687454223633, 0.020533248901367186, 0.02038278388977051, 0.020184000015258788, 0.019974143981933593, 0.019953664779663087, 0.020154367446899413, 0.020285472869873047, 0.020606943130493164, 0.020691072463989258, 0.020332416534423827, 0.020242431640625, 0.02003763198852539, 0.020014368057250976, 0.019876575469970702, 0.019938976287841796, 0.020071775436401366, 0.019935775756835937, 0.019892255783081056, 0.019810047149658203, 0.019932960510253905, 0.019835807800292968, 0.01987331199645996, 0.020107744216918945, 0.0198287353515625, 0.019808256149291992, 0.019886079788208007, 0.019804159164428712, 0.01982259178161621, 0.019721887588500978, 0.019865951538085937, 0.019704832077026366, 0.019747840881347657, 0.019877824783325195, 0.01974892807006836, 0.019748863220214845, 0.019980384826660157, 0.01982044792175293, 0.019865503311157228, 0.01988617515563965, 0.01984921646118164, 0.020101119995117187, 0.019850431442260744, 0.020048704147338867, 0.019859359741210936, 0.019815519332885743, 0.019874624252319336, 0.019773216247558595, 0.019972320556640624, 0.019894271850585937, 0.019871488571166992, 0.01998464012145996, 0.02049977684020996, 0.020111711502075195, 0.020367712020874024, 0.020231264114379883, 0.020108192443847657, 0.019954784393310547, 0.01998736000061035, 0.01991663932800293, 0.01993744087219238, 0.019980224609375, 0.019847232818603514, 0.019761215209960936, 0.019754207611083985, 0.020083135604858398, 0.019890464782714844, 0.01984480094909668, 0.019943807601928713, 0.020335744857788086, 0.019956544876098634, 0.019870912551879883, 0.0198189754486084, 0.019847007751464845, 0.01991651153564453, 0.020427520751953126, 0.02280054473876953, 0.026818208694458008, 0.02078441619873047, 0.02046860885620117, 0.019895679473876954, 0.019812095642089845, 0.01982703971862793, 0.01978223991394043, 0.01986355209350586, 0.019818496704101563, 0.019797119140625, 0.01983171272277832, 0.019867359161376955, 0.020594944000244142, 0.020606367111206055, 0.020492895126342774, 0.020107263565063475, 0.01997209548950195, 0.019914751052856446, 0.019775487899780272, 0.019763200759887696, 0.0197956485748291, 0.020150367736816405, 0.019914079666137695, 0.0198603515625, 
0.019861440658569336, 0.019740095138549806, 0.01989596748352051, 0.019995616912841796, 0.019801151275634764, 0.019907712936401367, 0.02025657653808594, 0.020347103118896485, 0.020133344650268555, 0.01990096092224121, 0.02018022346496582, 0.020215776443481444, 0.020306720733642578, 0.020030975341796875, 0.02033072090148926, 0.020625696182250977, 0.02048201560974121, 0.020471647262573243, 0.020594079971313475, 0.02062620735168457, 0.020436288833618164, 0.020261024475097655, 0.020359712600708006, 0.020625408172607423, 0.0201167049407959, 0.01997001647949219, 0.020073280334472657, 0.01991468811035156, 0.01987571144104004, 0.019955904006958007, 0.01988403129577637, 0.01983692741394043, 0.019981920242309572, 0.01992367935180664, 0.020008544921875, 0.019884191513061523, 0.020124832153320314, 0.020060096740722656, 0.02024844741821289, 0.020038623809814454, 0.020174848556518556, 0.020300928115844726, 0.020407167434692383, 0.020488191604614257, 0.020528352737426758, 0.02069584083557129, 0.020496320724487305, 0.020611135482788086, 0.020074655532836914, 0.01992691230773926, 0.01983616065979004, 0.019913440704345704, 0.02011955261230469, 0.020004480361938477, 0.01987945556640625, 0.019874303817749024, 0.019759456634521486, 0.01991484832763672, 0.01979587173461914, 0.019816640853881837, 0.019772863388061522, 0.019902303695678712, 0.019735071182250978, 0.019766368865966798, 0.019735456466674805, 0.02002236747741699, 0.019906751632690428, 0.01980691146850586, 0.019732511520385743, 0.01966659164428711, 0.01970956802368164, 0.019877920150756834, 0.019763551712036132, 0.019801088333129883, 0.019710975646972655, 0.020193504333496093, 0.02046953582763672, 0.02016774368286133, 0.01995667266845703, 0.01987993621826172, 0.019755008697509766, 0.019842752456665037, 0.019751455307006834, 0.019748064041137697, 0.01983132743835449, 0.01981648063659668, 0.020000768661499024, 0.01982259178161621, 0.019836992263793946, 0.019680736541748046, 0.019665376663208008, 0.019754783630371094, 0.01965488052368164, 0.019742719650268553, 0.020086368560791015, 0.01970172882080078, 0.019860031127929688, 0.01987571144104004, 0.01964784049987793, 0.019731103897094728, 0.019754144668579103, 0.0198144645690918, 0.019888927459716797, 0.020033151626586913, 0.01999635124206543, 0.019778240203857423, 0.019796031951904297, 0.019944799423217772, 0.02018364715576172, 0.019959808349609375, 0.01981420707702637, 0.019784927368164062, 0.019696287155151367, 0.01971027183532715, 0.019869695663452147, 0.020082496643066407, 0.020062400817871095, 0.019832576751708984, 0.019806495666503908, 0.01967100715637207, 0.019772991180419922, 0.019795967102050782, 0.01967148780822754, 0.019705951690673826, 0.019742496490478517, 0.019840608596801756, 0.020302143096923828, 0.019757280349731444, 0.020350528717041017, 0.02063164710998535, 0.020535648345947264, 0.021102592468261717, 0.019955711364746095, 0.01977462387084961, 0.02003555107116699, 0.019846208572387697, 0.019863359451293944, 0.019859359741210936, 0.01984931182861328, 0.019813888549804686, 0.019741184234619142, 0.01978982353210449, 0.0197938232421875, 0.01986774444580078, 0.01978326416015625, 0.02009539222717285, 0.020445184707641603, 0.02045747184753418, 0.020531200408935548, 0.020449087142944335, 0.020637887954711914, 0.020613344192504882, 0.020563743591308595, 0.02207334327697754, 0.02037059211730957, 0.02017695999145508, 0.01996041679382324, 0.01986934471130371, 0.019925056457519533, 0.02025929641723633, 0.02008620834350586, 0.02017321586608887, 0.02024995231628418, 0.02087331199645996, 
0.020451967239379882, 0.02012544059753418, 0.020242784500122072, 0.020535295486450195, 0.020410367965698242, 0.020109312057495117, 0.02006425666809082, 0.020200576782226563, 0.020191360473632812, 0.020631839752197265, 0.020381631851196288, 0.020705759048461916, 0.021045312881469727, 0.02046976089477539, 0.020172800064086914, 0.02023017692565918, 0.02012726402282715, 0.020082815170288086, 0.020113088607788085, 0.02026051139831543, 0.020575199127197265, 0.020371456146240235, 0.020244319915771483, 0.02018079948425293, 0.020123615264892578, 0.02045795249938965, 0.020148128509521485, 0.019965087890625, 0.020230335235595705, 0.01999305534362793, 0.020356447219848632, 0.02017980766296387, 0.020144384384155275, 0.020404224395751954, 0.020516351699829103, 0.020349695205688478, 0.020192415237426757, 0.020122207641601563, 0.020385759353637695, 0.020788448333740234, 0.02074870491027832, 0.023590431213378907, 0.020811967849731446, 0.02037945556640625, 0.020202367782592774, 0.020074335098266602, 0.020193439483642578, 0.020131839752197265, 0.02064588737487793, 0.019968000411987305, 0.019891424179077147, 0.020007551193237303, 0.019894399642944337, 0.019951648712158203, 0.020101024627685548, 0.020101215362548826, 0.020395872116088867, 0.02283535957336426, 0.021186111450195312, 0.020573759078979494, 0.020448127746582032, 0.020428800582885744, 0.020455423355102538, 0.02059894371032715, 0.020562879562377928, 0.020423328399658203, 0.02023027229309082, 0.020382944107055663, 0.020611616134643556, 0.020402559280395506, 0.020340736389160157, 0.020090272903442383, 0.020095232009887696, 0.020253023147583007, 0.020213056564331054, 0.020324480056762694, 0.023023775100708008, 0.020218271255493164, 0.020166751861572265, 0.020109216690063478, 0.020007232666015624, 0.020042720794677733, 0.02011801528930664, 0.020115903854370117, 0.020303680419921876, 0.020264928817749023, 0.020092927932739257, 0.020111616134643556, 0.020176544189453124, 0.020355167388916014, 0.021048511505126953, 0.020902463912963867, 0.021506208419799805, 0.020462751388549805, 0.02039904022216797, 0.02007049560546875, 0.02008064079284668, 0.019944704055786133, 0.01996028709411621, 0.020392127990722656, 0.02035868835449219, 0.020207263946533202, 0.020108192443847657, 0.02002351951599121, 0.020524192810058593, 0.020088640213012696, 0.020138816833496095, 0.01995350456237793, 0.019970207214355468, 0.020033536911010744, 0.020000768661499024, 0.019908607482910155, 0.019938623428344727, 0.02002332878112793, 0.019891904830932616, 0.01992188835144043, 0.02009225654602051, 0.019921247482299804, 0.019929344177246094, 0.019836992263793946, 0.019969343185424804, 0.02001519966125488, 0.020306720733642578, 0.02036627197265625, 0.020701120376586914, 0.020608064651489257, 0.02050819206237793, 0.020457632064819337, 0.020343008041381835, 0.02023948860168457, 0.02039852714538574, 0.020381311416625976, 0.020300575256347656, 0.020113407135009767, 0.02017807960510254, 0.019967840194702147, 0.019983360290527344, 0.0198604793548584, 0.019936256408691407, 0.020176895141601564, 0.020142335891723633, 0.019785472869873047, 0.01972047996520996, 0.01993903923034668, 0.02028544044494629, 0.020330495834350586, 0.020406400680541992, 0.02030761528015137, 0.020152351379394532, 0.020267200469970704, 0.02025651168823242, 0.020199167251586915, 0.020359199523925783, 0.02062998390197754, 0.020600831985473633, 0.020176704406738282, 0.020074335098266602, 0.020658527374267578]",tokens/s,49.53649294805587,, 
float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi3,microsoft/Phi-3-mini-4k-instruct,microsoft/Phi-3-mini-4k-instruct,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1243, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 1121, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 855, in forward hidden_states = self.mlp(hidden_states) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/microsoft/Phi-3-mini-4k-instruct/0a67737cc96d2554230f90338b163bc6380a2a85/modeling_phi3.py"", line 233, in forward up_states = up_states * self.activation_fn(gate) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 39946 has 14.73 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 45.84 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = 
backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1206, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 1011, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 751, in forward attn_outputs, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/phi/modeling_phi.py"", line 550, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, 
**kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,868.356096,2698.903552,0.0,2296.38144,2202.20672,s,1,7.4304921875,7.4304921875,0.0,7.4304921875,7.4304921875,7.4304921875,7.4304921875,[7.4304921875],,kWh,4.914905566647576e-06,5.337879213370768e-07,1.9800015839677165e-06,7.428695071952369e-06,,MB,1224.531968,2795.372544,0.0,2382.364672,2267.889152,s,10,1.9763736419677733,0.19763736419677733,0.0011626544133272346,0.19741062927246095,0.19867308197021483,0.19970930557250977,0.20053828445434568,"[0.20074552917480468, 0.19752192687988282, 0.19844281005859374, 0.19659642028808594, 0.19774761962890625, 0.1970124206542969, 0.19745706176757813, 0.19736419677734374, 0.1967193603515625, 0.19676629638671875]",tokens/s,1295.3016300354725,kWh,5.9780683988091845e-06,6.589968275859485e-07,3.975570073877726e-06,1.061263530027286e-05,tokens/kWh,24122189.518132035,MB,1272.733696,2795.372544,0.0,2382.364672,2267.891712,s,10,12.753380859374998,1.2753380859374999,0.007836931662319331,1.2758687744140624,1.2837486083984375,1.2858654541015624,1.2875589306640625,"[1.2686468505859374, 1.2704376220703124, 1.2766175537109374, 1.2832781982421875, 
1.26868212890625, 1.2879822998046875, 1.28282470703125, 1.278873291015625, 1.260918212890625, 1.2751199951171874]",tokens/s,49.398665886848946,kWh,3.696096501410075e-05,4.076761668847462e-06,2.191453397152298e-05,6.29522606544712e-05,tokens/kWh,1000758.3420362112,,s,630,12.749815832138054,0.020237802908155654,0.00044132335835155885,0.02013979148864746,0.02069225368499756,0.020884209728240965,0.02164093292236328,"[0.020829727172851562, 0.02033673667907715, 0.01991142463684082, 0.019955711364746095, 0.020066368103027345, 0.020069664001464843, 0.02025129508972168, 0.020497919082641602, 0.02019993591308594, 0.020141855239868164, 0.019988704681396484, 0.021644895553588867, 0.02008310317993164, 0.020078048706054688, 0.02013871955871582, 0.020594335556030272, 0.020891712188720702, 0.02108425521850586, 0.021194751739501954, 0.020589855194091795, 0.02045382308959961, 0.020197343826293946, 0.020033824920654298, 0.020029695510864257, 0.019960607528686523, 0.019962528228759765, 0.019982688903808592, 0.01992684745788574, 0.019988672256469726, 0.01999667167663574, 0.019985471725463867, 0.019981599807739257, 0.019972831726074218, 0.0199300479888916, 0.020319711685180663, 0.020163103103637694, 0.020053567886352538, 0.01995142364501953, 0.019892864227294922, 0.019971647262573243, 0.01992291259765625, 0.019913183212280274, 0.01985478401184082, 0.019904224395751954, 0.020361919403076172, 0.019880096435546876, 0.01988198471069336, 0.01996015930175781, 0.019901119232177734, 0.019880928039550782, 0.01982259178161621, 0.019778816223144532, 0.019942144393920898, 0.020039680480957032, 0.019967199325561524, 0.019833311080932618, 0.019889471054077148, 0.020404991149902345, 0.020113664627075194, 0.02011136054992676, 0.01988102340698242, 0.019946399688720702, 0.019822751998901367, 0.020862560272216796, 0.020363807678222656, 0.019965951919555663, 0.01989401626586914, 0.019910976409912108, 0.020262847900390624, 0.020299167633056642, 0.02017545509338379, 0.019834880828857423, 0.019912607192993165, 0.020008640289306642, 0.019839391708374024, 0.01987174415588379, 0.019908607482910155, 0.019925216674804687, 0.01983024024963379, 0.01988595199584961, 0.019827327728271484, 0.01976300811767578, 0.019785696029663086, 0.019830303192138674, 0.019861631393432617, 0.019736959457397462, 0.019774688720703124, 0.019870399475097656, 0.019904319763183593, 0.020496000289916994, 0.020619935989379883, 0.02043289566040039, 0.020541215896606447, 0.02035875129699707, 0.020238176345825195, 0.020212223052978515, 0.01998896026611328, 0.019976032257080077, 0.020104896545410155, 0.02001875114440918, 0.019890911102294923, 0.01984716796875, 0.019904512405395508, 0.01992416000366211, 0.019948352813720704, 0.020199392318725588, 0.020315935134887695, 0.020149951934814454, 0.020326976776123048, 0.020740095138549804, 0.020254720687866212, 0.02070848083496094, 0.020628351211547852, 0.020561824798583983, 0.020559968948364257, 0.02043289566040039, 0.020368864059448242, 0.020193824768066405, 0.020040767669677734, 0.020099103927612303, 0.020183135986328125, 0.02037843132019043, 0.020576255798339844, 0.020559295654296875, 0.02058460807800293, 0.020532703399658202, 0.021060415267944336, 0.02042198371887207, 0.020604576110839844, 0.02052534484863281, 0.020009695053100587, 0.02002739143371582, 0.019928415298461913, 0.019921567916870116, 0.019963743209838868, 0.01994067192077637, 0.01991379165649414, 0.02056528091430664, 0.020060319900512696, 0.02004617691040039, 0.020059167861938478, 0.02006524848937988, 0.020004959106445314, 0.02002911949157715, 
0.020086015701293945, 0.0201408634185791, 0.019865760803222655, 0.021086208343505858, 0.019815488815307616, 0.02039494323730469, 0.019992351531982422, 0.019867872238159178, 0.020191072463989258, 0.020185279846191406, 0.0201112003326416, 0.020074783325195314, 0.019902080535888673, 0.026028255462646484, 0.020832256317138673, 0.022151359558105467, 0.021532032012939455, 0.020057952880859375, 0.019902719497680663, 0.01999286460876465, 0.020293567657470705, 0.020340864181518554, 0.020135488510131836, 0.020000640869140624, 0.0198590087890625, 0.01994633674621582, 0.020156576156616212, 0.019918848037719726, 0.01997132873535156, 0.020137792587280275, 0.019801023483276368, 0.019826688766479493, 0.01982473564147949, 0.020254016876220703, 0.020947519302368163, 0.020198495864868164, 0.02007334327697754, 0.019871328353881838, 0.019939807891845702, 0.020000768661499024, 0.01983897590637207, 0.01984054374694824, 0.019892704010009764, 0.01993440055847168, 0.01990937614440918, 0.021423328399658204, 0.020697824478149413, 0.020326528549194336, 0.02017478370666504, 0.02017180824279785, 0.020009952545166014, 0.01992268753051758, 0.01977712059020996, 0.019911327362060548, 0.020445184707641603, 0.0204902400970459, 0.020625408172607423, 0.020625408172607423, 0.02048828887939453, 0.02043059158325195, 0.021078176498413086, 0.02047488021850586, 0.02076748847961426, 0.020723167419433593, 0.020656864166259767, 0.02040339279174805, 0.0202740478515625, 0.020238496780395507, 0.020348575592041014, 0.020084928512573243, 0.02027724838256836, 0.020226079940795897, 0.020372575759887695, 0.020992895126342774, 0.020520959854125977, 0.020603904724121092, 0.020311040878295897, 0.020256767272949217, 0.020123647689819335, 0.01997209548950195, 0.020075647354125977, 0.020891712188720702, 0.020689056396484374, 0.020703168869018556, 0.02013257598876953, 0.01999203109741211, 0.020084735870361328, 0.020182783126831055, 0.020461503982543944, 0.0203253116607666, 0.020311264038085936, 0.020550336837768555, 0.020418560028076172, 0.020313472747802735, 0.020466047286987303, 0.020359424591064452, 0.020250816345214844, 0.02022585678100586, 0.020307968139648438, 0.020383552551269533, 0.020596672058105468, 0.020408287048339843, 0.02037993621826172, 0.020170816421508787, 0.019951744079589842, 0.020041248321533204, 0.01997238349914551, 0.019995935440063478, 0.021117919921875, 0.020427711486816408, 0.020518911361694335, 0.02032217597961426, 0.02022822380065918, 0.02101481628417969, 0.020436704635620116, 0.020166688919067383, 0.02008252716064453, 0.020223199844360353, 0.020177888870239257, 0.020115135192871093, 0.020072704315185548, 0.019951520919799806, 0.020008928298950197, 0.02004595184326172, 0.01984102439880371, 0.020038719177246093, 0.019868703842163087, 0.02025052833557129, 0.02069481658935547, 0.020392160415649414, 0.020174848556518556, 0.01988198471069336, 0.020377599716186523, 0.02004364776611328, 0.02016841506958008, 0.020971744537353516, 0.020461599349975587, 0.020242591857910157, 0.020162271499633788, 0.020060159683227538, 0.020594112396240233, 0.02047612762451172, 0.020800128936767578, 0.020119359970092773, 0.019962047576904295, 0.01983078384399414, 0.019789695739746094, 0.019744895935058595, 0.019781631469726564, 0.019783679962158202, 0.019787296295166016, 0.019820959091186523, 0.0198351993560791, 0.01982956886291504, 0.01980307197570801, 0.019828800201416016, 0.01976304054260254, 0.019719871520996093, 0.020189599990844728, 0.019780864715576173, 0.02018534469604492, 0.021141376495361328, 0.019986080169677733, 0.01991321563720703, 
0.019941024780273438, 0.019944448471069336, 0.019942272186279298, 0.019845184326171876, 0.019993471145629882, 0.019832128524780272, 0.019862207412719726, 0.02063564872741699, 0.020191232681274415, 0.020041568756103516, 0.019869855880737305, 0.02019852828979492, 0.020108448028564454, 0.019850784301757813, 0.020070592880249025, 0.019922943115234376, 0.01990012741088867, 0.02016057586669922, 0.02059507179260254, 0.020723520278930666, 0.020480031967163085, 0.020213760375976563, 0.02023423957824707, 0.020981407165527342, 0.020296031951904298, 0.019957759857177734, 0.019969343185424804, 0.020058176040649415, 0.020236928939819335, 0.020348640441894533, 0.020171039581298827, 0.020558847427368163, 0.02069196891784668, 0.020684576034545897, 0.020534975051879883, 0.020744863510131835, 0.02085001564025879, 0.02089833641052246, 0.020827487945556642, 0.020650272369384767, 0.021320192337036133, 0.022384447097778322, 0.02074742317199707, 0.02074297523498535, 0.02052681541442871, 0.020256351470947266, 0.02166214370727539, 0.02003388786315918, 0.02011136054992676, 0.020049951553344728, 0.019891807556152344, 0.020003007888793944, 0.020006399154663086, 0.02002195167541504, 0.01998847961425781, 0.019874048233032227, 0.020244192123413086, 0.021764127731323243, 0.02051481628417969, 0.020297727584838866, 0.020060159683227538, 0.020875040054321288, 0.020764896392822266, 0.020822015762329102, 0.020641311645507813, 0.020801248550415038, 0.02051148796081543, 0.020332096099853515, 0.0203690242767334, 0.020290367126464842, 0.020744192123413087, 0.020264223098754884, 0.020009695053100587, 0.020008960723876954, 0.019964000701904298, 0.02001299285888672, 0.020143871307373048, 0.020539615631103517, 0.020570112228393556, 0.020442527770996095, 0.02118320083618164, 0.021141056060791016, 0.021083871841430665, 0.02072777557373047, 0.020494976043701173, 0.0216312313079834, 0.020266752243041992, 0.020203168869018556, 0.020048223495483398, 0.019965919494628906, 0.02021583938598633, 0.02013315200805664, 0.019929344177246094, 0.02014614486694336, 0.02004748725891113, 0.020266111373901368, 0.020231935501098634, 0.020086496353149415, 0.02003753662109375, 0.020037120819091796, 0.020208511352539062, 0.020508672714233397, 0.020637439727783202, 0.020473344802856445, 0.020637887954711914, 0.02127097511291504, 0.02059507179260254, 0.020803327560424804, 0.020645376205444335, 0.020793472290039063, 0.02055401611328125, 0.02055792045593262, 0.020537343978881836, 0.020291584014892578, 0.020285024642944335, 0.020820575714111327, 0.020156063079833985, 0.020199199676513672, 0.020179328918457032, 0.019998720169067383, 0.01999168014526367, 0.019956607818603516, 0.020228000640869142, 0.019955808639526368, 0.02027510452270508, 0.02012067222595215, 0.02000160026550293, 0.020339040756225585, 0.02040179252624512, 0.02014361572265625, 0.02004774475097656, 0.020274015426635743, 0.020019136428833007, 0.021025888442993163, 0.020392351150512696, 0.020331008911132813, 0.020334016799926757, 0.02042310333251953, 0.020383712768554687, 0.02019139289855957, 0.020289344787597655, 0.020446752548217772, 0.020689504623413086, 0.020352287292480467, 0.02009782409667969, 0.02004172706604004, 0.020049184799194337, 0.02032713508605957, 0.020380992889404297, 0.02042032051086426, 0.020294527053833007, 0.02020319938659668, 0.020212127685546876, 0.020357120513916017, 0.01998182487487793, 0.019878400802612304, 0.02044927978515625, 0.020129375457763672, 0.020017568588256835, 0.02003558349609375, 0.020056032180786134, 0.020454879760742187, 0.020498943328857423, 
0.020536991119384767, 0.02069932746887207, 0.020609247207641603, 0.020316160202026368, 0.02008185577392578, 0.02001091194152832, 0.020290048599243164, 0.020574047088623048, 0.02081391906738281, 0.020537824630737306, 0.020348928451538087, 0.020274560928344728, 0.020040224075317383, 0.019974239349365236, 0.019931135177612306, 0.020035039901733397, 0.01999907112121582, 0.020383167266845702, 0.02087139129638672, 0.020689279556274413, 0.020619583129882813, 0.02055276870727539, 0.020193599700927736, 0.02019318389892578, 0.01997987174987793, 0.02000339126586914, 0.02006185531616211, 0.02056847953796387, 0.020566368103027345, 0.020172479629516602, 0.02006662368774414, 0.019985887527465822, 0.019792287826538087, 0.020824064254760744, 0.020563135147094725, 0.02020227241516113, 0.02009836769104004, 0.020208160400390626, 0.02025200080871582, 0.019966880798339845, 0.020088768005371092, 0.019944576263427733, 0.02000985527038574, 0.019963647842407225, 0.019863040924072265, 0.01995452880859375, 0.020105119705200195, 0.020000768661499024, 0.020105472564697267, 0.01992064094543457, 0.01985910415649414, 0.020074975967407226, 0.019822463989257813, 0.019832704544067385, 0.019814495086669923, 0.019871007919311522, 0.020073215484619142, 0.01983283233642578, 0.0200581111907959, 0.01987583923339844, 0.019783872604370117, 0.021081920623779296, 0.019887104034423828, 0.019827295303344726, 0.019888351440429688, 0.020179136276245117, 0.020048959732055664, 0.01986444854736328, 0.020468959808349608, 0.01997475242614746, 0.020045055389404296, 0.01996054458618164, 0.019838911056518554, 0.019861343383789063, 0.0198590087890625, 0.019814815521240235, 0.019830623626708985, 0.019829696655273437, 0.019844608306884767, 0.01985971260070801, 0.019861503601074217, 0.019961856842041017, 0.020092927932739257, 0.02051807975769043, 0.020360000610351564, 0.020089887619018556, 0.019800256729125977, 0.019882015228271484, 0.020009727478027345, 0.019920896530151368, 0.020000768661499024, 0.01983897590637207, 0.019860799789428712, 0.019851968765258788, 0.01977756881713867, 0.019856704711914062, 0.020369407653808593, 0.0200130558013916, 0.019802112579345704, 0.01994041633605957, 0.0198189754486084, 0.020135967254638672, 0.019981855392456054, 0.019825664520263672, 0.01979792022705078, 0.019871103286743165, 0.01981500816345215, 0.01982876777648926, 0.019748863220214845, 0.020434944152832032, 0.020522911071777342, 0.02028870391845703, 0.02032089614868164, 0.022581727981567382, 0.020848447799682618, 0.02035696029663086, 0.020178592681884766, 0.019990047454833983, 0.020074560165405275, 0.02040656089782715, 0.0203372802734375, 0.02062351989746094, 0.020604864120483398, 0.020606880187988282, 0.020570112228393556, 0.020475135803222657, 0.020509439468383787, 0.020246368408203125, 0.020263071060180663, 0.020162559509277343, 0.019988447189331054, 0.019938976287841796, 0.02003596878051758, 0.020008960723876954, 0.01999667167663574, 0.019961248397827147, 0.01988400077819824, 0.0200710391998291, 0.020149791717529297, 0.020101472854614257, 0.02020569610595703, 0.020379648208618165, 0.02029507255554199, 0.020220352172851563, 0.020352287292480467, 0.020337535858154298, 0.020420608520507814, 0.02068889617919922, 0.020531295776367187, 0.02041651153564453, 0.020287391662597656, 0.020346879959106445, 0.02045654487609863, 0.020263839721679687, 0.020117504119873047, 0.019986431121826173, 0.020062559127807616, 0.020004512786865235, 0.019924224853515624]",tokens/s,49.412478446314395,, 
float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,870.2976,4804.44416,0.0,4401.922048,4400.206336,s,1,7.4995068359375,7.4995068359375,0.0,7.4995068359375,7.4995068359375,7.4995068359375,7.4995068359375,[7.4995068359375],,kWh,4.986207945830756e-06,5.428366326079535e-07,1.9263904299515744e-06,7.455435008390284e-06,,MB,1226.113024,4903.010304,0.0,4490.002432,4455.927296,s,10,1.6589028015136718,0.16589028015136717,0.004960145745024634,0.1667471466064453,0.17046709899902346,0.1715550064086914,0.17242533233642576,"[0.15395309448242187, 0.16774118041992186, 0.163581787109375, 0.16917459106445312, 0.16226678466796876, 0.16575311279296875, 0.17264291381835936, 0.16834127807617189, 0.170225341796875, 0.16522271728515625]",tokens/s,1543.1886652214457,kWh,4.665699514352027e-06,5.14545037719745e-07,3.104852572063899e-06,8.28509712413567e-06,tokens/kWh,30898853.225779995,MB,1261.166592,4909.30176,0.0,4496.293888,4455.929856,s,10,13.015702392578126,1.3015702392578126,0.0029788443698253887,1.3016301879882812,1.3047347900390625,1.3050354858398439,1.3052760424804688,"[1.304371826171875, 1.298853759765625, 1.3007095947265626, 1.299589599609375, 1.305336181640625, 1.30369189453125, 1.30198095703125, 1.30466796875, 1.3012794189453125, 1.29522119140625]",tokens/s,48.40307353364514,kWh,3.8137272366896267e-05,4.206177183557419e-06,2.512128111993588e-05,6.746473067038957e-05,tokens/kWh,933821.2629617872,,s,630,13.013538677215564,0.02065641059875488,0.00039742109644933264,0.02057968044281006,0.020940511322021484,0.02133509302139282,0.022656267948150635,"[0.022656639099121093, 0.021364736557006835, 0.020943296432495116, 0.020684799194335936, 0.02068195152282715, 0.02067523193359375, 0.020538784027099608, 0.020648160934448243, 0.02056038475036621, 0.02044927978515625, 0.020445184707641603, 0.02039753532409668, 0.0204866886138916, 0.020651071548461915, 0.020665279388427736, 0.02067251205444336, 0.02087936019897461, 0.02087068748474121, 0.020756959915161133, 0.02040540885925293, 0.020427040100097656, 0.0206177921295166, 0.020532672882080077, 0.020705856323242188, 0.02072403144836426, 0.02062508773803711, 0.020586496353149415, 0.020715295791625978, 0.021237247467041014, 0.02070806312561035, 0.02063155174255371, 0.020569408416748047, 0.020741983413696288, 0.020721952438354493, 0.020761152267456055, 0.020559871673583984, 0.02066022491455078, 0.021839103698730468, 0.020779775619506835, 0.020864320755004884, 0.020666656494140626, 0.02065782356262207, 0.02044704055786133, 0.02039494323730469, 0.020391904830932617, 0.020371488571166992, 0.02040959930419922, 0.02044595146179199, 0.02049363136291504, 0.02047216033935547, 0.02049839973449707, 0.02052537536621094, 0.02213484764099121, 0.021235712051391603, 0.020557600021362303, 0.020443359375, 0.02045916748046875, 0.020454912185668944, 0.02057097625732422, 0.020533248901367186, 0.020491935729980468, 0.020545888900756835, 0.02051625633239746, 
0.02280009651184082, 0.021416416168212892, 0.020871135711669923, 0.020577983856201174, 0.020394304275512695, 0.020299776077270508, 0.02029363250732422, 0.020260608673095704, 0.020206911087036133, 0.02028767967224121, 0.020201887130737305, 0.020283103942871094, 0.02031270408630371, 0.02050806427001953, 0.022000223159790038, 0.020750207901000975, 0.021172159194946288, 0.020654272079467774, 0.020797088623046876, 0.021020544052124022, 0.02049830436706543, 0.02035305595397949, 0.02027782440185547, 0.020238336563110353, 0.020270751953125, 0.020232032775878907, 0.020279008865356444, 0.02024937629699707, 0.020274848937988282, 0.02067878341674805, 0.020580575942993163, 0.020358591079711913, 0.020308544158935547, 0.020316160202026368, 0.020297727584838866, 0.020342784881591795, 0.02032640075683594, 0.020350976943969725, 0.02035286331176758, 0.020594688415527345, 0.020551103591918946, 0.020492448806762695, 0.02041708755493164, 0.020421823501586913, 0.020924543380737303, 0.02059107208251953, 0.020992223739624023, 0.020844512939453125, 0.02083148765563965, 0.020963552474975587, 0.020990528106689454, 0.020818016052246095, 0.020754335403442382, 0.020738208770751953, 0.020756320953369142, 0.02067990493774414, 0.020539583206176756, 0.02060348892211914, 0.02067865562438965, 0.02067865562438965, 0.020746431350708007, 0.02063465690612793, 0.02070368003845215, 0.022626079559326173, 0.021457216262817384, 0.020860063552856446, 0.020500320434570313, 0.020354047775268554, 0.020597888946533204, 0.021155712127685546, 0.020708351135253905, 0.02062950325012207, 0.02059878349304199, 0.02094220733642578, 0.020553407669067384, 0.020592992782592773, 0.020682559967041016, 0.020761375427246095, 0.02083148765563965, 0.02073878479003906, 0.020710592269897462, 0.020444000244140625, 0.020495744705200197, 0.02077555274963379, 0.02088140869140625, 0.020955039978027345, 0.02086822319030762, 0.020818208694458006, 0.02077964782714844, 0.020696447372436522, 0.020502944946289063, 0.02057356834411621, 0.020573087692260742, 0.020440576553344726, 0.02036591911315918, 0.020541343688964844, 0.020711423873901368, 0.02037513542175293, 0.020427167892456053, 0.020389408111572267, 0.020335071563720702, 0.020332256317138673, 0.020324640274047852, 0.02037555122375488, 0.020391071319580078, 0.020364128112792968, 0.020370880126953126, 0.020447551727294924, 0.020805631637573242, 0.020469791412353517, 0.020531423568725587, 0.020877248764038087, 0.020522687911987306, 0.02063164710998535, 0.020551071166992188, 0.020528287887573243, 0.02081158447265625, 0.02069910430908203, 0.020981695175170897, 0.0206167049407959, 0.020470272064208983, 0.0204902400970459, 0.020426752090454102, 0.02044313621520996, 0.02038902473449707, 0.020403039932250976, 0.02270207977294922, 0.02151219177246094, 0.020938304901123046, 0.020599231719970704, 0.020428800582885744, 0.020369407653808593, 0.020314111709594726, 0.020551679611206054, 0.02068908882141113, 0.020641599655151367, 0.020551200866699218, 0.020627935409545897, 0.020810880661010743, 0.020396671295166015, 0.02025497627258301, 0.02022400093078613, 0.020195327758789062, 0.02026905632019043, 0.020363264083862305, 0.02023219108581543, 0.02024448013305664, 0.020322303771972656, 0.020428895950317383, 0.020389663696289063, 0.020570240020751952, 0.020250368118286132, 0.020244735717773438, 0.023522687911987306, 0.02036800003051758, 0.020385791778564453, 0.020438432693481445, 0.02056867218017578, 0.02062950325012207, 0.020434112548828126, 0.02039468765258789, 0.020353151321411134, 0.020355072021484375, 0.020352991104125976, 
0.02031417655944824, 0.020356704711914062, 0.020470144271850586, 0.02047385597229004, 0.020553728103637696, 0.020596736907958983, 0.020467071533203124, 0.020671072006225585, 0.020532672882080077, 0.020624000549316405, 0.020629472732543945, 0.02064384078979492, 0.02063564872741699, 0.0208239688873291, 0.020695295333862305, 0.02065760040283203, 0.020601247787475584, 0.021094400405883788, 0.020643552780151366, 0.020629791259765624, 0.020562143325805665, 0.02067843246459961, 0.020694719314575196, 0.020699167251586915, 0.021657888412475585, 0.022605920791625978, 0.02158153533935547, 0.020875520706176758, 0.02062544059753418, 0.020625280380249023, 0.020703359603881834, 0.020548799514770507, 0.02055046463012695, 0.02044473648071289, 0.02045123291015625, 0.020763168334960936, 0.020579904556274415, 0.020528703689575194, 0.020535455703735352, 0.02078179168701172, 0.020755840301513673, 0.020709503173828126, 0.020711936950683595, 0.020668319702148438, 0.020775007247924804, 0.020739391326904298, 0.020703327178955077, 0.02063408088684082, 0.020716960906982423, 0.020594688415527345, 0.020649919509887694, 0.021156639099121095, 0.02091961669921875, 0.02098246383666992, 0.020824064254760744, 0.020793344497680662, 0.0208035831451416, 0.020688032150268553, 0.021316383361816408, 0.02068671989440918, 0.020639936447143556, 0.020508672714233397, 0.020479488372802734, 0.02048252868652344, 0.020322336196899413, 0.020491935729980468, 0.020440416336059572, 0.0204354248046875, 0.020440767288208008, 0.02051750373840332, 0.020512191772460938, 0.02069584083557129, 0.020751583099365235, 0.021080863952636718, 0.02064588737487793, 0.020596736907958983, 0.020591840744018555, 0.020660736083984374, 0.02066870307922363, 0.020708959579467775, 0.020593120574951173, 0.020619199752807616, 0.020750335693359375, 0.020736000061035157, 0.020786815643310547, 0.02071548843383789, 0.020633312225341798, 0.02056262397766113, 0.02266540718078613, 0.02142902374267578, 0.020964448928833007, 0.020699840545654297, 0.021127391815185546, 0.020770816802978515, 0.02082815933227539, 0.020774911880493165, 0.02075596809387207, 0.02053376007080078, 0.020551679611206054, 0.020561920166015626, 0.020754432678222655, 0.02063100814819336, 0.020488351821899415, 0.020738431930541992, 0.020436128616333007, 0.020365312576293947, 0.020439903259277345, 0.020422431945800783, 0.02047305679321289, 0.020652767181396484, 0.020408191680908204, 0.020412832260131835, 0.020608800888061524, 0.020711551666259764, 0.020524383544921875, 0.020407039642333983, 0.020359359741210937, 0.020444095611572264, 0.020385791778564453, 0.020375520706176757, 0.02044927978515625, 0.020587648391723633, 0.020458784103393555, 0.020623872756958008, 0.020940799713134766, 0.020506208419799804, 0.020429216384887695, 0.020402175903320312, 0.02039971160888672, 0.020414880752563477, 0.020450687408447264, 0.020462207794189453, 0.020471807479858398, 0.021272192001342772, 0.02265535926818848, 0.020539392471313478, 0.020502527236938475, 0.020469600677490235, 0.020496543884277345, 0.020770719528198242, 0.020764768600463866, 0.021079072952270506, 0.021035999298095704, 0.020715520858764647, 0.020796863555908204, 0.02080415916442871, 0.02071347236633301, 0.020692991256713866, 0.02074355125427246, 0.020648576736450194, 0.020488191604614257, 0.0226331844329834, 0.021350400924682617, 0.020909568786621095, 0.020616992950439453, 0.020390623092651366, 0.02043289566040039, 0.02062710380554199, 0.020750431060791014, 0.020869375228881836, 0.02065203285217285, 0.020505855560302735, 0.020412384033203126, 
0.020794143676757814, 0.02108415985107422, 0.02065203285217285, 0.02063564872741699, 0.020633792877197264, 0.02076652717590332, 0.020727519989013673, 0.020617088317871093, 0.020547199249267577, 0.020865728378295898, 0.02069702339172363, 0.020721567153930663, 0.020545663833618163, 0.02066022491455078, 0.020467679977416994, 0.02039414405822754, 0.020371456146240235, 0.020367231369018555, 0.020318336486816406, 0.02083020782470703, 0.020520959854125977, 0.020579456329345703, 0.020914432525634765, 0.02058537673950195, 0.020426464080810548, 0.020357120513916017, 0.021405696868896484, 0.02064384078979492, 0.02050662422180176, 0.020565696716308594, 0.020537664413452148, 0.02065737533569336, 0.020734687805175782, 0.02054355239868164, 0.020535232543945313, 0.02054150390625, 0.02058380889892578, 0.02075916862487793, 0.02070479965209961, 0.021121503829956055, 0.020650144577026366, 0.020508319854736327, 0.020531295776367187, 0.020424448013305663, 0.020443456649780273, 0.020492319107055665, 0.020422367095947264, 0.020410144805908203, 0.020580991744995118, 0.020797311782836916, 0.02044313621520996, 0.022747488021850587, 0.021573312759399416, 0.02099260711669922, 0.02063155174255371, 0.020438495635986327, 0.0205317440032959, 0.020685823440551757, 0.021096479415893554, 0.021033952713012696, 0.02072159957885742, 0.020482112884521484, 0.020414079666137695, 0.02052908706665039, 0.02043948745727539, 0.02028303909301758, 0.020510879516601563, 0.020639936447143556, 0.020725759506225586, 0.02065203285217285, 0.02070560073852539, 0.020610143661499023, 0.02059324836730957, 0.021159936904907226, 0.02248518371582031, 0.02063545608520508, 0.020555776596069338, 0.02036735916137695, 0.02050819206237793, 0.020838687896728516, 0.020404415130615236, 0.020441024780273438, 0.02076470375061035, 0.020502464294433594, 0.02044483184814453, 0.020438976287841797, 0.02061961555480957, 0.020359392166137694, 0.020373439788818358, 0.021294368743896484, 0.022108896255493164, 0.020651296615600587, 0.020533695220947265, 0.02052694320678711, 0.02047148895263672, 0.020482816696166993, 0.020509759902954103, 0.020609983444213865, 0.020530431747436524, 0.020559999465942384, 0.020568704605102538, 0.02064588737487793, 0.02064793586730957, 0.020586496353149415, 0.02085865592956543, 0.020742368698120118, 0.020531200408935548, 0.020555776596069338, 0.020824064254760744, 0.02061516761779785, 0.0205467529296875, 0.02036204719543457, 0.02033270454406738, 0.020426143646240236, 0.022767616271972657, 0.021728927612304688, 0.02142380714416504, 0.02068751907348633, 0.020539392471313478, 0.02142617607116699, 0.020755903244018555, 0.020652608871459963, 0.020426464080810548, 0.020410655975341797, 0.020312063217163084, 0.020703231811523438, 0.020651264190673826, 0.020836767196655274, 0.0206646728515625, 0.020973024368286134, 0.020429344177246095, 0.020516864776611327, 0.020338687896728515, 0.020336639404296874, 0.02064588737487793, 0.020802976608276368, 0.020640352249145507, 0.02062131118774414, 0.020545536041259766, 0.02059040069580078, 0.020444704055786134, 0.020580799102783202, 0.020426176071166993, 0.020379743576049804, 0.020365024566650392, 0.020374496459960936, 0.02040012741088867, 0.020358335494995116, 0.020577119827270507, 0.021424095153808595, 0.020768640518188476, 0.02069430351257324, 0.020652896881103517, 0.02065123176574707, 0.020503135681152345, 0.02049247932434082, 0.020473184585571288, 0.02040415954589844, 0.020450016021728516, 0.020461664199829102, 0.02047545623779297, 0.02051020812988281, 0.020547872543334962, 0.020570112228393556, 
0.020539968490600587, 0.020539392471313478, 0.020940479278564454, 0.02045779228210449, 0.02086092758178711, 0.020639968872070313, 0.020803104400634764, 0.020695392608642577, 0.020521152496337892, 0.020577247619628907, 0.020359935760498046, 0.020353023529052734, 0.020379648208618165, 0.022273887634277345, 0.02136252784729004, 0.02087139129638672, 0.020591199874877928, 0.02041983985900879, 0.02036185646057129, 0.020471935272216798, 0.02063155174255371, 0.02062745666503906, 0.020602880477905275, 0.020543487548828124, 0.020522815704345703, 0.02049452781677246, 0.020930559158325195, 0.02058345603942871, 0.020460256576538084, 0.02040243148803711, 0.020382848739624024, 0.0203571834564209, 0.020300607681274414, 0.020299232482910157, 0.020576608657836913, 0.020424224853515624, 0.020494495391845703, 0.020451839447021485, 0.020325855255126955, 0.02032694435119629, 0.020334144592285157, 0.020372928619384764, 0.020331520080566406, 0.020316064834594725, 0.020875040054321288, 0.020521312713623046, 0.02052707290649414, 0.020400096893310547, 0.02035305595397949, 0.020387840270996094, 0.020365312576293947, 0.0203240966796875, 0.02032569694519043, 0.020337024688720704, 0.02030240058898926, 0.020332544326782227, 0.02034223937988281, 0.020601375579833985, 0.02067046356201172, 0.02063974380493164, 0.0206376953125, 0.02060697555541992, 0.020619264602661135, 0.02059878349304199, 0.0206661434173584, 0.02062281608581543, 0.02057878494262695, 0.020605215072631834, 0.020846368789672852, 0.020786687850952147, 0.0208057918548584, 0.02064851188659668, 0.02052070426940918, 0.02053494453430176, 0.020533855438232423, 0.020619264602661135]",tokens/s,48.41112134265368,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 787, in __init__ self.ffn = DbrxFFN(config=config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 764, in __init__ self.experts = DbrxExperts( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 703, in __init__ self.mlp = DbrxExpertGLU( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 681, in __init__ self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 369940 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 783, in __init__ self.norm_attn_norm = DbrxNormAttentionNorm( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 595, in __init__ self.attn = DBRX_ATTENTION_CLASSES[config._attn_implementation]( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 287, in __init__ self.Wqkv = nn.Linear( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 190.12 MiB is free. Process 369106 has 14.55 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 1.55 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 787, in __init__ self.ffn = DbrxFFN(config=config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 764, in __init__ self.experts = DbrxExperts( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 703, in __init__ self.mlp = DbrxExpertGLU( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 681, in __init__ self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 370664 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 787, in __init__ self.ffn = DbrxFFN(config=config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 764, in __init__ self.experts = DbrxExperts( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 703, in __init__ self.mlp = DbrxExpertGLU( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 681, in __init__ self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 371416 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 787, in __init__ self.ffn = DbrxFFN(config=config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 764, in __init__ self.experts = DbrxExperts( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 703, in __init__ self.mlp = DbrxExpertGLU( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 681, in __init__ self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 371051 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 783, in __init__ self.norm_attn_norm = DbrxNormAttentionNorm( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 595, in __init__ self.attn = DBRX_ATTENTION_CLASSES[config._attn_implementation]( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 287, in __init__ self.Wqkv = nn.Linear( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 190.12 MiB is free. Process 370317 has 14.55 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 1.55 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 787, in __init__ self.ffn = DbrxFFN(config=config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 764, in __init__ self.experts = DbrxExperts( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 703, in __init__ self.mlp = DbrxExpertGLU( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 681, in __init__ self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 369577 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1007, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 787, in __init__ self.ffn = DbrxFFN(config=config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 764, in __init__ self.experts = DbrxExperts( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 703, in __init__ self.mlp = DbrxExpertGLU( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 681, in __init__ self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 371770 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 
182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 560.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 168763 has 14.73 GiB memory in use. Of the allocated memory 14.62 GiB is allocated by PyTorch, and 1.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 169113 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 356, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 157517 has 14.71 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 85.33 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,882.29888,12530.352128,0.0,12127.830016,12122.08896,s,1,7.16305712890625,7.16305712890625,0.0,7.16305712890625,7.16305712890625,7.16305712890625,7.16305712890625,[7.16305712890625],,kWh,7.70302700416323e-06,8.422712230081737e-07,4.412225752002674e-06,1.2957523979174079e-05,,MB,1302.306816,12704.415744,0.0,12289.31072,12248.5888,s,10,1.7112500762939453,0.17112500762939453,0.0027938450717414926,0.17149176025390625,0.17410362854003908,0.17426426239013673,0.17439276947021484,"[0.16383164978027343, 0.1715323486328125, 0.17219465637207032, 0.1705452117919922, 0.17028553771972657, 0.17442489624023438, 0.171451171875, 0.17031094360351562, 0.17260572814941405, 0.17406793212890626]",tokens/s,1495.9824022589337,kWh,5.1015775225577026e-06,5.622665000355291e-07,3.38438968068967e-06,9.048233703282902e-06,tokens/kWh,28292814.75202364,MB,1345.789952,12712.804352,0.0,12297.699328,12248.59136,s,10,33.29478833007813,3.329478833007813,0.002793257228758513,3.329294189453125,3.3334555908203125,3.333936901855469,3.3343219506835937,"[3.3256884765625, 3.326703125, 3.331346923828125, 3.3259365234375, 3.328951416015625, 
3.3333486328125, 3.334418212890625, 3.3295029296875, 3.329806640625, 3.32908544921875]",tokens/s,18.921880318153736,kWh,9.71356351424436e-05,1.071450976255313e-05,6.457402579710997e-05,0.00017242417070210673,tokens/kWh,365378.00787131896,,s,630,33.29203848648074,0.05284450553409637,0.0004357898091727626,0.05276875114440918,0.05306257095336914,0.053191564178466794,0.05566008026123047,"[0.05563340759277344, 0.053477409362792966, 0.05293558502197265, 0.052795360565185544, 0.052814975738525394, 0.052744670867919924, 0.05276480102539063, 0.05274153518676758, 0.05260960006713867, 0.052549247741699216, 0.05258076858520508, 0.05244140625, 0.052424671173095704, 0.0527503662109375, 0.05286832046508789, 0.05251561737060547, 0.052461280822753906, 0.05259497451782227, 0.05285683059692383, 0.05308732986450195, 0.05275289535522461, 0.05268931198120117, 0.05270937728881836, 0.05262259292602539, 0.05278591918945313, 0.0526841926574707, 0.052703838348388675, 0.052485889434814456, 0.052588577270507815, 0.052536705017089846, 0.05250668716430664, 0.052544288635253907, 0.05266022491455078, 0.05264384078979492, 0.05256806564331055, 0.05272371292114258, 0.05278294372558594, 0.05289519882202148, 0.052613822937011716, 0.053303295135498044, 0.052770465850830076, 0.05283638381958008, 0.05284486389160156, 0.0526827507019043, 0.05250201416015625, 0.05258086395263672, 0.05264937591552735, 0.05279916763305664, 0.05295542526245117, 0.05313552093505859, 0.05285116958618164, 0.05271308898925781, 0.052687232971191406, 0.05273507308959961, 0.0527674560546875, 0.05289123153686524, 0.05279395294189453, 0.05278214263916016, 0.05288851165771485, 0.052848575592041015, 0.05273811340332031, 0.05275033569335937, 0.052752384185791014, 0.05620703887939453, 0.05360262298583984, 0.05298755264282227, 0.052745887756347656, 0.052678913116455076, 0.052717601776123044, 0.05284307098388672, 0.05273948669433594, 0.05261481475830078, 0.05275459289550781, 0.05267724609375, 0.052512767791748044, 0.05254547119140625, 0.05274838256835938, 0.05261011123657226, 0.05270796966552734, 0.05264822387695312, 0.0527749137878418, 0.053133312225341796, 0.053125118255615236, 0.052918270111083986, 0.05269894409179687, 0.05263091278076172, 0.05258528137207031, 0.052602817535400394, 0.052641857147216795, 0.05271756744384765, 0.0526295051574707, 0.05250457763671875, 0.05247564697265625, 0.05245772933959961, 0.052679710388183594, 0.05262243270874024, 0.052602432250976563, 0.05258067321777344, 0.052682273864746096, 0.05271356964111328, 0.052752769470214844, 0.05297350311279297, 0.052856800079345706, 0.05274019241333008, 0.052770816802978515, 0.05266147232055664, 0.052648735046386716, 0.05260902404785156, 0.05260902404785156, 0.052719615936279295, 0.05284249496459961, 0.05286867141723633, 0.05307385635375977, 0.052906494140625, 0.05273395156860351, 0.05270115280151367, 0.052621345520019534, 0.05283225631713867, 0.05290713500976563, 0.05282495880126953, 0.05271343994140625, 0.05271555328369141, 0.052934688568115236, 0.05275849533081055, 0.05266838455200195, 0.05288553619384766, 0.05618735885620117, 0.053650657653808595, 0.05301107025146484, 0.05293686294555664, 0.05273798370361328, 0.05272326278686523, 0.052797183990478516, 0.05278796768188477, 0.05272780990600586, 0.05304067230224609, 0.05315798568725586, 0.052744575500488285, 0.052620864868164065, 0.05266185760498047, 0.05268124771118164, 0.05261507034301758, 0.05253878402709961, 0.052691967010498046, 0.05324163055419922, 0.05316425704956055, 0.05305535888671875, 0.052848766326904294, 0.052847999572753906, 
0.052742782592773436, 0.05297308731079101, 0.05328736114501953, 0.05284457778930664, 0.05265750503540039, 0.052616928100585936, 0.052768894195556644, 0.05271587371826172, 0.05263203048706055, 0.052589984893798826, 0.05257072067260742, 0.05276406478881836, 0.052687454223632815, 0.052596641540527345, 0.05300643157958984, 0.052833599090576173, 0.05279199981689453, 0.05284659194946289, 0.052787071228027345, 0.05295731353759765, 0.05269712066650391, 0.05264361572265625, 0.052674751281738284, 0.05282358551025391, 0.05284092712402344, 0.05294694519042969, 0.05295222473144531, 0.0528612174987793, 0.05262188720703125, 0.052721664428710936, 0.05257830429077148, 0.05267660903930664, 0.05300428771972656, 0.05283955383300781, 0.05281472015380859, 0.05297734451293945, 0.052848224639892576, 0.052800094604492184, 0.05282118225097656, 0.052800289154052736, 0.05575398254394531, 0.05355513763427734, 0.05291872024536133, 0.05274867248535156, 0.05264550399780273, 0.05271088027954102, 0.05265926361083984, 0.052690784454345704, 0.05265177536010742, 0.052782817840576174, 0.052724254608154296, 0.05254348754882812, 0.052566017150878906, 0.052531200408935545, 0.05253734588623047, 0.05247795104980469, 0.05242879867553711, 0.0526879997253418, 0.05306867218017578, 0.05300630569458008, 0.05297078323364258, 0.05277065658569336, 0.05265055847167969, 0.0526646728515625, 0.05269606399536133, 0.05255456161499023, 0.05251465606689453, 0.05254076766967773, 0.05250559997558594, 0.05255987167358398, 0.05253020858764648, 0.052560672760009766, 0.05249833679199219, 0.05264003372192383, 0.05264384078979492, 0.05261936187744141, 0.052571807861328125, 0.05271289443969727, 0.05288345718383789, 0.05283308792114258, 0.05278211212158203, 0.05296022415161133, 0.05285638427734375, 0.052846656799316404, 0.052586879730224606, 0.05255782318115235, 0.052779006958007815, 0.05283225631713867, 0.052910079956054686, 0.052893695831298826, 0.05289295959472656, 0.05268121719360352, 0.05248543930053711, 0.052609886169433594, 0.05286624145507812, 0.05288438415527344, 0.05276198577880859, 0.052703838348388675, 0.05294255828857422, 0.05306601715087891, 0.05304729461669922, 0.05287756729125977, 0.053200160980224606, 0.05658345413208008, 0.05406512069702148, 0.05304191970825195, 0.05283830261230469, 0.05267865753173828, 0.0526890869140625, 0.052823871612548826, 0.05285907363891602, 0.05286912155151367, 0.052741344451904294, 0.05268764877319336, 0.052588542938232424, 0.05256380844116211, 0.052576446533203126, 0.05257139205932617, 0.05249212646484375, 0.05249465560913086, 0.05261894226074219, 0.05301692962646484, 0.05305193710327148, 0.052893695831298826, 0.05281769561767578, 0.052758464813232424, 0.0527608642578125, 0.05278860855102539, 0.05266495895385742, 0.05265817642211914, 0.05257833480834961, 0.05246355056762695, 0.052658206939697264, 0.052615169525146485, 0.052555648803710935, 0.052477760314941405, 0.052535102844238284, 0.05261363220214844, 0.05259017562866211, 0.05275484848022461, 0.05283430480957031, 0.05278271865844727, 0.05290230560302735, 0.052781024932861326, 0.05286812973022461, 0.05288812637329102, 0.05279580688476562, 0.05263372802734375, 0.05259574508666992, 0.05291913604736328, 0.052994144439697265, 0.05294204711914063, 0.0529681282043457, 0.05303827285766602, 0.05284310531616211, 0.052709407806396484, 0.05264371109008789, 0.05283609771728515, 0.052918846130371094, 0.05272137451171875, 0.052737438201904296, 0.052939647674560546, 0.0530063362121582, 0.052825664520263674, 0.05271926498413086, 0.05281603240966797, 0.055549217224121095, 
0.05337481689453125, 0.05286716842651367, 0.05265692901611328, 0.05262451171875, 0.05263859176635742, 0.05266403198242187, 0.05266665649414062, 0.0526295051574707, 0.0526376953125, 0.0527154541015625, 0.05257427215576172, 0.05252710342407227, 0.0525269775390625, 0.052561695098876954, 0.052657726287841794, 0.05254412841796875, 0.0527749137878418, 0.05303519821166992, 0.05300835037231445, 0.05284390258789062, 0.052939552307128906, 0.05295452880859375, 0.052871265411376954, 0.053116798400878906, 0.052754913330078125, 0.05283542251586914, 0.05264681625366211, 0.052754302978515626, 0.05280681610107422, 0.05277590560913086, 0.05293056106567383, 0.052891647338867184, 0.05290393447875977, 0.05289295959472656, 0.052660991668701175, 0.05278086471557617, 0.05281763076782227, 0.052696609497070314, 0.05272668838500977, 0.052744190216064454, 0.053175678253173826, 0.05318105697631836, 0.05301862335205078, 0.05290598297119141, 0.052985855102539066, 0.0530239028930664, 0.052975486755371094, 0.05302985763549805, 0.05310259246826172, 0.05293260955810547, 0.052754081726074216, 0.05279296112060547, 0.05269782257080078, 0.05299558258056641, 0.052934558868408206, 0.052970081329345706, 0.053167873382568356, 0.053147903442382814, 0.05323980712890625, 0.053165248870849606, 0.05314419174194336, 0.053070014953613284, 0.05567097473144531, 0.053563358306884766, 0.05296335983276367, 0.05286419296264649, 0.052837184906005856, 0.05273952102661133, 0.05280339050292969, 0.05300300979614258, 0.05283187103271485, 0.052826496124267576, 0.05269216156005859, 0.05273241424560547, 0.0527968635559082, 0.052693889617919924, 0.05258444976806641, 0.052421630859375, 0.052502849578857425, 0.05280633544921875, 0.05323471832275391, 0.05335548782348633, 0.05306351852416992, 0.053020286560058597, 0.05308371353149414, 0.05298230361938477, 0.052912574768066406, 0.053093441009521486, 0.05291856002807617, 0.052970142364501954, 0.05279743957519531, 0.052813182830810546, 0.0529697265625, 0.05268108749389649, 0.05256764984130859, 0.052726177215576174, 0.052686622619628906, 0.05262768173217774, 0.05273788833618164, 0.05294300842285156, 0.05293875122070312, 0.052837535858154296, 0.052835166931152346, 0.052950462341308596, 0.05281644821166992, 0.05274214553833008, 0.05266841506958008, 0.05259254455566406, 0.052963520050048826, 0.0529898567199707, 0.05309235382080078, 0.053285888671875, 0.053136512756347655, 0.05297343826293945, 0.05273347091674805, 0.05279564666748047, 0.0530351676940918, 0.05297177505493164, 0.052875072479248046, 0.05296860885620117, 0.05301948928833008, 0.053059585571289064, 0.05280972671508789, 0.0527768325805664, 0.052768543243408204, 0.05593088150024414, 0.05350604629516602, 0.052891647338867184, 0.052709182739257815, 0.052587711334228515, 0.0527718391418457, 0.05273545455932617, 0.05284713745117187, 0.05279743957519531, 0.0526192626953125, 0.052621440887451174, 0.05250214385986328, 0.05251436614990234, 0.052571903228759764, 0.05255836868286133, 0.05252316665649414, 0.05247769546508789, 0.05272377777099609, 0.05308646392822266, 0.05333795166015625, 0.05306569671630859, 0.05282144165039063, 0.05262451171875, 0.05256780624389648, 0.0526580810546875, 0.05257231903076172, 0.052780799865722657, 0.05268035125732422, 0.05251702499389648, 0.0526545295715332, 0.052569183349609375, 0.05257622528076172, 0.052680927276611327, 0.05267123031616211, 0.05271343994140625, 0.052670463562011716, 0.052803455352783205, 0.053125247955322266, 0.05291417694091797, 0.05298515319824219, 0.05284268951416016, 0.05282457733154297, 0.05287894439697265, 
0.05282793426513672, 0.0525726089477539, 0.05274415969848633, 0.05285884857177734, 0.0528612174987793, 0.05309641647338867, 0.05300128173828125, 0.05286099243164062, 0.052706016540527346, 0.05259075164794922, 0.05275033569335937, 0.05284659194946289, 0.05299609756469727, 0.05345014572143555, 0.05280624008178711, 0.05329510498046875, 0.05306982421875, 0.05282403182983399, 0.05275651168823242, 0.05280767822265625, 0.0554071044921875, 0.053472640991210935, 0.05291823959350586, 0.05269161605834961, 0.05260083389282227, 0.05272115325927734, 0.052754878997802734, 0.05274630355834961, 0.05273811340332031, 0.05274534225463867, 0.05282499313354492, 0.05261097717285156, 0.05263359832763672, 0.052563968658447265, 0.05259369659423828, 0.0526181755065918, 0.05256156921386719, 0.05304953765869141, 0.053081886291503906, 0.053162399291992186, 0.053008384704589843, 0.05284249496459961, 0.05271551895141602, 0.052729854583740236, 0.05277286529541016, 0.05272576141357422, 0.05263679885864258, 0.05268979263305664, 0.05265423965454102, 0.05276860809326172, 0.052582111358642575, 0.052717857360839844, 0.05261843109130859, 0.05273683166503906, 0.052598785400390625, 0.05272576141357422, 0.052803585052490234, 0.05294899368286133, 0.05314563369750976, 0.05293462371826172, 0.052867103576660156, 0.05301027297973633, 0.052918399810791016, 0.052760574340820314, 0.052660030364990236, 0.05258448028564453, 0.052770687103271485, 0.05297795104980469, 0.05296144104003906, 0.05294473648071289, 0.052877311706542966, 0.053048641204833984, 0.0526732177734375, 0.05270505523681641, 0.05291030502319336, 0.05296332931518555, 0.0530239028930664, 0.052940704345703124, 0.05285779190063476, 0.05287116622924805, 0.052738048553466796, 0.05279481506347656, 0.05283065414428711, 0.05570499038696289, 0.05368646240234375, 0.05285059356689453, 0.05274035263061523, 0.05272143936157227, 0.052714305877685545, 0.052780544281005856, 0.052746753692626956, 0.05271343994140625, 0.05265356826782226, 0.052765216827392575, 0.052729854583740236, 0.05255353546142578, 0.052510208129882815, 0.05267731094360351, 0.05253529739379883, 0.05261494445800781, 0.052735328674316406, 0.05306246566772461, 0.053276737213134764, 0.05300352096557617, 0.052875457763671876, 0.05270175933837891, 0.05265631866455078, 0.05286687850952149, 0.052789249420166016, 0.05270505523681641, 0.05270755386352539, 0.052582401275634766, 0.05258649444580078, 0.05262931060791016, 0.052545089721679684, 0.052666175842285154, 0.052687713623046875, 0.05262089538574219, 0.052740478515625, 0.0528869743347168, 0.05304582214355469, 0.052985855102539066, 0.05282160186767578, 0.05277328109741211, 0.05279334259033203, 0.05284403228759765, 0.05275923156738281, 0.0526313591003418, 0.05279500961303711, 0.05285724639892578, 0.052996063232421876, 0.0529062385559082, 0.05297945785522461, 0.05289574432373047, 0.052727584838867185, 0.052698848724365234, 0.05264575958251953, 0.05267718505859375, 0.053020286560058597, 0.05284422302246094, 0.05292022323608398, 0.05289625549316406, 0.05289814376831055, 0.0528790397644043, 0.052736385345458985, 0.052780990600585935]",tokens/s,18.92344322069168,, 
float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return 
forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 560.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 169828 has 14.73 GiB memory in use. Of the allocated memory 14.62 GiB is allocated by PyTorch, and 1.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,882.987008,12530.352128,0.0,12127.830016,12122.08896,s,1,7.27475830078125,7.27475830078125,0.0,7.27475830078125,7.27475830078125,7.27475830078125,7.27475830078125,[7.27475830078125],,kWh,8.104448683305539e-06,8.66475839571068e-07,4.951392849998626e-06,1.3922317372875234e-05,,MB,1206.976512,12702.318592,0.0,12289.31072,12248.5888,s,10,11.061835083007814,1.1061835083007814,0.0035120959019139695,1.1066976928710939,1.1098046630859375,1.109964404296875,1.110092197265625,"[1.0999833984375, 1.1030943603515626, 1.1045450439453126, 1.10368115234375, 1.109599853515625, 1.1029927978515626, 1.10919482421875, 1.1097691650390624, 1.1101241455078126, 1.108850341796875]",tokens/s,231.4263393722475,kWh,3.2403021676670205e-05,3.571269727035649e-06,2.1485211632599895e-05,5.7459503036305746e-05,tokens/kWh,4455311.7669369085,MB,1255.05536,12710.7072,0.0,12297.699328,12248.59136,s,10,33.672875976562494,3.36728759765625,0.003624973762351431,3.3671678466796875,3.3718475585937497,3.372897412109375,3.373737294921875,"[3.36251025390625, 3.3637431640625, 3.3716142578125, 3.3657763671875, 3.36827880859375, 3.367775146484375, 3.366560546875, 3.37001611328125, 3.362654052734375, 3.373947265625]",tokens/s,18.709420616121477,kWh,9.832140003291215e-05,1.0846932733175633e-05,6.524391330620035e-05,0.00017441224607228815,tokens/kWh,361213.16833388276,,s,630,33.6700317993164,0.05344449491954985,0.00028881589293715753,0.05343684768676758,0.05368059425354004,0.0537969087600708,0.05488070735931397,"[0.055278846740722656, 0.05345507049560547, 0.05354352188110351, 0.05298995208740234, 0.05297148895263672, 0.05307315063476563, 0.05307881546020508, 0.05297971343994141, 0.05295820617675781, 0.052997119903564455, 0.05306982421875, 0.053161663055419923, 0.05304966354370117, 0.053276065826416016, 0.05318921661376953, 0.053310592651367186, 0.05324844741821289, 0.053555648803710935, 0.053507774353027344, 0.05365177536010742, 0.05337615966796875, 0.05328572845458984, 0.05320499038696289, 0.05306367874145508, 0.05301161575317383, 0.05314022445678711, 0.053091743469238284, 0.05317305755615234, 0.05341785430908203, 0.05331302261352539, 0.05340620803833008, 0.05320665740966797, 0.053221759796142576, 0.05336227035522461, 0.0533590087890625, 0.05349792098999023, 0.05349308776855469, 0.05364591979980469, 0.05351628875732422, 0.05346918487548828, 0.053411872863769534, 0.053317214965820314, 0.05321104049682617, 0.05341436767578125, 0.053305343627929686, 0.05339136123657227, 0.05341388702392578, 0.05334649658203125, 0.053343616485595706, 0.05346758270263672, 0.05348556900024414, 0.0535079345703125, 0.05349350357055664, 0.05362524795532227, 0.0539791374206543, 0.05352553558349609, 0.05349884796142578, 
0.05351628875732422, 0.05358335876464844, 0.053623294830322264, 0.05337913513183594, 0.05336636734008789, 0.053438816070556644, 0.05491120147705078, 0.053425983428955076, 0.0530871696472168, 0.053008384704589843, 0.0530882568359375, 0.05306272125244141, 0.05299209594726562, 0.052972030639648435, 0.05310851287841797, 0.05324025726318359, 0.05317977523803711, 0.053279712677001954, 0.05337228775024414, 0.05338505554199219, 0.05347590255737305, 0.0534466552734375, 0.053198848724365234, 0.05335395050048828, 0.05334796905517578, 0.05343939208984375, 0.05333772659301758, 0.05318489456176758, 0.0531025276184082, 0.05324601745605469, 0.05313536071777344, 0.05312716674804688, 0.053200897216796876, 0.05333734512329102, 0.053212993621826174, 0.053273185729980466, 0.05341628646850586, 0.053368991851806644, 0.05328015899658203, 0.05345248031616211, 0.053377792358398436, 0.05352761459350586, 0.05352518463134766, 0.05364307022094727, 0.05348611068725586, 0.05349788665771484, 0.05331955337524414, 0.05340364837646484, 0.05328598403930664, 0.05345987319946289, 0.05348761749267578, 0.05334220886230469, 0.053493759155273435, 0.0534200325012207, 0.053512191772460936, 0.05337702560424805, 0.05352825546264649, 0.053520126342773436, 0.05345951843261719, 0.053571678161621096, 0.05359132766723633, 0.05373811340332031, 0.05365129470825195, 0.05361270523071289, 0.0534466552734375, 0.053495071411132813, 0.05361123275756836, 0.05348697662353516, 0.05353887939453125, 0.054640640258789064, 0.05344038391113281, 0.05324803161621094, 0.05308121490478516, 0.05294905471801758, 0.053128097534179686, 0.05322726440429688, 0.05339955139160156, 0.053322208404541015, 0.05324588775634766, 0.053327713012695316, 0.05341961669921875, 0.05316649627685547, 0.053368064880371095, 0.05328963088989258, 0.05326671981811523, 0.053456703186035154, 0.053900543212890624, 0.05360070419311523, 0.05358534240722656, 0.05332262420654297, 0.053286911010742184, 0.05363711929321289, 0.053321281433105466, 0.053344192504882815, 0.053408512115478514, 0.05344419097900391, 0.053502113342285156, 0.053424129486083986, 0.05383168029785156, 0.05347942352294922, 0.0533658561706543, 0.053288959503173826, 0.053392288208007815, 0.05351561737060547, 0.054162273406982424, 0.05360166549682617, 0.05362732696533203, 0.053602302551269534, 0.0533807373046875, 0.053537120819091795, 0.05354703903198242, 0.05354086303710937, 0.053587360382080076, 0.053572193145751956, 0.05353881454467774, 0.053601505279541016, 0.053596351623535154, 0.0535676155090332, 0.0535761604309082, 0.05363916778564453, 0.05355475234985352, 0.05369286346435547, 0.053789886474609375, 0.05358374404907226, 0.05364019012451172, 0.053688255310058594, 0.05363420867919922, 0.05358220672607422, 0.05360483169555664, 0.05365679931640625, 0.05370755386352539, 0.05385830307006836, 0.055126495361328125, 0.053575454711914064, 0.05307993698120117, 0.05306156921386719, 0.053002368927001955, 0.053173919677734376, 0.05307779312133789, 0.053031871795654294, 0.053172126770019534, 0.05305753707885742, 0.05305926513671875, 0.05316012954711914, 0.05324403381347656, 0.05335865783691406, 0.05328390502929688, 0.053384063720703125, 0.05320499038696289, 0.05334431838989258, 0.053533920288085936, 0.053469921112060545, 0.05330739212036133, 0.05332691192626953, 0.05320390319824219, 0.05316812896728516, 0.05319680023193359, 0.05327872085571289, 0.05318041610717773, 0.053404830932617185, 0.053389217376708986, 0.0532960319519043, 0.05345004653930664, 0.053426849365234376, 0.05333744049072266, 0.05340822219848633, 0.053313568115234376, 
0.053524158477783204, 0.053604896545410154, 0.05344255828857422, 0.05344870376586914, 0.05348966217041016, 0.05347900772094726, 0.0533590087890625, 0.053460063934326174, 0.05338614273071289, 0.05349785614013672, 0.05362681579589844, 0.05358339309692383, 0.053604896545410154, 0.05371449661254883, 0.053655967712402344, 0.0540733757019043, 0.053440032958984376, 0.05336876678466797, 0.05366019058227539, 0.05367523193359375, 0.053588863372802734, 0.05357072067260742, 0.05338339233398438, 0.05353526306152344, 0.053593631744384765, 0.05340822219848633, 0.05354662322998047, 0.05366624069213867, 0.05463420867919922, 0.05340198516845703, 0.05329705429077149, 0.05313750457763672, 0.053036640167236325, 0.05339984130859375, 0.053229694366455076, 0.05315347290039062, 0.05304985427856445, 0.05323971176147461, 0.05312707138061523, 0.05320908737182617, 0.05343209457397461, 0.053432544708251956, 0.05324588775634766, 0.05323116683959961, 0.05321779251098633, 0.05347532653808594, 0.05396275329589844, 0.053623809814453124, 0.05350678253173828, 0.0534835205078125, 0.0532435188293457, 0.053285537719726564, 0.05325619125366211, 0.05316175842285156, 0.05333628845214844, 0.05345075225830078, 0.053337184906005856, 0.0534659538269043, 0.053477279663085936, 0.05339561462402344, 0.0534031982421875, 0.053309024810791014, 0.05340860748291015, 0.05367955017089844, 0.05360031890869141, 0.05414348983764648, 0.05355929565429687, 0.053542911529541014, 0.05341798400878906, 0.05338521575927734, 0.053381088256835935, 0.05339683151245117, 0.05331785583496094, 0.053454593658447264, 0.05347401428222656, 0.05359206390380859, 0.05362278366088867, 0.05363097763061524, 0.053526527404785154, 0.05351833724975586, 0.053628158569335935, 0.053754623413085935, 0.05370841598510742, 0.05391593551635742, 0.05352444839477539, 0.05359014511108398, 0.05358403015136719, 0.053448543548583985, 0.053493759155273435, 0.05362035369873047, 0.053416576385498044, 0.055191551208496094, 0.053560863494873046, 0.05321980667114258, 0.05305708694458008, 0.05299792098999023, 0.053184257507324216, 0.05333049774169922, 0.05321321487426758, 0.05321942520141602, 0.05338963317871094, 0.05342403030395508, 0.053294174194335936, 0.05321615982055664, 0.053233665466308595, 0.05329862213134766, 0.053117504119873045, 0.05306175994873047, 0.053359615325927735, 0.05350060653686523, 0.05360617446899414, 0.05343862533569336, 0.05326611328125, 0.05314771270751953, 0.05319132614135742, 0.05345580673217774, 0.05330422210693359, 0.05327667236328125, 0.05343164825439453, 0.05352719879150391, 0.053403072357177735, 0.0533427848815918, 0.053441535949707034, 0.05322982406616211, 0.053409950256347656, 0.05337929534912109, 0.053493663787841796, 0.053460895538330076, 0.05373539352416992, 0.0536165771484375, 0.0534381103515625, 0.053351425170898435, 0.053361888885498046, 0.053346176147460934, 0.05331558227539063, 0.053455551147460936, 0.05361072158813476, 0.05345280075073242, 0.0534917106628418, 0.0535203857421875, 0.05351424026489258, 0.05341715240478516, 0.053628799438476565, 0.053543777465820314, 0.05365564727783203, 0.05377024078369141, 0.053709983825683594, 0.05361340713500977, 0.05411958312988281, 0.05365200042724609, 0.05372742462158203, 0.053642654418945314, 0.053520256042480466, 0.05362364959716797, 0.05494169616699219, 0.05347942352294922, 0.0531517448425293, 0.053114879608154295, 0.052942848205566405, 0.05319475173950195, 0.05299222564697265, 0.05301241683959961, 0.05306351852416992, 0.05320294570922852, 0.05319475173950195, 0.05331353759765625, 0.05329305648803711, 
0.05334182357788086, 0.0532279052734375, 0.05328003311157226, 0.05329967880249024, 0.05336089706420898, 0.053684223175048826, 0.053395263671875, 0.05322377777099609, 0.05329616165161133, 0.05321196746826172, 0.05321235275268555, 0.053238590240478514, 0.05331468963623047, 0.05325094223022461, 0.05347244644165039, 0.05339628982543945, 0.053356544494628906, 0.0534486083984375, 0.05333411026000977, 0.05333401489257812, 0.053422080993652345, 0.053480640411376956, 0.05376287841796875, 0.053525951385498045, 0.05346771240234375, 0.05345075225830078, 0.05344460678100586, 0.05325823974609375, 0.053379070281982424, 0.05331481552124023, 0.05348227310180664, 0.05357075119018555, 0.053545345306396486, 0.05343628692626953, 0.05363119888305664, 0.05362294387817383, 0.053639328002929684, 0.05350809478759765, 0.05359958267211914, 0.05361936187744141, 0.05361616134643555, 0.05388057708740234, 0.054078174591064454, 0.05363302230834961, 0.05353881454467774, 0.05359001541137695, 0.05360652923583984, 0.05353766250610351, 0.053486400604248044, 0.05357587051391602, 0.055189697265625, 0.05360521697998047, 0.05314796829223633, 0.05312134552001953, 0.05302220916748047, 0.05316284942626953, 0.053085662841796874, 0.053187103271484376, 0.05324595260620117, 0.05327462387084961, 0.053147647857666014, 0.05329724884033203, 0.05313260650634766, 0.053363296508789064, 0.053249919891357425, 0.053294750213623045, 0.05315427017211914, 0.05354819107055664, 0.0536965446472168, 0.05358675384521484, 0.053451904296875, 0.05337996673583984, 0.0533831672668457, 0.05332787322998047, 0.053288959503173826, 0.05348147201538086, 0.053372447967529296, 0.053319679260253904, 0.05329967880249024, 0.053580928802490234, 0.053381919860839844, 0.053421695709228514, 0.053338592529296874, 0.053497600555419925, 0.05336659240722656, 0.05361708831787109, 0.05368832015991211, 0.05362825775146484, 0.05359478378295898, 0.05363302230834961, 0.05343641662597656, 0.05353583908081055, 0.053605281829833984, 0.053498912811279296, 0.05363792037963867, 0.05385644912719727, 0.053663745880126956, 0.0534896011352539, 0.053354366302490235, 0.05356719970703125, 0.05350796890258789, 0.053489631652832034, 0.053686847686767576, 0.05366995239257812, 0.05368832015991211, 0.05380265426635742, 0.05372707366943359, 0.0537154541015625, 0.05363097763061524, 0.053738975524902345, 0.05358195114135742, 0.05360841751098633, 0.053680191040039064, 0.054806049346923826, 0.05346876907348633, 0.053122142791748046, 0.05297689437866211, 0.05287785720825195, 0.053009952545166016, 0.05293904113769531, 0.05317631912231445, 0.05316412734985351, 0.0531866569519043, 0.053198848724365234, 0.053233665466308595, 0.05322947311401367, 0.053144798278808594, 0.05308940887451172, 0.053216766357421875, 0.05328947067260742, 0.05345203018188477, 0.05357823944091797, 0.05341388702392578, 0.05325564956665039, 0.05335670471191406, 0.05309833526611328, 0.05321372985839844, 0.05320646286010742, 0.05327942276000976, 0.05313888168334961, 0.05335622406005859, 0.05333391952514648, 0.053437278747558596, 0.05341971206665039, 0.05344393539428711, 0.053193599700927734, 0.05322463989257813, 0.05340047836303711, 0.05350809478759765, 0.05349555206298828, 0.05354832077026367, 0.05347401428222656, 0.05359027099609375, 0.0534442253112793, 0.053421951293945315, 0.05326694488525391, 0.05327872085571289, 0.05329062271118164, 0.05341836929321289, 0.053454849243164064, 0.053501953125, 0.05343641662597656, 0.053491519927978515, 0.053346240997314456, 0.0535079345703125, 0.053456417083740236, 0.05350848007202148, 
0.053508129119873044, 0.05347375869750977, 0.053505760192871094, 0.053631359100341794, 0.05372304153442383, 0.05360038375854492, 0.053530017852783204, 0.05355715179443359, 0.05348742294311523, 0.0553526382446289, 0.053842144012451174, 0.05320550537109375, 0.05313145446777344, 0.05375580978393555, 0.053184608459472656, 0.05317552185058594, 0.05316483306884766, 0.05324099349975586, 0.05344128036499023, 0.05336278533935547, 0.053373119354248044, 0.05345065689086914, 0.053442462921142575, 0.05322751998901367, 0.05318880081176758, 0.053137214660644534, 0.05346694564819336, 0.05367555236816406, 0.053672607421875, 0.05337606430053711, 0.053471454620361326, 0.05334089660644531, 0.053272575378417966, 0.053196609497070314, 0.05337107086181641, 0.053321727752685545, 0.053317344665527344, 0.053474750518798825, 0.05349871826171875, 0.05347894287109375, 0.05339350509643555, 0.05352444839477539, 0.05355152130126953, 0.05355110549926758, 0.05372079849243164, 0.053644927978515625, 0.05356540679931641, 0.05371360015869141, 0.053579776763916016, 0.053596160888671876, 0.05352243041992188, 0.05348147201538086, 0.05348688125610351, 0.05359500885009766, 0.05346028900146484, 0.05348611068725586, 0.05351436614990234, 0.05355097579956054, 0.0538724479675293, 0.053731521606445315, 0.05370265579223633, 0.053710849761962894, 0.05384960174560547, 0.05377280044555664, 0.05382688140869141, 0.05384457778930664, 0.0538317756652832, 0.053821342468261715, 0.05376009750366211, 0.053759998321533206, 0.053661697387695315, 0.05392588806152344]",tokens/s,18.711001039588883,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in 
load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 170205 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 169476 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,882.896896,12530.352128,0.0,12127.830016,12122.08896,s,1,7.2202919921875,7.2202919921875,0.0,7.2202919921875,7.2202919921875,7.2202919921875,7.2202919921875,[7.2202919921875],,kWh,7.604624329186056e-06,8.31568194422373e-07,3.010557964002597e-06,1.1446750487611027e-05,,MB,1326.669824,12704.415744,0.0,12289.31072,12248.5888,s,10,1.7817932434082033,0.17817932434082034,0.0024478150576192105,0.17849166107177733,0.18066952056884766,0.18078581771850588,0.18087885543823243,"[0.17192950439453125, 0.17818797302246095, 0.17841004943847658, 0.1768067169189453, 0.1797464599609375, 0.1785732727050781, 0.17947891235351562, 0.18090211486816407, 0.1806436767578125, 0.17711456298828124]",tokens/s,1436.7548027644596,kWh,5.220272146874549e-06,5.756987808120636e-07,3.4895910853214348e-06,9.285562013008047e-06,tokens/kWh,27569682.873408448,MB,1382.907904,12714.901504,0.0,12297.699328,12248.59136,s,10,33.169950439453125,3.3169950439453126,0.002144470311395144,3.3168643798828126,3.319463427734375,3.3195075927734377,3.3195429248046877,"[3.3149462890625, 3.31375244140625, 3.31538427734375, 3.3181201171875, 3.315608642578125, 3.314974853515625, 3.31945361328125, 3.3195517578125, 3.3193466796875, 3.318811767578125]",tokens/s,18.993094401813245,kWh,9.696391268895809e-05,1.069525098051943e-05,6.435951874947854e-05,0.0001720186824189561,tokens/kWh,366239.2893265036,,s,630,33.16721828842166,0.0526463782355899,0.0004593346585512815,0.05256920051574707,0.05287371940612793,0.05303268432617188,0.055697619476318364,"[0.05573651123046875, 0.05378627014160156, 0.052547489166259766, 0.05245177459716797, 0.05215599822998047, 0.05228995132446289, 0.05222809600830078, 0.05231206512451172, 0.05217200088500976, 0.05221046447753906, 0.05215436935424805, 0.05223014450073242, 0.052405696868896484, 0.052431198120117185, 0.05227542495727539, 0.05232009506225586, 0.052107166290283204, 0.052248256683349606, 0.052697662353515626, 0.05285683059692383, 0.053028865814208986, 
0.052767807006835935, 0.05258745574951172, 0.052588542938232424, 0.0523612174987793, 0.05243494415283203, 0.05296681594848633, 0.0525376968383789, 0.052408287048339844, 0.052420894622802736, 0.05243843078613281, 0.05246214294433594, 0.05233667373657227, 0.05254681777954102, 0.052494144439697264, 0.052456127166748044, 0.05273014450073242, 0.05285647964477539, 0.052762943267822264, 0.053025985717773436, 0.052758815765380856, 0.05286966323852539, 0.052729854583740236, 0.05263673782348633, 0.05244400024414062, 0.05253907012939453, 0.052549983978271486, 0.05242035293579102, 0.05241273498535156, 0.052459358215332035, 0.052396190643310546, 0.052663360595703125, 0.0525464973449707, 0.05249548721313477, 0.05247884750366211, 0.05261635208129883, 0.052892513275146484, 0.052961280822753906, 0.052746238708496096, 0.052717601776123044, 0.052699104309082034, 0.052991329193115236, 0.052822689056396484, 0.055602401733398435, 0.05331027221679688, 0.052400096893310544, 0.052448448181152345, 0.052179393768310545, 0.05227763366699219, 0.052133216857910156, 0.05239673614501953, 0.052364288330078126, 0.052329345703125, 0.05227939224243164, 0.05226496124267578, 0.05229363250732422, 0.05237097549438476, 0.0522982063293457, 0.052295169830322265, 0.05217705535888672, 0.052349281311035153, 0.052588417053222654, 0.05304742431640625, 0.05303910446166992, 0.0526929931640625, 0.05263552093505859, 0.05261939239501953, 0.052496383666992184, 0.052572158813476565, 0.05243084716796875, 0.05264691162109375, 0.05256499099731445, 0.052450942993164065, 0.05243328094482422, 0.05230172729492188, 0.05231625747680664, 0.052367359161376956, 0.05244518280029297, 0.052506622314453126, 0.052572158813476565, 0.052579872131347655, 0.05274262237548828, 0.052688064575195315, 0.05274297714233398, 0.05274323272705078, 0.05257721710205078, 0.05268889617919922, 0.05256806564331055, 0.05267407989501953, 0.0525255355834961, 0.052555072784423826, 0.05256230545043945, 0.05254790496826172, 0.05239577484130859, 0.05233689498901367, 0.052405918121337894, 0.05276019287109375, 0.052623680114746094, 0.05270774459838867, 0.05267251205444336, 0.052785057067871094, 0.053004383087158206, 0.05287046432495117, 0.05280428695678711, 0.05270937728881836, 0.05271481704711914, 0.0559659194946289, 0.05348108673095703, 0.052412799835205075, 0.05243494415283203, 0.05224179077148437, 0.052462207794189454, 0.05224038314819336, 0.05238982391357422, 0.052426815032958984, 0.0525596809387207, 0.05228153610229492, 0.052373504638671874, 0.052268447875976565, 0.052324832916259764, 0.052160415649414066, 0.052500705718994144, 0.052193279266357424, 0.052405567169189454, 0.05271532821655273, 0.05309254455566406, 0.052851390838623044, 0.05269504165649414, 0.052498432159423826, 0.05270054244995117, 0.052459583282470704, 0.05240275192260742, 0.05228953552246094, 0.052410369873046876, 0.0524653434753418, 0.05238595199584961, 0.05245756912231445, 0.05238790512084961, 0.05250576019287109, 0.05257712173461914, 0.05243289566040039, 0.052555774688720705, 0.05248409652709961, 0.05262745666503906, 0.052802974700927735, 0.052720222473144535, 0.05277219009399414, 0.052642463684082035, 0.05248614501953125, 0.05259468841552734, 0.052441089630126954, 0.05246361541748047, 0.052529151916503904, 0.05268380737304688, 0.052646881103515626, 0.05273155212402344, 0.05259203338623047, 0.05258732986450195, 0.05261734390258789, 0.05275350570678711, 0.05269187164306641, 0.052657470703125, 0.052623809814453124, 0.05279308700561523, 0.0529536018371582, 0.052948158264160154, 0.052671295166015625, 
0.05278515243530273, 0.052798591613769534, 0.055994911193847655, 0.05351417541503906, 0.052536670684814456, 0.052652767181396484, 0.05252710342407227, 0.05252256011962891, 0.052297279357910155, 0.05248294448852539, 0.05237116622924805, 0.05254582214355469, 0.052383743286132815, 0.05276409530639648, 0.05262188720703125, 0.05261894226074219, 0.05244707107543945, 0.052486560821533204, 0.0523240966796875, 0.052627616882324216, 0.05280767822265625, 0.05275996780395508, 0.05266220855712891, 0.05261395263671875, 0.052460895538330075, 0.05251139068603516, 0.0524062385559082, 0.05252243041992188, 0.05236899185180664, 0.05257033538818359, 0.05264054489135742, 0.05255731201171875, 0.05286323165893555, 0.0524409294128418, 0.05252342224121094, 0.05254553604125976, 0.052441089630126954, 0.05271270370483398, 0.052617984771728514, 0.05271347045898438, 0.052762622833251956, 0.05265769577026367, 0.05273443222045898, 0.05261248016357422, 0.05259452819824219, 0.052689697265625, 0.052647937774658204, 0.0525926399230957, 0.052547103881835935, 0.052572639465332034, 0.05256806564331055, 0.05250457763671875, 0.05247523117065429, 0.052421279907226566, 0.052496448516845706, 0.05266835021972656, 0.05259238433837891, 0.05272601699829101, 0.052620609283447264, 0.052814529418945315, 0.052811775207519535, 0.05273395156860351, 0.05281296157836914, 0.05290864181518555, 0.05282022476196289, 0.055807361602783205, 0.05340998458862305, 0.052566814422607425, 0.05260083389282227, 0.052316158294677735, 0.05247180938720703, 0.05219728088378906, 0.05234492874145508, 0.052359169006347656, 0.05240627288818359, 0.05235478210449219, 0.05233692932128906, 0.052391937255859375, 0.05251891326904297, 0.052348926544189454, 0.05229568099975586, 0.0522158088684082, 0.05244851303100586, 0.0529169921875, 0.05290367889404297, 0.05267670440673828, 0.05256617736816406, 0.05243289566040039, 0.05252022552490234, 0.05242726516723633, 0.05243721771240235, 0.05231126403808594, 0.05233891296386719, 0.052376129150390624, 0.05243686294555664, 0.05246131134033203, 0.05237728118896484, 0.052355583190917966, 0.05251500701904297, 0.052418048858642576, 0.05247760009765625, 0.05245014572143555, 0.05276860809326172, 0.052940704345703124, 0.05279894256591797, 0.05261155319213867, 0.052592960357666016, 0.05269504165649414, 0.05271756744384765, 0.05271347045898438, 0.05258649444580078, 0.05248316955566406, 0.05255670547485351, 0.05256332778930664, 0.05278579330444336, 0.05307187271118164, 0.052531070709228515, 0.052416641235351565, 0.05252828979492188, 0.0525750732421875, 0.052736000061035154, 0.052743392944335936, 0.05284534454345703, 0.052829345703125, 0.052884319305419925, 0.05286707305908203, 0.05289984130859375, 0.05279129409790039, 0.055777057647705075, 0.053653472900390624, 0.052671199798583986, 0.052502464294433594, 0.052273502349853514, 0.05253078460693359, 0.05221007919311523, 0.05229676818847656, 0.05232940673828125, 0.052354400634765624, 0.052314273834228514, 0.052414718627929686, 0.05230412673950195, 0.0523612174987793, 0.052387039184570314, 0.052329246520996096, 0.05225376129150391, 0.05269190216064453, 0.052621311187744144, 0.05286502456665039, 0.05281792068481445, 0.052703231811523435, 0.052615169525146485, 0.052768768310546874, 0.05253459167480469, 0.05243769454956055, 0.05243068695068359, 0.052481697082519534, 0.052496288299560545, 0.05242736053466797, 0.05243830490112305, 0.052331230163574216, 0.05240118408203125, 0.05254243087768555, 0.05239807891845703, 0.05257622528076172, 0.052440704345703124, 0.05272198486328125, 0.05271356964111328, 
0.052759998321533205, 0.05271814346313477, 0.052604927062988284, 0.05250252914428711, 0.052587806701660154, 0.05248483276367188, 0.05264998245239258, 0.05251379013061523, 0.05249507141113281, 0.05261135864257813, 0.05260406494140625, 0.05254025650024414, 0.05241190338134766, 0.05250646209716797, 0.05265260696411133, 0.05257561492919922, 0.052636287689208985, 0.05258659362792969, 0.05284966278076172, 0.0529090576171875, 0.05284864044189453, 0.052731136322021484, 0.05273062515258789, 0.05277459335327148, 0.05639785766601563, 0.05381907272338867, 0.05262137603759766, 0.052566238403320316, 0.05227926254272461, 0.05244521713256836, 0.052235870361328124, 0.052472225189208986, 0.05263974380493164, 0.05259648132324219, 0.052392192840576175, 0.052389537811279294, 0.05234518432617188, 0.05247932815551758, 0.052337310791015626, 0.05232217788696289, 0.05236953735351563, 0.05259193420410156, 0.053047489166259766, 0.05298566436767578, 0.05285283279418945, 0.05282585525512695, 0.052603744506835935, 0.05263926315307617, 0.052461246490478515, 0.05246569442749023, 0.052380416870117186, 0.05245280075073242, 0.05240585708618164, 0.052503166198730467, 0.052434558868408206, 0.05245132827758789, 0.05238150405883789, 0.05250892639160156, 0.052386463165283205, 0.05260060882568359, 0.05254361724853516, 0.05269308853149414, 0.05295084762573242, 0.052873409271240235, 0.05272361755371094, 0.0526328010559082, 0.052661121368408205, 0.052774368286132814, 0.05253174209594726, 0.052729854583740236, 0.05260083389282227, 0.052506622314453126, 0.052621311187744144, 0.052703231811523435, 0.052539134979248045, 0.05253555297851562, 0.05252022552490234, 0.0527305908203125, 0.05267251205444336, 0.05286092758178711, 0.05278700637817383, 0.05281744003295898, 0.05290415954589844, 0.05282860946655273, 0.05285836791992188, 0.05304115295410156, 0.05284233474731445, 0.05576950454711914, 0.05347971343994141, 0.052762016296386716, 0.05253823852539063, 0.05240342330932617, 0.05248284912109375, 0.05249635314941406, 0.05252092742919922, 0.05248006439208985, 0.0526110725402832, 0.05243289566040039, 0.052514816284179686, 0.05237750244140625, 0.052367454528808595, 0.052313343048095706, 0.052404991149902345, 0.05257558441162109, 0.0525953598022461, 0.05283567810058594, 0.052873119354248044, 0.052831008911132814, 0.052819393157958985, 0.052741950988769534, 0.05261590576171875, 0.052400127410888675, 0.05249001693725586, 0.05260031890869141, 0.05245993423461914, 0.052453697204589846, 0.052459232330322264, 0.052519199371337894, 0.052544513702392576, 0.05264281463623047, 0.05250252914428711, 0.05245888137817383, 0.05254777526855469, 0.05286547088623047, 0.05280771255493164, 0.05285193634033203, 0.05268892669677734, 0.05270806503295898, 0.05294403076171875, 0.05276924896240234, 0.052619647979736325, 0.05253494262695312, 0.0526196174621582, 0.052614559173583986, 0.052521087646484374, 0.052576671600341796, 0.05250259017944336, 0.05249436950683594, 0.052703041076660156, 0.052502368927001955, 0.0525579833984375, 0.05260073471069336, 0.052784481048583985, 0.052755359649658204, 0.052811328887939456, 0.05271392059326172, 0.05306367874145508, 0.053012191772460936, 0.05296771240234375, 0.05275852966308594, 0.05530476760864258, 0.053395454406738284, 0.05252025604248047, 0.05239468765258789, 0.05238070297241211, 0.05249327850341797, 0.052385726928710935, 0.052418624877929684, 0.052367359161376956, 0.05252710342407227, 0.052383743286132815, 0.052612895965576174, 0.052343006134033206, 0.052506622314453126, 0.05244851303100586, 0.052419326782226563, 
0.052365310668945314, 0.05257625579833984, 0.05290598297119141, 0.053286945343017575, 0.05311894226074219, 0.05266361618041992, 0.05262201690673828, 0.052668319702148435, 0.05253539276123047, 0.05249951934814453, 0.05254646301269531, 0.052491649627685544, 0.05242537689208984, 0.05266022491455078, 0.052490238189697266, 0.05254963302612305, 0.05245849609375, 0.052499454498291014, 0.052515998840332034, 0.05255395126342773, 0.052703872680664066, 0.05279257583618164, 0.0529186897277832, 0.0530230712890625, 0.052741470336914065, 0.05270783996582031, 0.05259689712524414, 0.0526827507019043, 0.05263974380493164, 0.05258406448364258, 0.05250086212158203, 0.052442752838134765, 0.05250870513916016, 0.05277526473999023, 0.05259001541137695, 0.05267494583129883, 0.05257209777832031, 0.05270256042480469, 0.05278780746459961, 0.05277849578857422, 0.05276140975952148, 0.0530118408203125, 0.052865665435791014, 0.05295439910888672, 0.05277711868286133, 0.052755008697509764, 0.05288902282714844, 0.05545574569702148, 0.053321727752685545, 0.05238988876342773, 0.05242265701293945, 0.05246156692504883, 0.05260246276855469, 0.052338977813720704, 0.052487998962402346, 0.05244908905029297, 0.05258291244506836, 0.05241241455078125, 0.05255353546142578, 0.052394081115722656, 0.052450912475585934, 0.05248051071166992, 0.05242265701293945, 0.052359169006347656, 0.05251036834716797, 0.05285718536376953, 0.05316185760498047, 0.05280374526977539, 0.0526888313293457, 0.0524892463684082, 0.052462593078613284, 0.05246156692504883, 0.05247331237792969, 0.05249871826171875, 0.052471710205078126, 0.052531551361083985, 0.05264179229736328, 0.05264384078979492, 0.05246566390991211, 0.05237696075439453, 0.05245606231689453, 0.052504352569580075, 0.05250889587402344, 0.05255478286743164, 0.05268988800048828, 0.052908031463623044, 0.053049343109130856, 0.052791103363037106, 0.0527525749206543, 0.05275043106079102, 0.05280527877807617, 0.05267686462402344, 0.05264998245239258, 0.052670463562011716, 0.05270713424682617, 0.05251910400390625, 0.052582401275634766, 0.05244927978515625, 0.05260083389282227, 0.05267660903930664, 0.052615169525146485, 0.05271756744384765, 0.052744129180908206, 0.05280080032348633, 0.05303580856323242, 0.05287651062011719, 0.05299280166625977, 0.05277027130126953, 0.052791839599609376, 0.05277811050415039]",tokens/s,18.994658958780605,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 689, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 356, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 162545 has 14.71 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 85.33 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 170957 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 170586 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,883.310592,12530.352128,0.0,12127.830016,12122.08896,s,1,7.24532958984375,7.24532958984375,0.0,7.24532958984375,7.24532958984375,7.24532958984375,7.24532958984375,[7.24532958984375],,kWh,7.867758762495215e-06,8.608510776885102e-07,4.873615010003718e-06,1.3602224850187443e-05,,MB,1210.175488,12702.318592,0.0,12289.31072,12248.5888,s,10,11.011118041992187,1.1011118041992187,0.002527680214826469,1.1015615844726563,1.1044215698242186,1.1045426940917968,1.1046395935058595,"[1.0956488037109375, 1.1018443603515624, 1.099454833984375, 1.1017294921875, 1.1010318603515625, 1.098731689453125, 1.102224853515625, 1.1013936767578125, 1.1043946533203124, 1.104663818359375]",tokens/s,232.49228554603997,kWh,3.224677841291547e-05,3.5550727494187885e-06,2.1305239266399682e-05,5.710709042873394e-05,tokens/kWh,4482805.866628276,MB,1258.647552,12710.7072,0.0,12297.699328,12248.59136,s,10,33.670837890625,3.3670837890625003,0.0018022367897920194,3.367177734375,3.3694119873046873,3.369713562011719,3.369954821777344,"[3.364392333984375, 3.36546875, 3.3651826171875, 3.3680283203125, 3.36572119140625, 3.366473876953125, 3.37001513671875, 3.3683291015625, 3.369344970703125, 
3.367881591796875]",tokens/s,18.71055309186147,kWh,9.833783202541782e-05,1.0848679123737598e-05,6.537002451820033e-05,0.00017455653566735571,tokens/kWh,360914.5871229719,,s,630,33.668360080719,0.05344184139796666,0.0002694710216109061,0.053440513610839846,0.05368627166748047,0.053757537841796876,0.054831900367736816,"[0.05479875183105469, 0.0534571533203125, 0.05297891235351562, 0.052963871002197266, 0.05293644714355469, 0.05304345703125, 0.05306537628173828, 0.05295868682861328, 0.053005184173583984, 0.05322348785400391, 0.05309798431396484, 0.05316447830200195, 0.05325823974609375, 0.05331148910522461, 0.05323471832275391, 0.05319148635864258, 0.05317574310302734, 0.05339542388916016, 0.05345536041259766, 0.0534653434753418, 0.05331760025024414, 0.05339753723144531, 0.05324390411376953, 0.05322246551513672, 0.05314761734008789, 0.05335855865478516, 0.05316211318969727, 0.053332321166992186, 0.05332345581054688, 0.0532151985168457, 0.0533098258972168, 0.0533939208984375, 0.05326847839355469, 0.05355014419555664, 0.053478336334228514, 0.0534466552734375, 0.05378803253173828, 0.0536129264831543, 0.053540481567382815, 0.05351462554931641, 0.05337446212768555, 0.053499839782714845, 0.05339740753173828, 0.05359523010253906, 0.05348947143554687, 0.05346713638305664, 0.053501953125, 0.053467201232910155, 0.05361248016357422, 0.053704673767089844, 0.0535327033996582, 0.053609664916992185, 0.05338995361328125, 0.05354515075683594, 0.05363449478149414, 0.053537246704101565, 0.0536104621887207, 0.05369561767578125, 0.05355212783813477, 0.053579776763916016, 0.05345820617675781, 0.05352521514892578, 0.05357567977905273, 0.05468924713134766, 0.053676158905029296, 0.05329743957519531, 0.05310873413085938, 0.05299363327026367, 0.05312089538574219, 0.05298556900024414, 0.053062080383300785, 0.052956768035888675, 0.05324265670776367, 0.053174270629882815, 0.05316540908813477, 0.05327529525756836, 0.053365825653076175, 0.05330419158935547, 0.05326444625854492, 0.05326553726196289, 0.05348543930053711, 0.053400577545166014, 0.053728801727294925, 0.053479904174804686, 0.05357993698120117, 0.05321459197998047, 0.053172737121582034, 0.053228897094726564, 0.05320473480224609, 0.053182689666748044, 0.05338550567626953, 0.05324534225463867, 0.05334012985229492, 0.05344972610473633, 0.05346464157104492, 0.05333651351928711, 0.05354086303710937, 0.053276161193847656, 0.05342259216308594, 0.053456127166748045, 0.053598976135253905, 0.053602302551269534, 0.053577632904052735, 0.05328879928588867, 0.053346561431884765, 0.05331763076782227, 0.05337497711181641, 0.05332377624511719, 0.05335593414306641, 0.053363296508789064, 0.05345062255859375, 0.05351968002319336, 0.053614433288574216, 0.05358281707763672, 0.05353881454467774, 0.053467296600341795, 0.05357350540161133, 0.05369772720336914, 0.05374166488647461, 0.05376015853881836, 0.05374211120605469, 0.05360540771484375, 0.05367827224731445, 0.05349385452270508, 0.053529281616210934, 0.05352758407592773, 0.05484543991088867, 0.05349481582641601, 0.05313840103149414, 0.05312220764160156, 0.0528884162902832, 0.053087711334228516, 0.053062175750732424, 0.0529879035949707, 0.05309952163696289, 0.05320127868652344, 0.05315852737426758, 0.05322751998901367, 0.053316864013671875, 0.05336959838867188, 0.05322751998901367, 0.05324390411376953, 0.0532184944152832, 0.0533430404663086, 0.05348726272583008, 0.05350230407714844, 0.05334019088745117, 0.053399040222167966, 0.05326691055297852, 0.05312492752075195, 0.05317855834960938, 0.05331763076782227, 0.05332710266113281, 
0.05335036849975586, 0.053277473449707034, 0.053367969512939456, 0.053427040100097654, 0.05350140762329102, 0.053475521087646485, 0.0535002555847168, 0.053471233367919924, 0.05352239990234375, 0.053467105865478516, 0.05379059219360351, 0.05359203338623047, 0.053563617706298826, 0.05335039901733398, 0.05335039901733398, 0.05332377624511719, 0.05335859298706055, 0.05340553665161133, 0.05348108673095703, 0.05331635284423828, 0.05343209457397461, 0.05344879913330078, 0.05355510330200195, 0.0534917106628418, 0.05377020645141602, 0.05347126388549805, 0.05357894515991211, 0.05359408187866211, 0.0536866569519043, 0.05364579010009766, 0.053800704956054685, 0.05357136154174805, 0.05351180648803711, 0.053469696044921876, 0.05348387145996094, 0.05356447982788086, 0.054870494842529295, 0.05343827056884766, 0.05313350296020508, 0.05304729461669922, 0.052940608978271485, 0.05297769546508789, 0.05317033767700195, 0.05327667236328125, 0.053250049591064455, 0.0532889289855957, 0.05310879898071289, 0.05325411224365234, 0.05317631912231445, 0.05322761535644531, 0.05323129653930664, 0.05334572982788086, 0.05330361557006836, 0.053401313781738284, 0.05351500701904297, 0.05351833724975586, 0.053286911010742184, 0.053379070281982424, 0.05330553436279297, 0.053268287658691404, 0.05314096069335938, 0.05347382354736328, 0.053448543548583985, 0.053499393463134766, 0.053440513610839846, 0.05357839965820312, 0.05344255828857422, 0.0533520622253418, 0.05339136123657227, 0.0534653434753418, 0.05362700653076172, 0.05391500854492187, 0.053650047302246096, 0.05362073516845703, 0.05357075119018555, 0.05356227111816406, 0.05375990295410156, 0.05370675277709961, 0.05340979385375977, 0.05342617416381836, 0.05344460678100586, 0.05357139205932617, 0.05347331237792969, 0.05364547348022461, 0.053596160888671876, 0.05361868667602539, 0.053343582153320315, 0.053464832305908205, 0.05350083160400391, 0.05362428665161133, 0.05362128067016601, 0.05368627166748047, 0.05355241775512695, 0.053557441711425784, 0.053557441711425784, 0.053733726501464844, 0.053585567474365235, 0.05360166549682617, 0.05340873718261719, 0.054906494140625, 0.053453441619873046, 0.05312102508544922, 0.05299388885498047, 0.05299420928955078, 0.0531701774597168, 0.052948543548583984, 0.053098175048828126, 0.05322134399414063, 0.05325494384765625, 0.053177345275878904, 0.05332598495483398, 0.05330963134765625, 0.05325481414794922, 0.05326847839355469, 0.05332931137084961, 0.05320355224609375, 0.05321926498413086, 0.05336201477050781, 0.053516574859619144, 0.053352161407470705, 0.053405567169189455, 0.0532757453918457, 0.05312076950073242, 0.053212257385253904, 0.053308319091796875, 0.05331353759765625, 0.05372867202758789, 0.0532911376953125, 0.05341436767578125, 0.05340774536132813, 0.053325630187988284, 0.053391551971435545, 0.05347897720336914, 0.05325868988037109, 0.05341548919677734, 0.05348400115966797, 0.05352444839477539, 0.053438209533691404, 0.05356700897216797, 0.05361532974243164, 0.05360025787353516, 0.053395103454589844, 0.05332796859741211, 0.05343052673339844, 0.05366563034057617, 0.05358796691894531, 0.05351808166503906, 0.053367198944091795, 0.05349587249755859, 0.05352163314819336, 0.05365139389038086, 0.05342649459838867, 0.05354748916625977, 0.053585311889648435, 0.05363119888305664, 0.05357196807861328, 0.053596160888671876, 0.05357363128662109, 0.05365331268310547, 0.05361481475830078, 0.05363299179077148, 0.053610496520996094, 0.055053470611572265, 0.05348233413696289, 0.0531168327331543, 0.053026912689208984, 0.05303411102294922, 
0.05305836868286133, 0.05302908706665039, 0.05300979232788086, 0.05319644927978515, 0.05330207824707031, 0.05325619125366211, 0.05335039901733398, 0.053308990478515624, 0.05338518524169922, 0.05328057479858399, 0.05314179229736328, 0.05301081466674805, 0.05335244750976562, 0.05346303939819336, 0.053634624481201175, 0.05349625778198242, 0.053432193756103516, 0.053235294342041016, 0.05326054382324219, 0.053164192199707035, 0.05347052764892578, 0.053222206115722655, 0.05340364837646484, 0.053332191467285156, 0.053546272277832034, 0.053397472381591794, 0.05343695831298828, 0.0534090576171875, 0.053381439208984374, 0.05326095962524414, 0.05374745559692383, 0.05346918487548828, 0.05365555191040039, 0.053524192810058595, 0.05356163024902344, 0.053500064849853514, 0.05353200149536133, 0.0534277458190918, 0.053473857879638674, 0.05324803161621094, 0.05341427230834961, 0.053550079345703126, 0.05348575973510742, 0.05350656127929688, 0.053501953125, 0.05348720169067383, 0.0535682258605957, 0.05345203018188477, 0.05355120086669922, 0.05369475173950195, 0.053595680236816406, 0.053754177093505856, 0.05375644683837891, 0.053651454925537106, 0.05367206573486328, 0.05354086303710937, 0.05349158477783203, 0.05346847915649414, 0.05484553527832031, 0.05357567977905273, 0.05328646469116211, 0.05322182464599609, 0.05308620834350586, 0.05303286361694336, 0.05297571182250976, 0.05313740921020508, 0.053026817321777345, 0.05333935928344727, 0.05331024169921875, 0.053282081604003904, 0.05327088165283203, 0.05341785430908203, 0.053297664642333986, 0.05319379043579102, 0.0530134391784668, 0.05334630584716797, 0.05348761749267578, 0.05342563247680664, 0.05333647918701172, 0.05340172958374023, 0.05336812973022461, 0.05322016143798828, 0.053176193237304686, 0.05323510360717774, 0.05351484680175781, 0.053354496002197264, 0.05339750289916992, 0.05353580856323242, 0.05328134536743164, 0.05331542587280273, 0.05339395141601563, 0.05345683288574219, 0.05340371322631836, 0.05379072189331055, 0.05366908645629883, 0.053803489685058596, 0.05359443283081055, 0.053694465637207034, 0.05368627166748047, 0.05356550216674805, 0.053413185119628906, 0.0535865592956543, 0.05340313720703125, 0.053561569213867184, 0.05361897659301758, 0.053776161193847656, 0.05358371353149414, 0.05371049499511719, 0.05366188812255859, 0.05367763137817383, 0.05359715270996094, 0.053716510772705076, 0.053758430480957034, 0.05365683364868164, 0.05390412902832031, 0.053766143798828124, 0.05369347381591797, 0.05391827011108399, 0.05359862518310547, 0.053667839050292966, 0.05373132705688476, 0.054949825286865234, 0.0536371841430664, 0.05326847839355469, 0.05329919815063477, 0.05305548858642578, 0.05313740921020508, 0.05297724914550781, 0.05303244781494141, 0.05307072067260742, 0.05318368148803711, 0.05326448059082031, 0.05334640121459961, 0.05324176025390625, 0.05351295852661133, 0.05342595291137695, 0.05323798370361328, 0.05316966247558594, 0.05349427032470703, 0.05345049667358399, 0.05356691360473633, 0.053473857879638674, 0.053431808471679686, 0.05332064056396484, 0.05326579284667969, 0.053144001007080076, 0.053267616271972656, 0.053205856323242186, 0.05334220886230469, 0.05333196640014649, 0.053440513610839846, 0.05346214294433594, 0.05337382507324219, 0.05341302490234375, 0.053489631652832034, 0.05342092895507813, 0.05356531143188477, 0.05345683288574219, 0.05361888122558594, 0.05348697662353516, 0.053508735656738284, 0.05345894241333008, 0.05359939193725586, 0.05348188781738281, 0.05337724685668945, 0.05325436782836914, 0.05359820938110352, 
0.05333382415771484, 0.05356118392944336, 0.05356959915161133, 0.053569408416748045, 0.05347779083251953, 0.05367603302001953, 0.0534653434753418, 0.053702400207519534, 0.05358937454223633, 0.054086273193359374, 0.05365760040283203, 0.05364112091064453, 0.05355875015258789, 0.053865089416503906, 0.05373747253417969, 0.05389644622802734, 0.05357235336303711, 0.05464656066894531, 0.05343027114868164, 0.053109310150146485, 0.05309190368652344, 0.05306355285644531, 0.05308662414550781, 0.05301241683959961, 0.0532217903137207, 0.05317558288574219, 0.053233665466308595, 0.053269153594970704, 0.0534015998840332, 0.05317631912231445, 0.05344992065429687, 0.05332051086425781, 0.05320499038696289, 0.053184513092041016, 0.0535470085144043, 0.053430110931396484, 0.053418144226074216, 0.05337497711181641, 0.053333118438720704, 0.053457790374755856, 0.053362686157226565, 0.0533831672668457, 0.053429759979248044, 0.053473472595214844, 0.053544960021972655, 0.053438209533691404, 0.053479999542236326, 0.05340153503417969, 0.05351347351074219, 0.05344134521484375, 0.05354492950439453, 0.053401630401611326, 0.05375590515136719, 0.053610496520996094, 0.053585918426513675, 0.05347532653808594, 0.053624992370605466, 0.05341129684448242, 0.05349980926513672, 0.05329910278320313, 0.053536544799804686, 0.05368707275390625, 0.05350515365600586, 0.05349587249755859, 0.05352531051635742, 0.05333734512329102, 0.05352278518676758, 0.05359836959838867, 0.05362918472290039, 0.05365756988525391, 0.053714656829833986, 0.05379836654663086, 0.053737663269042966, 0.05372556686401367, 0.053895454406738284, 0.05373952102661133, 0.0539087028503418, 0.053543071746826175, 0.053609088897705076, 0.053602302551269534, 0.05500819015502929, 0.05353062438964844, 0.05329100799560547, 0.05309798431396484, 0.05301094436645508, 0.0530882568359375, 0.05309235382080078, 0.05319216156005859, 0.053193248748779294, 0.05343420791625977, 0.053237918853759766, 0.053222496032714846, 0.05343494415283203, 0.053358913421630856, 0.05322713470458985, 0.053233726501464844, 0.05312137603759766, 0.053321857452392575, 0.05351615905761719, 0.05346060943603516, 0.0532913932800293, 0.05348531341552734, 0.05335855865478516, 0.05332774353027344, 0.05323219299316406, 0.05319366455078125, 0.05327110290527344, 0.053545215606689456, 0.053399646759033206, 0.053428256988525394, 0.05359203338623047, 0.05341532897949219, 0.05343088150024414, 0.05333401489257812, 0.053349376678466794, 0.05354393768310547, 0.05345280075073242, 0.05350400161743164, 0.05344255828857422, 0.05354617691040039, 0.05352297592163086, 0.053581886291503907, 0.05335577774047852, 0.053391326904296876, 0.05349603271484375, 0.05360886383056641, 0.053674270629882816, 0.053499744415283206, 0.05348575973510742, 0.05354217529296875, 0.053406494140625, 0.053669631958007814, 0.05352444839477539, 0.053587646484375, 0.05384457778930664, 0.05369241714477539, 0.053626880645751954, 0.05366719818115234, 0.05359654235839844, 0.053782176971435544, 0.053693023681640625, 0.053689952850341796, 0.05346953582763672]",tokens/s,18.711930087761676,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) 
Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1189, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1001, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 734, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return 
forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 556, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 
3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1116, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 902, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 691, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 286, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 171312 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking 
backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 383, in __init__ self.fc1 = nn.Linear(self.embed_dim, config.ffn_dim, bias=config.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.27 GiB. GPU 0 has a total capacity of 14.74 GiB of which 172.12 MiB is free. Process 125710 has 14.57 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 2.28 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 718, in __init__ self.dense_4h_to_h = 
nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 414.12 MiB is free. Process 192536 has 14.33 GiB memory in use. Of the allocated memory 14.22 GiB is allocated by PyTorch, and 1.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,742.989824,11799.560192,0.0,11404.312576,11388.314624,s,1,7.22953173828125,7.22953173828125,0.0,7.22953173828125,7.22953173828125,7.22953173828125,7.22953173828125,[7.22953173828125],,kWh,6.060062895841157e-06,6.604853918541069e-07,3.1891692180013864e-06,9.90971750569665e-06,,MB,1045.102592,11812.143104,0.0,11406.409728,11107.92192,s,10,4.078596130371094,0.4078596130371094,0.006914636060580647,0.4093941650390625,0.41454249267578125,0.4161641296386719,0.4174614392089844,"[0.3908428344726563, 0.40325701904296873, 0.4048968811035156, 0.40763894653320315, 0.4104405212402344, 0.41418212890625, 0.40905242919921875, 0.41076370239257814, 0.40973590087890627, 0.4177857666015625]",tokens/s,627.6669516104005,kWh,1.1848457179999816e-05,1.3057261868365596e-06,7.91846189032011e-06,2.1072645257156487e-05,tokens/kWh,12148451.078445397,MB,1049.894912,11814.240256,0.0,11408.50688,11305.031168,s,10,33.25393603515625,3.325393603515625,0.004680598481783512,3.3244874267578126,3.331663720703125,3.3319961669921874,3.332262124023438,"[3.32085595703125, 3.32172607421875, 3.32499609375, 3.320064453125, 3.3196875, 3.323978759765625, 3.32793017578125, 3.33232861328125, 3.33158984375, 3.330778564453125]",tokens/s,18.94512575395467,kWh,9.727351534833359e-05,1.0730509639033732e-05,6.439720707327956e-05,0.0001724012320606469,tokens/kWh,365426.6228088092,,s,630,33.250572078704806,0.05277868583921402,0.00030674911309846537,0.05275551986694336,0.053081284332275394,0.05319347534179687,0.054124218521118164,"[0.0541383056640625, 0.05286969757080078, 0.05241062545776367, 0.05242035293579102, 0.052539424896240236, 0.052261089324951174, 0.05237136077880859, 0.052418399810791015, 0.05233817672729492, 0.052375518798828125, 0.05235324859619141, 0.052402145385742185, 0.05252556610107422, 0.05238297653198242, 0.05253529739379883, 0.05237820816040039, 0.05243628692626953, 
0.052414497375488284, 0.05265177536010742, 0.05280767822265625, 0.05262160110473633, 0.05304336166381836, 0.052717151641845705, 0.052778976440429684, 0.05249491119384766, 0.05253279876708984, 0.052677471160888674, 0.052567424774169924, 0.052643966674804685, 0.052544033050537106, 0.05244662475585937, 0.05290854263305664, 0.052652320861816405, 0.05259030532836914, 0.05252505493164063, 0.052580352783203124, 0.05269852828979492, 0.05272431945800781, 0.053174270629882815, 0.05282179260253906, 0.053018753051757815, 0.052811870574951174, 0.05289372634887695, 0.0526520004272461, 0.05276435089111328, 0.05268259048461914, 0.052684257507324216, 0.052634624481201174, 0.05271756744384765, 0.052739585876464844, 0.05284710311889648, 0.05268889617919922, 0.05295718383789062, 0.05281932830810547, 0.05289843368530273, 0.05272371292114258, 0.05284592056274414, 0.05336131286621094, 0.05306982421875, 0.053190689086914066, 0.052962303161621094, 0.052933120727539064, 0.05286550521850586, 0.054295841217041015, 0.05278310394287109, 0.05225545501708984, 0.05222348785400391, 0.05213471984863281, 0.05276435089111328, 0.05241856002807617, 0.05244518280029297, 0.0523612174987793, 0.05259823989868164, 0.05237583923339844, 0.05227657699584961, 0.05245225524902344, 0.05256185531616211, 0.05246105575561524, 0.05245990371704102, 0.0522856330871582, 0.05280972671508789, 0.05276387023925781, 0.052833057403564455, 0.05261711883544922, 0.05257030487060547, 0.05251478576660156, 0.05252822494506836, 0.052620094299316404, 0.05253500747680664, 0.05258812713623047, 0.05267324829101563, 0.052838401794433595, 0.052598785400390625, 0.05249017715454102, 0.05257353591918945, 0.05297020721435547, 0.05268889617919922, 0.05284044647216797, 0.05280115127563476, 0.05288179016113281, 0.053029022216796874, 0.05291356658935547, 0.052753952026367186, 0.05279212951660156, 0.05269014358520508, 0.052716415405273435, 0.05270233535766602, 0.05283651351928711, 0.05289884948730469, 0.052725440979003904, 0.05300569534301758, 0.052718208312988284, 0.05275033569335937, 0.0526295051574707, 0.05269289779663086, 0.05280547332763672, 0.05272003173828125, 0.05290387344360352, 0.05283190536499023, 0.053141761779785156, 0.05346918487548828, 0.053028865814208986, 0.052942848205566405, 0.053008384704589843, 0.05283375930786133, 0.05295772933959961, 0.0541822738647461, 0.05274176025390625, 0.05234732818603516, 0.052550945281982425, 0.05255356979370117, 0.05251718521118164, 0.05244704055786133, 0.05243376159667969, 0.05239174270629883, 0.05232963180541992, 0.052342655181884766, 0.052356063842773436, 0.052547584533691405, 0.05264169692993164, 0.05256614303588867, 0.052591743469238283, 0.05249110412597656, 0.052542720794677734, 0.05268278503417969, 0.05272003173828125, 0.05251308822631836, 0.052518753051757815, 0.05262556838989258, 0.05255782318115235, 0.05266960144042969, 0.05263446426391601, 0.05271779251098633, 0.052727584838867185, 0.05248006439208985, 0.05255161666870117, 0.0526025276184082, 0.05267695999145508, 0.05294230270385742, 0.052908065795898435, 0.05303123092651367, 0.05357177734375, 0.05297488021850586, 0.052861663818359376, 0.05295539093017578, 0.05288499069213867, 0.053065982818603516, 0.052876960754394534, 0.05283795166015625, 0.05278799819946289, 0.0530882568359375, 0.05285174560546875, 0.05284281539916992, 0.05268764877319336, 0.05266985702514648, 0.05278153610229492, 0.052770782470703125, 0.05302217483520508, 0.05281849670410156, 0.05310025787353516, 0.05289769744873047, 0.05295756912231445, 0.05297151947021484, 0.05296102523803711, 
0.052888961791992185, 0.0530645751953125, 0.053065727233886716, 0.052951038360595705, 0.05332992172241211, 0.05417824172973633, 0.05289571380615234, 0.05244675064086914, 0.05241219329833984, 0.052550430297851565, 0.05248604965209961, 0.05226015853881836, 0.05234479904174805, 0.052349056243896484, 0.05243145751953125, 0.05271142578125, 0.05235302352905274, 0.05244480133056641, 0.052440608978271484, 0.05248700714111328, 0.0523691520690918, 0.052289249420166016, 0.05249897766113281, 0.052850368499755856, 0.05290611267089844, 0.052828254699707033, 0.052533344268798826, 0.052478240966796874, 0.05251430511474609, 0.05239318466186523, 0.052429824829101565, 0.05249622344970703, 0.052488449096679685, 0.052442623138427735, 0.052731391906738284, 0.05265296173095703, 0.05241439819335938, 0.052496158599853515, 0.05253763198852539, 0.052545055389404294, 0.05320268630981445, 0.05318729782104492, 0.052910079956054686, 0.052921409606933596, 0.05282297515869141, 0.05267251205444336, 0.05267171096801758, 0.05290063858032226, 0.05263161468505859, 0.052770751953125, 0.05267865753173828, 0.05271551895141602, 0.05280691146850586, 0.05285145568847656, 0.052733535766601565, 0.052744449615478514, 0.05276073455810547, 0.05272576141357422, 0.05313238525390625, 0.052916385650634765, 0.052813888549804684, 0.05304143905639649, 0.052922782897949217, 0.0530882568359375, 0.052909854888916016, 0.05281814575195312, 0.05285273742675781, 0.052891647338867184, 0.054089729309082034, 0.05288547134399414, 0.05237251281738281, 0.05233337783813476, 0.05233478546142578, 0.052393600463867186, 0.05241241455078125, 0.0523823356628418, 0.05242227172851562, 0.05236083221435547, 0.05278908920288086, 0.05237036895751953, 0.052472671508789065, 0.05242902374267578, 0.05275920104980469, 0.052555679321289066, 0.05271356964111328, 0.0526192626953125, 0.05252048110961914, 0.05265251159667969, 0.05267670440673828, 0.05266217422485352, 0.05276163101196289, 0.05257321548461914, 0.052557758331298825, 0.052499584197998043, 0.05245792007446289, 0.05243948745727539, 0.05244927978515625, 0.05246771240234375, 0.0525513916015625, 0.052494625091552734, 0.05247180938720703, 0.05246361541748047, 0.05320028686523438, 0.0529920654296875, 0.052733535766601565, 0.05265708923339844, 0.05290393447875977, 0.052724864959716795, 0.05337948989868164, 0.052705760955810546, 0.05294899368286133, 0.05284864044189453, 0.052872543334960935, 0.052568737030029296, 0.05271347045898438, 0.05271273422241211, 0.05285551834106445, 0.052708576202392575, 0.05266716766357422, 0.05255782318115235, 0.05283356857299805, 0.052711776733398434, 0.053072158813476565, 0.052754528045654295, 0.05274185562133789, 0.05279260635375976, 0.05277312088012695, 0.05285289764404297, 0.05288201522827148, 0.05281587219238281, 0.052967422485351565, 0.05408150482177734, 0.052770591735839846, 0.05258671951293945, 0.05242879867553711, 0.052424705505371094, 0.052391712188720706, 0.05248255920410156, 0.05252243041992188, 0.05233078384399414, 0.05242675018310547, 0.05241424179077148, 0.05258204650878906, 0.0529697265625, 0.05251513671875, 0.05253529739379883, 0.05252710342407227, 0.05259468841552734, 0.052711166381835935, 0.05284889602661133, 0.052830432891845705, 0.05274591827392578, 0.052676288604736325, 0.05271798324584961, 0.05254553604125976, 0.05266783905029297, 0.05261983871459961, 0.05264787292480469, 0.052617279052734375, 0.052623489379882815, 0.05266960144042969, 0.05278793716430664, 0.05263359832763672, 0.05275651168823242, 0.0525700798034668, 0.05270528030395508, 0.052682369232177735, 
0.052834686279296876, 0.05288140869140625, 0.05290188980102539, 0.05278307342529297, 0.052817119598388675, 0.05302777481079102, 0.052786815643310545, 0.05273420715332031, 0.05282406234741211, 0.05277084732055664, 0.05295491027832031, 0.052744384765625, 0.053053760528564455, 0.05283808135986328, 0.05270528030395508, 0.05282611083984375, 0.05281792068481445, 0.05300617599487305, 0.05300204849243164, 0.05279369735717773, 0.052999488830566405, 0.05307696151733399, 0.052870880126953124, 0.05287097549438476, 0.053231136322021484, 0.05285520172119141, 0.053017887115478515, 0.05406307220458984, 0.05285583877563477, 0.05234790420532227, 0.052397247314453124, 0.052420894622802736, 0.05233513641357422, 0.05240537643432617, 0.05258329772949219, 0.052569534301757814, 0.05259273529052735, 0.05240812683105469, 0.05234960174560547, 0.05247334289550781, 0.0528983039855957, 0.052566017150878906, 0.05257212829589844, 0.05280361557006836, 0.05270937728881836, 0.05275839996337891, 0.05278937530517578, 0.052746238708496096, 0.05259823989868164, 0.05263824081420899, 0.05264003372192383, 0.052737567901611326, 0.05265340805053711, 0.05283107376098633, 0.052762622833251956, 0.05260489654541016, 0.05267027282714844, 0.052863201141357424, 0.05324579238891602, 0.05301776123046875, 0.052863998413085936, 0.05271756744384765, 0.05290963363647461, 0.05293072128295898, 0.05283663940429688, 0.05303068923950195, 0.052989246368408204, 0.052978591918945314, 0.052899486541748045, 0.05295548629760742, 0.05285472106933594, 0.05299820709228516, 0.053017791748046876, 0.05292319869995117, 0.05290800094604492, 0.052875297546386715, 0.052784961700439455, 0.05288191986083984, 0.052802398681640626, 0.05292486572265625, 0.05304348754882812, 0.05314982223510742, 0.05293011093139648, 0.05305785751342774, 0.053055553436279296, 0.05322348785400391, 0.05311654281616211, 0.05297628784179687, 0.05293641662597656, 0.05308124923706055, 0.05461270523071289, 0.053144832611083985, 0.05265071868896484, 0.05251686477661133, 0.05243417739868164, 0.05247286224365234, 0.052538944244384767, 0.052396190643310546, 0.05261529541015625, 0.05266729736328125, 0.05251379013061523, 0.05374358367919922, 0.052701343536376954, 0.05273788833618164, 0.05269417572021484, 0.05274915313720703, 0.05264777755737305, 0.05265423965454102, 0.05267827224731445, 0.05319510269165039, 0.05292233657836914, 0.05308422470092773, 0.05280767822265625, 0.05284659194946289, 0.05279888153076172, 0.05276732635498047, 0.05276374435424805, 0.052809982299804686, 0.05270595169067383, 0.05268668746948242, 0.052719425201416016, 0.05276079940795898, 0.052754047393798825, 0.05283891296386719, 0.052868640899658204, 0.0528421745300293, 0.05279414367675781, 0.05278617477416992, 0.05300128173828125, 0.05289971160888672, 0.05314156723022461, 0.05289267349243164, 0.05293913650512695, 0.052738494873046875, 0.052961280822753906, 0.0527341423034668, 0.05281788635253906, 0.052799518585205076, 0.052811775207519535, 0.05273011016845703, 0.052813568115234376, 0.05289567947387695, 0.052916385650634765, 0.05315081787109375, 0.05319148635864258, 0.0529837760925293, 0.05310262298583984, 0.05332735824584961, 0.05320755386352539, 0.05337699127197266, 0.053346046447753905, 0.05318070220947266, 0.053106689453125, 0.05466521453857422, 0.05334339141845703, 0.052703102111816405, 0.05247894287109375, 0.05252828979492188, 0.05254377746582031, 0.052410816192626955, 0.052627582550048825, 0.05255168151855469, 0.05253324890136719, 0.052502113342285155, 0.05256233596801758, 0.052587646484375, 0.05255487823486328, 
0.052612545013427735, 0.05280185699462891, 0.052482177734375, 0.05268876647949219, 0.052728031158447264, 0.05289539337158203, 0.05286310577392578, 0.05277494430541992, 0.05271708679199219, 0.05267327880859375, 0.052676063537597656, 0.05277030563354492, 0.05288828659057617, 0.052709022521972654, 0.05279792022705078, 0.05270105743408203, 0.05278464126586914, 0.05269903945922851, 0.05286563110351562, 0.052827327728271485, 0.05291228866577148, 0.05285270309448242, 0.05293331146240234, 0.052883712768554685, 0.05298688125610351, 0.0529409294128418, 0.05295798492431641, 0.053319454193115234, 0.05304121780395508, 0.0530063362121582, 0.05297971343994141, 0.05307494354248047, 0.05297840118408203, 0.052834465026855466, 0.05283654403686523, 0.05287519836425781, 0.052983806610107424, 0.052822017669677736, 0.052956768035888675, 0.05294736099243164, 0.053182464599609375, 0.05314787292480469, 0.053106464385986325, 0.05320083236694336, 0.053137248992919925, 0.05315974426269531, 0.05315353775024414, 0.05341865539550781, 0.05304681777954102, 0.05451468658447266, 0.05318656158447266, 0.052654079437255856, 0.05249871826171875, 0.052536991119384764, 0.05242585754394531, 0.05259564971923828, 0.053082015991210936, 0.05249187088012695, 0.052679168701171876, 0.05255708694458008, 0.05245836639404297, 0.05275222396850586, 0.05255478286743164, 0.052591743469238283, 0.05266211318969727, 0.05263359832763672, 0.05268428802490235, 0.05275289535522461, 0.053082111358642575, 0.05294668960571289, 0.05282822418212891, 0.05271897506713867, 0.05253116989135742, 0.05270943832397461, 0.05268339157104492, 0.052842655181884766, 0.05267804718017578, 0.052660831451416014, 0.052719615936279295, 0.05269081497192383, 0.05266998291015625, 0.0528554573059082, 0.05276627349853515, 0.052913887023925785, 0.052947616577148436, 0.05287116622924805, 0.052789249420166016, 0.053037120819091794, 0.05297760009765625, 0.05308160018920898, 0.05293721771240235, 0.052967422485351565, 0.05287936019897461, 0.05300428771972656, 0.052848190307617185, 0.05299244689941406, 0.052754432678222656, 0.052819969177246094, 0.05288905715942383, 0.05279388809204102, 0.05301248168945313, 0.05294079971313476, 0.053032958984375, 0.05303910446166992, 0.05296332931518555, 0.053125118255615236, 0.05315996932983398, 0.05339907073974609, 0.05335254287719727, 0.05317232131958008, 0.05292262268066406, 0.05307148742675781]",tokens/s,18.94704243009041,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: 
Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-7b/545fd352d38c88a3ba5e3032eb73d64175d5bafb/modeling_falcon.py"", line 843, in __init__ self.transformer = FalconModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-7b/545fd352d38c88a3ba5e3032eb73d64175d5bafb/modeling_falcon.py"", line 650, in __init__ self.h = nn.ModuleList([FalconDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-7b/545fd352d38c88a3ba5e3032eb73d64175d5bafb/modeling_falcon.py"", line 650, in <listcomp> self.h = nn.ModuleList([FalconDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-7b/545fd352d38c88a3ba5e3032eb73d64175d5bafb/modeling_falcon.py"", line 420, in __init__ self.mlp = FalconMLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-7b/545fd352d38c88a3ba5e3032eb73d64175d5bafb/modeling_falcon.py"", line 403, in __init__ self.dense_h_to_4h = FalconLinear(hidden_size, 4 * hidden_size, bias=config.bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 316.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 186.12 MiB is free. Process 197283 has 14.56 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 41.89 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 150.12 MiB is free. Process 161063 has 14.59 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.43 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,783.458304,6064.832512,0.0,5662.3104,5660.689408,s,1,7.5540146484375,7.5540146484375,0.0,7.5540146484375,7.5540146484375,7.5540146484375,7.5540146484375,[7.5540146484375],,kWh,6.288850195831704e-06,6.865710087459344e-07,1.9208348699950584e-06,8.896256074572697e-06,,MB,1148.563456,6075.318272,0.0,5662.3104,5460.6848,s,10,1.9794697113037112,0.19794697113037113,0.005175534072795056,0.19908808135986328,0.20212606658935547,0.20229346389770508,0.20242738174438477,"[0.18375747680664062, 0.20027197265625, 0.19694972229003907, 0.19894146728515624, 0.1992346954345703, 0.1979303436279297, 0.1959122314453125, 0.20192207336425783, 0.2020888671875, 0.2024608612060547]",tokens/s,1293.275661345655,kWh,5.677938651762541e-06,6.26173578277429e-07,3.7710072903079502e-06,1.0075119520347921e-05,tokens/kWh,25409127.850342326,MB,1160.187904,6077.415424,0.0,5664.407552,5509.80096,s,10,15.122183959960937,1.5122183959960938,0.0013118388423167175,1.5124496459960937,1.5136620727539063,1.513832745361328,1.5139692834472658,"[1.5104246826171874, 1.5119986572265625, 1.510499755859375, 1.5103450927734374, 1.5121942138671876, 1.5136241455078125, 1.5128984375, 1.513490478515625, 1.512705078125, 1.51400341796875]",tokens/s,41.66064912766921,kWh,4.427695994906644e-05,4.883327453031171e-06,2.944385261488995e-05,7.860414001698756e-05,tokens/kWh,801484.501788134,,s,630,15.11980140495301,0.02399968476976667,0.00021080949495601273,0.02396511936187744,0.02414363822937012,0.02419849920272827,0.02519167266845703,"[0.024957279205322265, 0.024383487701416014, 0.02407606315612793, 0.023881952285766603, 0.023762943267822266, 0.023770975112915038, 0.023765151977539062, 0.023786655426025392, 0.02375356864929199, 0.023773183822631837, 0.023785472869873047, 0.023795488357543946, 0.023808223724365234, 0.023781600952148436, 0.023776863098144533, 0.023772607803344725, 0.02383148765563965, 0.023827327728271484, 0.023822431564331056, 0.023828319549560547, 
0.023861408233642578, 0.02381110382080078, 0.02382534408569336, 0.023829376220703125, 0.023846912384033202, 0.02388172721862793, 0.023856895446777344, 0.02393328094482422, 0.023942943572998046, 0.024015039443969727, 0.023969440460205077, 0.023931167602539063, 0.02388172721862793, 0.02387977600097656, 0.023928735733032228, 0.02391196823120117, 0.023914335250854492, 0.02391891288757324, 0.023996736526489256, 0.024055871963500976, 0.024111040115356447, 0.02410495948791504, 0.02406787109375, 0.02404118347167969, 0.024056480407714843, 0.0240447998046875, 0.0242325439453125, 0.024076480865478516, 0.02411296081542969, 0.02411315155029297, 0.02410905647277832, 0.024063776016235352, 0.024075519561767577, 0.024109216690063478, 0.02417951965332031, 0.02409187126159668, 0.02410313606262207, 0.024046144485473632, 0.0240513916015625, 0.024027456283569337, 0.02408243179321289, 0.024066047668457033, 0.024082527160644532, 0.025179744720458985, 0.024447519302368163, 0.024092927932739257, 0.023876415252685548, 0.02380076789855957, 0.023846015930175782, 0.023802751541137694, 0.023758848190307616, 0.023783647537231445, 0.023770912170410156, 0.023779232025146483, 0.02379507255554199, 0.02379439926147461, 0.023809471130371095, 0.023822336196899413, 0.023796287536621094, 0.023828479766845705, 0.023825536727905272, 0.023847808837890627, 0.02386124801635742, 0.02386319923400879, 0.023881824493408203, 0.02392790412902832, 0.02397875213623047, 0.023998624801635744, 0.023909664154052733, 0.023891008377075196, 0.023849760055541992, 0.023892704010009765, 0.02393724822998047, 0.023897663116455078, 0.023907968521118164, 0.023939296722412108, 0.023900320053100586, 0.02411952018737793, 0.02403139114379883, 0.024154111862182616, 0.023980031967163085, 0.023971839904785155, 0.02405936050415039, 0.024125631332397462, 0.02411942481994629, 0.02412054443359375, 0.02405068778991699, 0.02407587242126465, 0.02408243179321289, 0.02416422462463379, 0.02411574363708496, 0.024099008560180664, 0.024149824142456054, 0.024156160354614258, 0.024082304000854492, 0.02409196853637695, 0.024059871673583984, 0.024103263854980468, 0.024077823638916016, 0.024071168899536134, 0.02406950378417969, 0.024068288803100586, 0.024030912399291993, 0.024070911407470703, 0.0240762882232666, 0.02408243179321289, 0.025202911376953126, 0.024493215560913086, 0.02410371208190918, 0.02396326446533203, 0.02385152053833008, 0.02389811134338379, 0.023773183822631837, 0.02377289581298828, 0.023783327102661133, 0.023771520614624023, 0.023772960662841798, 0.023797632217407227, 0.023789920806884766, 0.02378726387023926, 0.02379801559448242, 0.023789567947387694, 0.023812095642089845, 0.02382441520690918, 0.02382153511047363, 0.023796512603759767, 0.023797216415405272, 0.02383292770385742, 0.02381955146789551, 0.023817344665527342, 0.023900928497314452, 0.023905311584472656, 0.023988191604614257, 0.023928415298461913, 0.02392233657836914, 0.02389651107788086, 0.02386524772644043, 0.023891616821289062, 0.023904672622680666, 0.02389632034301758, 0.023881568908691406, 0.02386764717102051, 0.023934368133544923, 0.023943328857421876, 0.023949375152587892, 0.023970176696777343, 0.024013887405395506, 0.024075199127197265, 0.024143327713012697, 0.024112831115722655, 0.024105087280273437, 0.024107744216918945, 0.02409823989868164, 0.024080352783203127, 0.024089183807373047, 0.02408448028564453, 0.02408038330078125, 0.024032703399658205, 0.024049888610839842, 0.024047840118408204, 0.02407436752319336, 0.024035520553588867, 0.02404947280883789, 0.024053760528564453, 
0.024096864700317383, 0.024096704483032225, 0.024090368270874022, 0.024053983688354492, 0.024070144653320313, 0.025219200134277343, 0.024484384536743165, 0.024104927062988283, 0.02390220832824707, 0.023812095642089845, 0.023788864135742188, 0.023756927490234375, 0.023761472702026366, 0.023758655548095704, 0.023754911422729494, 0.023746591567993164, 0.023764991760253908, 0.023811296463012697, 0.02384294319152832, 0.023812320709228514, 0.023810335159301758, 0.02384079933166504, 0.023822463989257813, 0.023824384689331055, 0.023840768814086914, 0.023840511322021484, 0.02388198471069336, 0.02388787269592285, 0.023877567291259765, 0.02390153694152832, 0.0238558406829834, 0.02383660888671875, 0.023881759643554688, 0.023881759643554688, 0.023885055541992186, 0.023883775711059572, 0.023900096893310546, 0.023918464660644533, 0.02392268753051758, 0.023925695419311523, 0.023957504272460937, 0.023936384201049803, 0.02389200019836426, 0.023969472885131834, 0.02404969596862793, 0.024064895629882812, 0.024062047958374022, 0.02407580757141113, 0.024107391357421876, 0.02408006477355957, 0.024065343856811524, 0.0240579833984375, 0.024023935317993163, 0.02414771270751953, 0.02404812812805176, 0.024043264389038085, 0.024016895294189454, 0.024011775970458983, 0.024007808685302733, 0.02401785659790039, 0.02404262351989746, 0.02406777572631836, 0.02415001678466797, 0.02406412887573242, 0.024080448150634766, 0.024084415435791016, 0.02410495948791504, 0.024133087158203125, 0.025268224716186522, 0.024397216796875, 0.02404207992553711, 0.023906591415405274, 0.023823711395263673, 0.0238472957611084, 0.023866943359375, 0.023899967193603516, 0.023822975158691407, 0.023787712097167967, 0.023820032119750978, 0.02381977653503418, 0.02386284828186035, 0.023864320755004883, 0.023842815399169923, 0.023813215255737305, 0.02381648063659668, 0.023888160705566406, 0.023843072891235353, 0.023849056243896483, 0.023866943359375, 0.023866079330444337, 0.02383024024963379, 0.023870975494384765, 0.023904767990112305, 0.023932928085327147, 0.023988384246826172, 0.023938175201416015, 0.023909088134765624, 0.023918207168579102, 0.023968128204345702, 0.023965055465698243, 0.023959648132324218, 0.023916927337646485, 0.02393718338012695, 0.02396518325805664, 0.02401321601867676, 0.024025184631347656, 0.023988224029541014, 0.024071807861328124, 0.024119680404663085, 0.024134944915771485, 0.024140512466430664, 0.02409881591796875, 0.02406713676452637, 0.024140735626220704, 0.02406399917602539, 0.024030879974365236, 0.024041471481323243, 0.024043872833251954, 0.02405580711364746, 0.024030784606933593, 0.024050111770629882, 0.02403228759765625, 0.024023008346557618, 0.024056480407714843, 0.02408687973022461, 0.024434335708618166, 0.02408073616027832, 0.024057855606079103, 0.024128799438476563, 0.024068735122680665, 0.0240631046295166, 0.025148160934448244, 0.0244715518951416, 0.02409881591796875, 0.023918495178222657, 0.023830623626708985, 0.023861536026000975, 0.024155872344970703, 0.02373222351074219, 0.023777280807495117, 0.02382579231262207, 0.023928607940673828, 0.023921247482299804, 0.02388812828063965, 0.023883775711059572, 0.02401840019226074, 0.024027679443359377, 0.023942367553710937, 0.023859935760498045, 0.023821439743041992, 0.023842815399169923, 0.023864255905151368, 0.02384486389160156, 0.023855104446411132, 0.023857152938842774, 0.02386534309387207, 0.023879680633544922, 0.023953056335449217, 0.023935327529907225, 0.02395136070251465, 0.024012800216674804, 0.023981311798095702, 0.023964319229125976, 0.023967840194702147, 
0.023971839904785155, 0.023957504272460937, 0.0239617919921875, 0.023969600677490235, 0.024016447067260742, 0.024052160263061523, 0.024199167251586915, 0.024153247833251953, 0.024172479629516602, 0.024197759628295897, 0.024127775192260743, 0.024139711380004883, 0.024077983856201173, 0.024040864944458007, 0.024069120407104492, 0.024102943420410156, 0.024100704193115233, 0.024092479705810545, 0.024052032470703123, 0.02407219123840332, 0.024070144653320313, 0.024083520889282225, 0.024037696838378905, 0.024106847763061524, 0.0241364803314209, 0.02414192008972168, 0.02406595230102539, 0.02410086441040039, 0.024105056762695313, 0.024127391815185546, 0.025196544647216795, 0.024511903762817384, 0.024130144119262696, 0.02392192077636719, 0.023804000854492188, 0.023771808624267577, 0.023783424377441405, 0.023777280807495117, 0.023775232315063476, 0.02377670478820801, 0.023802431106567382, 0.023810047149658203, 0.023811744689941405, 0.023802207946777343, 0.02381337547302246, 0.023823104858398437, 0.023842815399169923, 0.023877311706542968, 0.02390800094604492, 0.02392527961730957, 0.023953792572021484, 0.023922496795654297, 0.023934911727905274, 0.02394726371765137, 0.023937023162841797, 0.02392255973815918, 0.023879808425903322, 0.02392268753051758, 0.02391449546813965, 0.02392268753051758, 0.02395248031616211, 0.023913375854492186, 0.02391449546813965, 0.023935232162475586, 0.02396735954284668, 0.024027263641357422, 0.024045568466186523, 0.024045631408691405, 0.024081727981567384, 0.024113792419433594, 0.024145919799804686, 0.024166656494140626, 0.02414361572265625, 0.02419910430908203, 0.02421561622619629, 0.024135679244995118, 0.02411315155029297, 0.024087776184082033, 0.02410540771484375, 0.02404774475097656, 0.02403865623474121, 0.024044511795043945, 0.024076351165771483, 0.024077856063842773, 0.024091039657592773, 0.024090560913085937, 0.024099008560180664, 0.024086336135864257, 0.024117311477661132, 0.024086528778076172, 0.02411532783508301, 0.02411712074279785, 0.024128799438476563, 0.02526486396789551, 0.024498176574707032, 0.024128799438476563, 0.023978815078735352, 0.0238570556640625, 0.023815616607666016, 0.02380851173400879, 0.023791168212890627, 0.023843519210815428, 0.024141632080078124, 0.023793344497680665, 0.023787839889526367, 0.02381929588317871, 0.023870431900024414, 0.023846847534179687, 0.023867456436157227, 0.02389593505859375, 0.023866912841796876, 0.023906911849975586, 0.02392064094543457, 0.023944448471069336, 0.02390243148803711, 0.0239150390625, 0.02391587257385254, 0.024179359436035157, 0.02392064094543457, 0.02390630340576172, 0.02390220832824707, 0.023914335250854492, 0.02391644859313965, 0.023926336288452147, 0.023902303695678712, 0.023931488037109375, 0.02393267250061035, 0.02395302391052246, 0.02394998359680176, 0.02394697570800781, 0.02396931266784668, 0.02407072067260742, 0.024280384063720704, 0.024160736083984374, 0.02416431999206543, 0.024115615844726563, 0.02409891128540039, 0.024133535385131837, 0.024145856857299804, 0.02409385681152344, 0.02404662322998047, 0.024076160430908204, 0.024055072784423828, 0.024084224700927734, 0.0240980167388916, 0.024098432540893555, 0.02407593536376953, 0.0240849609375, 0.024054815292358398, 0.024081344604492187, 0.02407427215576172, 0.024108064651489257, 0.0241059513092041, 0.02411315155029297, 0.02409676742553711, 0.024104639053344725, 0.025587743759155273, 0.024548864364624022, 0.024148096084594728, 0.02394281578063965, 0.023829439163208007, 0.02380361557006836, 0.02377859115600586, 0.023773952484130858, 
0.023793216705322265, 0.023779775619506834, 0.023779327392578126, 0.023810047149658203, 0.02381545639038086, 0.023898847579956056, 0.023846464157104494, 0.023807647705078126, 0.0238351993560791, 0.02386147117614746, 0.02386944007873535, 0.02386944007873535, 0.02388787269592285, 0.02388166427612305, 0.023871616363525392, 0.023904415130615236, 0.023915679931640624, 0.023919519424438478, 0.023942880630493164, 0.023941120147705077, 0.023961664199829102, 0.023936960220336916, 0.023915552139282228, 0.02389219284057617, 0.02396550369262695, 0.023927743911743165, 0.023928831100463867, 0.02393670463562012, 0.023910175323486327, 0.023898656845092774, 0.02402675247192383, 0.02410691261291504, 0.02414009666442871, 0.024141984939575194, 0.02408857536315918, 0.02410700798034668, 0.02407753562927246, 0.0240914249420166, 0.02409859275817871, 0.024066272735595702, 0.024063711166381837, 0.024103200912475586, 0.02407219123840332, 0.0240513916015625, 0.02404748725891113, 0.02409516716003418, 0.02409017562866211, 0.024074079513549805, 0.024076799392700195, 0.02406825637817383, 0.024094655990600587, 0.02408243179321289, 0.024293376922607423, 0.024290399551391603, 0.02410588836669922, 0.02527267265319824, 0.02452537536621094, 0.024116607666015626, 0.0239202880859375, 0.023822784423828125, 0.023820640563964844, 0.023858783721923828, 0.023843296051025392, 0.023817888259887697, 0.02377872085571289, 0.02378995132446289, 0.023786048889160156, 0.023795711517333985, 0.023823999404907228, 0.023823808670043946, 0.023839839935302733, 0.023875423431396484, 0.023872543334960937, 0.02385513687133789, 0.023851968765258788, 0.02384889602661133, 0.02383273506164551, 0.023828479766845705, 0.023887296676635743, 0.023908607482910155, 0.023910432815551757, 0.023899967193603516, 0.023937408447265624, 0.023960895538330078, 0.02395564842224121, 0.023923200607299806, 0.023911903381347657, 0.02393734359741211, 0.023932287216186524, 0.02398703956604004, 0.023971839904785155, 0.024014848709106446, 0.02399168014526367, 0.024058656692504884, 0.024096384048461914, 0.02415430450439453, 0.0241595516204834, 0.02418873596191406, 0.0241246395111084, 0.02412211227416992, 0.024136640548706054, 0.0241560001373291, 0.02414556884765625, 0.024248832702636718, 0.024213056564331054, 0.024199296951293945, 0.024178943634033202, 0.02414803123474121, 0.02424140739440918, 0.024189695358276368, 0.024178688049316405, 0.024184831619262694, 0.024194623947143553, 0.024184608459472658, 0.024123327255249023, 0.02414384078979492, 0.024123615264892578, 0.024131999969482423]",tokens/s,41.66721394856563,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, 
worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1089, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.45 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.14 GiB is free. Process 64649 has 13.60 GiB memory in use. Of the allocated memory 13.48 GiB is allocated by PyTorch, and 16.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/modeling_decilm.py"", line 311, in __init__ self.model = DeciLMModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/modeling_decilm.py"", line 182, in __init__ self.layers = nn.ModuleList([DeciLMDecoderLayer(config, layer_idx) for layer_idx File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/modeling_decilm.py"", line 182, in self.layers = nn.ModuleList([DeciLMDecoderLayer(config, layer_idx) for layer_idx File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/modeling_decilm.py"", line 149, in __init__ self.mlp = LlamaMLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciLM-7B/c3c9f4226801dc0433f32aebffe0aac68ee2f051/transformers_v4_35_2__modeling_llama.py"", line 236, in __init__ 
self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 162.12 MiB is free. Process 23735 has 14.58 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 25.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,884.5312,11792.154624,0.0,11389.632512,11388.883968,s,1,7.62391650390625,7.62391650390625,0.0,7.62391650390625,7.62391650390625,7.62391650390625,7.62391650390625,[7.62391650390625],,kWh,7.550980120830293e-06,8.183254871460181e-07,4.161947774000696e-06,1.2531253381977007e-05,,MB,1211.850752,12089.950208,0.0,11676.942336,11620.241408,s,10,3.529409362792969,0.35294093627929685,0.004687535482057751,0.3546914520263672,0.3575145141601562,0.3580895935058594,0.35854965698242186,"[0.3421952209472656, 0.3542493896484375, 0.35546945190429685, 0.3586646728515625, 0.3475618896484375, 0.3554535827636719, 0.35738671875, 0.35513351440429686, 0.35198422241210936, 0.3513106994628906]",tokens/s,725.333826953461,kWh,1.0179109382326568e-05,1.1225649051281122e-06,6.76206479662085e-06,1.806373908407553e-05,tokens/kWh,14172038.181490464,MB,1217.179648,12089.950208,0.0,11676.942336,11620.243968,s,10,31.83818115234375,3.1838181152343745,0.0023803913060039604,3.1841195068359376,3.1869217041015627,3.1870340942382813,3.187124006347656,"[3.182017333984375, 3.184160400390625, 3.182552490234375, 3.183121826171875, 3.17848828125, 3.18476953125, 3.184949462890625, 3.18407861328125, 3.186896728515625, 3.187146484375]",tokens/s,19.78756251764159,kWh,9.316640863058924e-05,1.0276466099167838e-05,6.183607341517975e-05,0.00016527894814493684,tokens/kWh,381173.7714155458,,s,630,31.834508899688718,0.05053096650744242,0.00027557110689108155,0.05054019165039063,0.050829452133178715,0.05090588703155518,0.0515531579208374,"[0.05157068634033203, 0.05063679885864258, 0.05017599868774414, 0.04999980926513672, 0.05020832061767578, 0.0501212158203125, 0.05012678527832031, 0.05012886428833008, 0.0501822395324707, 0.05013913726806641, 0.05027222442626953, 0.0502599983215332, 0.05031321716308594, 0.050114559173583983, 0.050184192657470705, 
0.05008902359008789, 0.05028300857543945, 0.05022150421142578, 0.050450401306152345, 0.050608158111572266, 0.050603870391845704, 0.05046031951904297, 0.05046732711791992, 0.05025177764892578, 0.05030246353149414, 0.05031520080566406, 0.05030470275878906, 0.05025471878051758, 0.050288639068603515, 0.0503166389465332, 0.05038966369628906, 0.050348033905029295, 0.05049305725097656, 0.0504323844909668, 0.05052537536621094, 0.05059795379638672, 0.050791168212890626, 0.050683902740478515, 0.050716670989990234, 0.0506341438293457, 0.05071660614013672, 0.05068479919433594, 0.050669345855712894, 0.05062047958374023, 0.050672863006591795, 0.05063958358764648, 0.05063663864135742, 0.05060214233398438, 0.050561023712158204, 0.050677761077880856, 0.050670848846435544, 0.050648929595947266, 0.05065411376953125, 0.05081087875366211, 0.05064089584350586, 0.050722270965576174, 0.05067830276489258, 0.050756607055664066, 0.050918399810791014, 0.050888671875, 0.050864158630371095, 0.05086617660522461, 0.05081292724609375, 0.051525279998779296, 0.050764320373535156, 0.050282497406005856, 0.05028432083129883, 0.050175617218017575, 0.05016841506958008, 0.050237438201904294, 0.05014323043823242, 0.0505239372253418, 0.05024528121948242, 0.050235969543457035, 0.05016899108886719, 0.05016569519042969, 0.05054054260253906, 0.050278560638427734, 0.05021343994140625, 0.050309310913085936, 0.05040947341918945, 0.05056512069702149, 0.0507064323425293, 0.05078015899658203, 0.05053984069824219, 0.05064265441894531, 0.05047590255737305, 0.050345535278320315, 0.050342430114746095, 0.05032470321655273, 0.050332447052001954, 0.05036646270751953, 0.05034710311889649, 0.05033184051513672, 0.05041430282592774, 0.050388992309570314, 0.050484577178955076, 0.050481822967529295, 0.05046476745605469, 0.05060403060913086, 0.050710529327392576, 0.05065523147583008, 0.0508040657043457, 0.05062518310546875, 0.05088256072998047, 0.050783905029296875, 0.0506412467956543, 0.05057126235961914, 0.05061964797973633, 0.05052617645263672, 0.05053462219238281, 0.05060211181640625, 0.05091987228393555, 0.05053961563110351, 0.05069680023193359, 0.050579776763916014, 0.05063679885864258, 0.05060713577270508, 0.05070742416381836, 0.0507125129699707, 0.05084726333618164, 0.05077660751342773, 0.05074691009521484, 0.050825695037841796, 0.050826366424560544, 0.0507831039428711, 0.05176115036010742, 0.05078742218017578, 0.05025788879394531, 0.050086849212646486, 0.05018009567260742, 0.05006131362915039, 0.05021491241455078, 0.05018009567260742, 0.050098175048828124, 0.05026764678955078, 0.05028915023803711, 0.050214336395263674, 0.05011308670043945, 0.05016323089599609, 0.05026863861083984, 0.05026601409912109, 0.0501712646484375, 0.0502413444519043, 0.050406303405761715, 0.050730945587158204, 0.05065439987182617, 0.05046566390991211, 0.05028400039672851, 0.05036624145507813, 0.050342655181884764, 0.050331649780273435, 0.05030857467651367, 0.05032400131225586, 0.05023539352416992, 0.05045862579345703, 0.050267486572265624, 0.050375328063964844, 0.05041766357421875, 0.05050518417358398, 0.05041404724121094, 0.050505214691162106, 0.050496063232421874, 0.05064908981323242, 0.050601982116699216, 0.050730720520019534, 0.05067190551757812, 0.050792224884033205, 0.05067388916015625, 0.05064838409423828, 0.05060063934326172, 0.05065017700195312, 0.05058246231079101, 0.050572414398193356, 0.05065804672241211, 0.050724990844726564, 0.05075465774536133, 0.050664447784423826, 0.050587169647216795, 0.0507457275390625, 0.0506695671081543, 0.05082278442382813, 
0.05076825714111328, 0.05076582336425781, 0.05092313766479492, 0.05083552169799805, 0.0510134391784668, 0.05077862548828125, 0.05077196884155273, 0.05157795333862305, 0.05066435241699219, 0.050216960906982425, 0.05022304153442383, 0.05025388717651367, 0.050260990142822266, 0.05018931198120117, 0.05029619216918945, 0.05014182281494141, 0.05024470520019531, 0.05017436981201172, 0.05022771072387695, 0.05018761444091797, 0.050200672149658204, 0.050237472534179685, 0.05018668746948242, 0.05032515335083008, 0.05035647964477539, 0.05054278564453125, 0.050611743927001955, 0.0505382080078125, 0.05049542236328125, 0.05042067337036133, 0.050366336822509766, 0.050411518096923826, 0.050359809875488284, 0.05037107086181641, 0.05032102584838867, 0.05031472015380859, 0.05029776000976562, 0.050251136779785155, 0.0501798095703125, 0.05077699279785156, 0.05042937469482422, 0.05047558212280273, 0.050522113800048826, 0.0506033935546875, 0.05062838363647461, 0.05064380645751953, 0.050713951110839844, 0.05069635009765625, 0.050661376953125, 0.05061593627929688, 0.05082815933227539, 0.050627777099609375, 0.050628704071044923, 0.05066416168212891, 0.0506429443359375, 0.050544639587402344, 0.05070025634765625, 0.05059135818481445, 0.050641311645507815, 0.05067571258544922, 0.050735103607177735, 0.05065932846069336, 0.05071638488769531, 0.05073891067504883, 0.050805313110351566, 0.051035999298095706, 0.05081718444824219, 0.05088774490356445, 0.05084572982788086, 0.050708511352539065, 0.0516328010559082, 0.05083071899414063, 0.0502685432434082, 0.050114814758300784, 0.04996432113647461, 0.05011324691772461, 0.0500747184753418, 0.050125247955322264, 0.050012126922607425, 0.050130752563476565, 0.05020947265625, 0.05003878402709961, 0.05019375991821289, 0.0500968017578125, 0.05019180679321289, 0.050002494812011716, 0.05018009567260742, 0.05032755279541016, 0.05048934555053711, 0.050552833557128904, 0.0506363525390625, 0.05055123138427734, 0.05043404769897461, 0.05017734527587891, 0.05028524780273438, 0.05028659057617187, 0.05039436721801758, 0.050311103820800784, 0.05030361557006836, 0.05038083267211914, 0.05047628784179688, 0.05034281539916992, 0.05041907119750977, 0.050518657684326174, 0.050485183715820316, 0.050433406829833986, 0.05061907196044922, 0.05104435348510742, 0.05074716949462891, 0.050743518829345705, 0.05070771026611328, 0.05074764633178711, 0.050567680358886716, 0.05039308929443359, 0.05050483322143555, 0.050514495849609375, 0.050454784393310546, 0.05053635025024414, 0.05047449493408203, 0.05055894470214844, 0.050471614837646485, 0.050339839935302735, 0.05043199920654297, 0.05055487823486328, 0.05052604675292969, 0.050417823791503905, 0.05076377487182617, 0.050577407836914064, 0.05073020935058594, 0.050549537658691406, 0.050799713134765626, 0.05066640090942383, 0.05071257781982422, 0.05145964813232422, 0.05054844665527344, 0.050289375305175785, 0.05014323043823242, 0.05007974243164062, 0.05015961456298828, 0.05014473724365234, 0.050159233093261715, 0.05015644836425781, 0.05013078308105469, 0.05033964920043945, 0.050127201080322266, 0.05020985412597656, 0.05020528030395508, 0.050239742279052736, 0.050208255767822264, 0.0502125129699707, 0.050291648864746095, 0.050527423858642576, 0.05068854522705078, 0.05069990539550781, 0.05063910293579101, 0.05040700912475586, 0.050430656433105465, 0.05036044692993164, 0.050348033905029295, 0.05039616012573242, 0.05031628799438476, 0.0504131851196289, 0.050463104248046876, 0.050522113800048826, 0.0504439697265625, 0.050547008514404294, 0.05039513778686523, 
0.05056716918945312, 0.05050294494628906, 0.05060630416870117, 0.05085190582275391, 0.050743743896484374, 0.05082931137084961, 0.05075107192993164, 0.05077648162841797, 0.05074943923950195, 0.050710529327392576, 0.05063683319091797, 0.05081494522094727, 0.05076172637939453, 0.050702335357666016, 0.050560577392578125, 0.05079443359375, 0.05055744171142578, 0.050683902740478515, 0.05067161560058594, 0.05070438385009766, 0.050683902740478515, 0.05082316970825195, 0.05081497573852539, 0.05084934234619141, 0.05088848114013672, 0.051154590606689455, 0.05082371139526367, 0.05086051177978516, 0.05086003112792969, 0.05167411041259766, 0.05076416015625, 0.05025817489624024, 0.050057567596435544, 0.05016697692871094, 0.050033470153808594, 0.0501288948059082, 0.050171585083007814, 0.0503565444946289, 0.05030297470092773, 0.05019343948364258, 0.050315265655517576, 0.050230239868164064, 0.050321407318115234, 0.050130752563476565, 0.050219200134277345, 0.050298881530761716, 0.05032470321655273, 0.05045123291015625, 0.05072895812988281, 0.050980289459228514, 0.05049196624755859, 0.05040083312988281, 0.05036281585693359, 0.050290687561035156, 0.05039913558959961, 0.05032742309570312, 0.050343391418457034, 0.0503078384399414, 0.05040332794189453, 0.050323455810546876, 0.05034521484375, 0.050372608184814455, 0.05044940948486328, 0.05051932907104492, 0.050546783447265625, 0.05065356826782227, 0.050713951110839844, 0.05072553634643555, 0.050855934143066404, 0.050733055114746094, 0.050826271057128905, 0.05070025634765625, 0.050715648651123046, 0.05075353622436524, 0.05068364715576172, 0.050673473358154295, 0.05064134216308594, 0.050552833557128904, 0.05060748672485352, 0.05072313690185547, 0.050968734741210935, 0.05065887832641602, 0.05067190551757812, 0.05074736022949219, 0.050743072509765626, 0.050821407318115235, 0.05080092620849609, 0.051076225280761715, 0.050979328155517575, 0.05088614273071289, 0.05086819076538086, 0.05080361557006836, 0.05156454467773437, 0.05069004821777344, 0.05017331314086914, 0.05013471984863281, 0.050222015380859374, 0.050135040283203126, 0.050106529235839845, 0.05019007873535156, 0.05018838500976563, 0.050157569885253904, 0.05022719955444336, 0.050249729156494144, 0.05029033660888672, 0.05029513549804687, 0.05026406478881836, 0.050253822326660154, 0.05036236953735351, 0.05036236953735351, 0.05059379196166992, 0.050710529327392576, 0.050644992828369144, 0.05053440093994141, 0.05041916656494141, 0.05029724884033203, 0.050321537017822264, 0.05035523223876953, 0.05025481414794922, 0.05032112121582031, 0.05026230239868164, 0.05034598541259765, 0.050414623260498045, 0.050331615447998045, 0.0504637451171875, 0.050444286346435545, 0.05056460952758789, 0.05061254501342773, 0.0507578239440918, 0.05066342544555664, 0.05086617660522461, 0.0507446403503418, 0.05072272109985351, 0.050756385803222656, 0.05069823837280273, 0.05067712020874023, 0.05060262298583985, 0.050685791015625, 0.05061840057373047, 0.05070415878295898, 0.050590049743652346, 0.05062041473388672, 0.05067497634887695, 0.050662113189697267, 0.050702335357666016, 0.050728225708007814, 0.05058428955078125, 0.05086396789550781, 0.050718753814697266, 0.05084985733032227, 0.05083961486816406, 0.050917377471923826, 0.05102592086791992, 0.050878398895263674, 0.05079849624633789, 0.051515392303466793, 0.0507325439453125, 0.050237953186035154, 0.050148544311523435, 0.05016044616699219, 0.050098175048828124, 0.05015075302124023, 0.050274528503417966, 0.05015596771240234, 0.05026201629638672, 0.050288639068603515, 0.05025958251953125, 
0.0502685432434082, 0.0503616943359375, 0.0503548469543457, 0.05034598541259765, 0.05027174377441406, 0.05040560150146484, 0.05047087860107422, 0.05074348831176758, 0.050603809356689455, 0.050571231842041015, 0.05043033599853516, 0.05047091293334961, 0.050423809051513675, 0.050444286346435545, 0.050385982513427734, 0.050419872283935546, 0.0503359375, 0.050477664947509764, 0.05051145553588867, 0.05044675064086914, 0.05045248031616211, 0.05052403259277344, 0.050544769287109374, 0.05060403060913086, 0.0506695671081543, 0.050784255981445314, 0.05088051223754883, 0.05083135986328125, 0.050786304473876956, 0.050759681701660155, 0.0506960334777832, 0.05073321533203125, 0.05059756851196289, 0.050772289276123046, 0.050599872589111326, 0.05106284713745117, 0.05055897521972656, 0.0506668815612793, 0.05067020797729492, 0.050710529327392576, 0.05075353622436524, 0.05072860717773438, 0.05068172836303711, 0.050907615661621095, 0.05076732635498047, 0.05091382217407227, 0.05091328048706055, 0.050958335876464846, 0.05094400024414063, 0.05111529541015625, 0.05090377426147461, 0.05180992126464844, 0.05079507064819336, 0.05030809783935547, 0.05042822265625, 0.0502545280456543, 0.050229248046875, 0.05015497589111328, 0.05022771072387695, 0.0503337287902832, 0.05023871994018555, 0.050244350433349606, 0.05028160095214844, 0.050328449249267576, 0.05023539352416992, 0.05029478454589844, 0.050300289154052734, 0.05050636672973633, 0.0504189453125, 0.050590465545654294, 0.050694145202636716, 0.0506429443359375, 0.050563072204589846, 0.05057712173461914, 0.0503803825378418, 0.050430656433105465, 0.05040719985961914, 0.05041584014892578, 0.05039427185058594, 0.05044924926757813, 0.05038256072998047, 0.05050601577758789, 0.050493438720703124, 0.05043404769897461, 0.050462718963623046, 0.0505300178527832, 0.05057769775390625, 0.05056668853759766, 0.05073273468017578, 0.05082191848754883, 0.05086207962036133, 0.05089427185058594, 0.0508642578125, 0.05078470230102539, 0.050763454437255856, 0.050624095916748046, 0.050673728942871095, 0.050621086120605466, 0.05074694442749023, 0.050661823272705075, 0.05068755340576172, 0.05058339309692383, 0.05066403198242188, 0.05062793731689453, 0.05070492935180664, 0.05073932647705078, 0.05076377487182617, 0.05081862258911133, 0.050839969635009766, 0.05094403076171875, 0.05083135986328125, 0.05095616149902344, 0.05086220932006836, 0.050818687438964845]",tokens/s,19.78984510127499,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise 
ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 270.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 98.12 MiB is free. Process 152409 has 14.64 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 1.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,785.956864,11656.953856,0.0,11261.70624,11255.391232,s,1,7.18033935546875,7.18033935546875,0.0,7.18033935546875,7.18033935546875,7.18033935546875,7.18033935546875,[7.18033935546875],,kWh,5.039279437501893e-06,5.485672503577893e-07,1.397223339999476e-06,6.985070027859159e-06,,MB,1123.26656,11667.439616,0.0,11261.70624,10971.009024,s,10,3.609160095214844,0.3609160095214844,0.0057715397464658475,0.36168374633789063,0.36523611755371094,0.3675208755493164,0.3693486819458008,"[0.34562661743164064, 0.36130355834960937, 0.35997222900390624, 0.3647283935546875, 0.360566650390625, 0.3621558837890625, 0.3620639343261719, 0.36980563354492185, 0.3622278137207031, 0.36070938110351564]",tokens/s,709.3063018717683,kWh,1.0374761183620728e-05,1.1441585045193638e-06,6.926892514689321e-06,1.844581220282941e-05,tokens/kWh,13878488.90496305,MB,1129.078784,11669.536768,0.0,11263.803392,11168.310272,s,10,32.345559570312496,3.2345559570312497,0.0030313954029406553,3.23419287109375,3.238731909179687,3.239144885253906,3.2394752661132813,"[3.228908203125, 3.23709130859375, 3.234354248046875, 3.239557861328125, 3.23864013671875, 3.231833740234375, 3.232732177734375, 3.233671142578125, 3.234031494140625, 3.2347392578125]",tokens/s,19.477171159475894,kWh,9.4550488759296e-05,1.0429143709534005e-05,6.261869089951168e-05,0.00016759832336834168,tokens/kWh,375898.7484710144,,s,630,32.3424638595581,0.051337244221520806,0.0002730846994798606,0.05133265495300293,0.051640332794189454,0.051755985450744627,0.05218107563018799,"[0.05185184097290039, 0.05116694259643555, 0.051020065307617185, 0.05102796936035156, 0.05079040145874023, 0.05086412811279297, 0.05093948745727539, 0.05091164779663086, 0.05082726287841797, 0.051001182556152345, 0.0509455680847168, 0.05109526443481445, 0.05096131134033203, 0.05127126312255859, 0.05109990310668945, 0.05096457672119141, 0.05110784149169922, 0.05120006561279297, 0.05122048187255859, 0.0513875846862793, 0.05126841735839844, 0.051265281677246095, 0.05110111999511719, 0.05129817581176758, 0.05111084747314453, 0.05101350402832031, 0.05118150329589844, 0.05111347198486328, 0.05107360076904297, 0.05109142303466797, 0.05104451370239258, 0.05120204925537109, 0.051120128631591794, 0.051205665588378906, 0.05122851181030273, 0.05125593566894531, 0.0513966064453125, 0.05135974502563476, 0.0513719367980957, 0.051394657135009764, 0.05133660888671875, 0.05134710311889648, 0.05139756774902344, 0.051353118896484376, 0.05133315277099609, 0.05142950439453125, 0.0512957763671875, 0.0513666877746582, 0.051224193572998046, 0.051324478149414064, 0.05144249725341797, 0.05132809448242188, 0.051420063018798826, 0.05140889739990234, 0.05154611206054688, 
0.05159686279296875, 0.0514400634765625, 0.05144985580444336, 0.05153779220581055, 0.051560321807861326, 0.051517696380615235, 0.05165055847167969, 0.051557727813720707, 0.052185344696044925, 0.05153792190551758, 0.05099494552612305, 0.050864383697509764, 0.050991104125976565, 0.051146751403808595, 0.05095945739746094, 0.05092240142822266, 0.05091123199462891, 0.05104838562011719, 0.05114652633666992, 0.05100291061401367, 0.05100211334228515, 0.05116652679443359, 0.05118841552734375, 0.051087135314941405, 0.05124937438964844, 0.05129216003417969, 0.051910110473632816, 0.05162768173217774, 0.051323776245117185, 0.05125529479980469, 0.05158707046508789, 0.051156608581542966, 0.05110140609741211, 0.051098270416259764, 0.05119753646850586, 0.05113078308105469, 0.051118049621582035, 0.0510423355102539, 0.05114265441894531, 0.0510382080078125, 0.051138561248779295, 0.051317855834960936, 0.05135017776489258, 0.05146768188476562, 0.051442527770996095, 0.05140470504760742, 0.05155644989013672, 0.051523582458496094, 0.05148262405395508, 0.05155430221557617, 0.05166080093383789, 0.0514150390625, 0.05227718353271484, 0.05254502487182617, 0.05140947341918945, 0.05142015838623047, 0.05121331024169922, 0.05138604736328125, 0.05148908615112305, 0.05131468963623047, 0.05129817581176758, 0.05148236846923828, 0.0523573112487793, 0.05162601470947266, 0.05164588928222656, 0.051622528076171875, 0.05166604614257812, 0.05177158355712891, 0.05144451141357422, 0.05154816055297851, 0.05153123092651367, 0.052065982818603515, 0.05120902252197266, 0.05104422378540039, 0.050863422393798825, 0.050934593200683595, 0.05127372741699219, 0.05117497634887695, 0.050969024658203126, 0.05103731155395508, 0.05110188674926758, 0.05112697601318359, 0.05102796936035156, 0.05100940704345703, 0.05121855926513672, 0.05104230499267578, 0.05109145736694336, 0.051128318786621094, 0.051236862182617186, 0.05130643081665039, 0.05133318328857422, 0.051122177124023435, 0.05131235122680664, 0.05115523147583008, 0.05117715072631836, 0.0510379524230957, 0.05120467376708984, 0.05102092742919922, 0.05099977493286133, 0.05135721588134766, 0.05136883163452149, 0.05117337417602539, 0.051418689727783205, 0.05147068786621094, 0.051370079040527344, 0.051492641448974606, 0.05142265701293945, 0.05144246292114258, 0.05139046478271484, 0.05171814346313477, 0.05150624084472656, 0.0512685432434082, 0.05134912109375, 0.051423614501953124, 0.05139177703857422, 0.05130928039550781, 0.051490238189697266, 0.051272254943847656, 0.051500926971435546, 0.051514846801757816, 0.051388256072998045, 0.05150803375244141, 0.051410945892333984, 0.05156620788574219, 0.05153814315795899, 0.05151705551147461, 0.051646270751953126, 0.051501792907714845, 0.051689472198486325, 0.05170995330810547, 0.05175091171264649, 0.05171532821655273, 0.05170048141479492, 0.051502239227294924, 0.05227001571655274, 0.05150310516357422, 0.05107062530517578, 0.05112630462646484, 0.05085804748535156, 0.05107913589477539, 0.051093791961669924, 0.05113417434692383, 0.05096847915649414, 0.05105683135986328, 0.05099103927612305, 0.05121427154541015, 0.05112041473388672, 0.0511242561340332, 0.05108272171020508, 0.05123072052001953, 0.05118825531005859, 0.05149033737182617, 0.05149897766113281, 0.051491329193115234, 0.05133107376098633, 0.05146214294433594, 0.051253246307373046, 0.05124915313720703, 0.0511110725402832, 0.0511288948059082, 0.05133036804199219, 0.0512907829284668, 0.0512105598449707, 0.05145539093017578, 0.05127180862426758, 0.05123660659790039, 0.05125571060180664, 0.05132249450683594, 
0.05163692855834961, 0.05156614303588867, 0.05147488021850586, 0.05175910568237305, 0.051603424072265626, 0.05159529495239258, 0.05159526443481445, 0.05144166564941406, 0.05156560134887695, 0.051442176818847656, 0.051501537322998045, 0.051544063568115236, 0.05144521713256836, 0.0514312973022461, 0.05148944091796875, 0.05144780731201172, 0.0514436149597168, 0.051477664947509764, 0.05152646255493164, 0.05164044952392578, 0.05175296020507812, 0.05170175933837891, 0.05183820724487305, 0.05177215957641602, 0.05170336151123047, 0.05169107055664063, 0.051609790802001954, 0.05243532943725586, 0.05156486511230469, 0.05212601470947266, 0.05128799819946289, 0.05166207885742188, 0.05214700698852539, 0.05093116760253906, 0.05081455993652344, 0.05114275360107422, 0.05114147186279297, 0.050964481353759764, 0.05104844665527344, 0.05102105712890625, 0.05125724792480469, 0.0509202880859375, 0.05108224105834961, 0.05093478393554687, 0.05121795272827148, 0.05101206588745117, 0.0513493766784668, 0.05119929504394531, 0.05132505416870117, 0.051122081756591796, 0.051378753662109374, 0.05119558334350586, 0.051055137634277346, 0.050984798431396486, 0.05127388763427734, 0.05140009689331055, 0.05122259140014648, 0.05123126220703125, 0.05199462509155273, 0.051738304138183595, 0.05138463973999023, 0.05116668701171875, 0.05135619354248047, 0.05146739196777344, 0.05139104080200195, 0.05161743927001953, 0.05149967956542969, 0.05142297744750977, 0.05154227066040039, 0.05150848007202148, 0.05143824005126953, 0.05150729751586914, 0.05150515365600586, 0.0514334716796875, 0.05175686264038086, 0.051617088317871096, 0.05141955184936523, 0.051665374755859375, 0.051650177001953124, 0.05181683349609375, 0.05150051116943359, 0.051754913330078124, 0.05142512130737305, 0.051761920928955076, 0.05155219268798828, 0.05149705505371094, 0.05169356918334961, 0.051582977294921874, 0.05166262435913086, 0.05154019165039062, 0.051544063568115236, 0.05148246383666992, 0.052170623779296876, 0.051132606506347655, 0.05115887832641602, 0.05097283172607422, 0.05088665771484375, 0.05084569549560547, 0.05100896072387695, 0.050893375396728516, 0.050958335876464846, 0.05089641571044922, 0.05089308929443359, 0.050986686706542966, 0.0508851203918457, 0.05096857452392578, 0.05093939208984375, 0.05104281616210937, 0.051371391296386716, 0.05133785629272461, 0.05122662353515625, 0.051205631256103515, 0.051044769287109375, 0.051213470458984375, 0.05127468872070313, 0.0513309440612793, 0.05114204788208008, 0.05102870559692383, 0.05101363372802734, 0.05098863983154297, 0.05107497787475586, 0.050962944030761716, 0.05104844665527344, 0.050993152618408207, 0.05110374450683594, 0.05125529479980469, 0.052029441833496094, 0.051484672546386716, 0.051666465759277344, 0.05137593460083008, 0.05141161727905273, 0.05146419143676758, 0.051410945892333984, 0.05141299057006836, 0.0515968017578125, 0.05161625671386719, 0.05164031982421875, 0.05161369705200195, 0.05141299057006836, 0.05154934310913086, 0.051472225189208985, 0.05130752182006836, 0.051275070190429685, 0.05130889511108398, 0.051398719787597656, 0.051542015075683595, 0.05147244644165039, 0.05164054489135742, 0.05151129531860352, 0.051557727813720707, 0.05151811218261719, 0.051555774688720704, 0.05163065719604492, 0.051596382141113284, 0.05175388717651367, 0.05201375961303711, 0.05120841598510742, 0.05115084838867188, 0.05107036972045898, 0.05101833724975586, 0.05094153594970703, 0.0510153923034668, 0.05099923324584961, 0.050991584777832034, 0.05088214492797852, 0.050941951751708986, 0.05109215927124024, 
0.05103615951538086, 0.05110492706298828, 0.05115702438354492, 0.051163745880126954, 0.05103433609008789, 0.05115609741210937, 0.05127043151855469, 0.05153801727294922, 0.05113779067993164, 0.05124556732177735, 0.051189952850341794, 0.051118144989013674, 0.05130444717407227, 0.05112422561645508, 0.05120783996582031, 0.051087711334228514, 0.05102150344848633, 0.05118912124633789, 0.05098908615112305, 0.05129497528076172, 0.051140609741210936, 0.05128611373901367, 0.051273792266845704, 0.051367935180664064, 0.05140991973876953, 0.05138278579711914, 0.05142169570922851, 0.05158089447021484, 0.05134748840332031, 0.0514150390625, 0.051510337829589845, 0.05153273773193359, 0.051576831817626956, 0.05148246383666992, 0.051294368743896486, 0.051591167449951174, 0.0513875846862793, 0.05140118408203125, 0.05134339141845703, 0.05153164672851562, 0.05123894500732422, 0.05145568084716797, 0.05140550231933594, 0.05156252670288086, 0.051525409698486326, 0.05202057647705078, 0.05177350234985351, 0.051661121368408204, 0.05160806274414063, 0.05163827133178711, 0.05157068634033203, 0.052096382141113284, 0.05118835067749023, 0.050974720001220705, 0.050861183166503905, 0.050937793731689454, 0.05083404922485352, 0.05093404769897461, 0.05090435028076172, 0.05090719985961914, 0.051003326416015626, 0.05099801635742188, 0.05110988616943359, 0.05105459213256836, 0.05113651275634765, 0.0522874870300293, 0.05129344177246094, 0.051132736206054685, 0.05110419082641601, 0.051307743072509765, 0.051372833251953125, 0.05130854415893555, 0.05130035018920898, 0.051165184020996096, 0.05116108703613281, 0.051158718109130856, 0.05113478469848633, 0.05117526245117188, 0.05122883224487305, 0.05125734329223633, 0.05117647933959961, 0.05114569473266602, 0.05110988616943359, 0.05133107376098633, 0.05126758575439453, 0.05139564895629883, 0.05188703918457031, 0.05153318405151367, 0.0515467529296875, 0.05148591995239258, 0.051313438415527345, 0.05135769653320312, 0.05148057556152344, 0.051525630950927735, 0.05146623992919922, 0.05160953521728515, 0.05167520141601562, 0.051419136047363284, 0.05130035018920898, 0.05132287979125977, 0.05125734329223633, 0.051332191467285154, 0.05128467178344726, 0.05158115386962891, 0.0514150390625, 0.05144707107543945, 0.05169347381591797, 0.051591999053955076, 0.05157273483276367, 0.051507198333740234, 0.051607551574707033, 0.05143462371826172, 0.05160844802856445, 0.05136716842651367, 0.051907230377197265, 0.051843040466308596, 0.05120425415039063, 0.050949630737304685, 0.0509567985534668, 0.05085184097290039, 0.05095609664916992, 0.05094144058227539, 0.05097856140136719, 0.05113907241821289, 0.051044734954833984, 0.05099321746826172, 0.050817024230957034, 0.05112815856933594, 0.05116329574584961, 0.051253246307373046, 0.05100255966186523, 0.05127804946899414, 0.05135625457763672, 0.051568641662597656, 0.05122252655029297, 0.0514202880859375, 0.051200191497802736, 0.05117599868774414, 0.05118947219848633, 0.051257408142089844, 0.0511102409362793, 0.05108736038208008, 0.051195903778076174, 0.05124643325805664, 0.051178112030029296, 0.051300384521484374, 0.05137561416625976, 0.05129391860961914, 0.051346206665039064, 0.05143484878540039, 0.05147100830078125, 0.05144512176513672, 0.05160819244384766, 0.051451904296875, 0.05155833435058594, 0.051369281768798826, 0.051737342834472656, 0.05139865493774414, 0.05151129531860352, 0.05155977630615234, 0.051302398681640625, 0.05135222244262695, 0.051329025268554686, 0.05146419143676758, 0.05148672103881836, 0.05142323303222656, 0.05130179214477539, 
0.05141753768920899, 0.05161795043945312, 0.051533824920654295, 0.05148211288452149, 0.051622398376464845, 0.05159526443481445, 0.05156380844116211, 0.051778270721435544, 0.051580032348632815, 0.051409793853759767, 0.05198233413696289, 0.0513331184387207, 0.05110572814941406, 0.0509071044921875, 0.050954177856445314, 0.05108918380737305, 0.05096076965332031, 0.051042240142822264, 0.05087609481811523, 0.051385921478271486, 0.05099808120727539, 0.05110889434814453, 0.05101052856445312, 0.051109214782714844, 0.05110553741455078, 0.05117414474487305, 0.05110185623168945, 0.05140070343017578, 0.05142643356323242, 0.05148681640625, 0.05125814437866211, 0.05127135848999023, 0.05126176071166992, 0.05130035018920898, 0.05120819091796875, 0.05146988677978516, 0.05118563079833984, 0.05119753646850586, 0.05121484756469727, 0.05141337585449219, 0.0510832633972168, 0.05120819091796875, 0.051236862182617186, 0.05122457504272461, 0.051318687438964845, 0.051361888885498044, 0.05144908905029297, 0.05157759857177734, 0.05143328094482422, 0.0513947525024414, 0.05152767944335938, 0.05149491119384766, 0.051748863220214845, 0.05152972793579102, 0.052103168487548826, 0.051590240478515625, 0.05137420654296875, 0.05142403030395508, 0.05144780731201172, 0.051394561767578124, 0.05147238540649414, 0.05140889739990234, 0.05141708755493164, 0.05135475158691406, 0.05145484924316406, 0.05156454467773437, 0.05144915390014648, 0.05160825729370117, 0.05161939239501953, 0.05156047821044922, 0.051429790496826173, 0.05136588668823242, 0.05148057556152344]",tokens/s,19.479035448123945,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,885.305344,6089.998336,0.0,5687.476224,5685.679104,s,1,7.68189697265625,7.68189697265625,0.0,7.68189697265625,7.68189697265625,7.68189697265625,7.68189697265625,[7.68189697265625],,kWh,6.184454587499506e-06,6.652440169265913e-07,2.1147239139956975e-06,8.964422518421794e-06,,MB,1212.981248,6324.87936,0.0,5911.871488,5850.451456,s,10,2.102448516845703,0.2102448516845703,0.0049277683334715185,0.21066101837158202,0.2150048355102539,0.21501245040893555,0.21501854232788087,"[0.19741542053222655, 0.21035317993164063, 0.20921714782714843, 0.2143074951171875, 0.21312864685058594, 0.20896908569335937, 0.21500314331054687, 0.21096885681152344, 0.2080654754638672, 0.2150200653076172]",tokens/s,1217.6279131156848,kWh,6.006803992857256e-06,6.618402416960082e-07,3.986953302938755e-06,1.0655597537492018e-05,tokens/kWh,24024931.412739348,MB,1224.544256,6324.87936,0.0,5911.871488,5850.454016,s,10,15.786176025390628,1.5786176025390626,0.0015665264475455988,1.5792468261718748,1.5798160400390626,1.58029833984375,1.5806841796875,"[1.57953125, 1.5792275390625, 1.5759837646484376, 1.57580419921875, 1.579021240234375, 1.5774376220703126, 1.57926611328125, 1.579414794921875, 1.5797088623046875, 
1.5807806396484374]",tokens/s,39.90833492460127,kWh,4.6272717024640285e-05,5.10280955901859e-06,3.078132451166134e-05,8.215685109532021e-05,tokens/kWh,766825.8843916254,,s,630,15.783405036926247,0.025053023868136935,0.0002981530175449193,0.025016223907470703,0.025205286407470703,0.025294050312042238,0.026843989086151123,"[0.027113983154296875, 0.025991167068481445, 0.02528665542602539, 0.025006080627441408, 0.024969215393066405, 0.024838144302368165, 0.024793056488037108, 0.024793119430541993, 0.024879104614257814, 0.02519862365722656, 0.025098207473754883, 0.025004032135009766, 0.024823808670043947, 0.024821727752685548, 0.02485878372192383, 0.024819583892822267, 0.024804832458496094, 0.024893024444580077, 0.024863807678222657, 0.024908704757690428, 0.024869855880737306, 0.025055328369140626, 0.024850336074829102, 0.02489151954650879, 0.02489727973937988, 0.02495427131652832, 0.02501705551147461, 0.024969152450561524, 0.024893503189086914, 0.024971263885498047, 0.02495484733581543, 0.02502659225463867, 0.024880704879760743, 0.02493075180053711, 0.024946527481079103, 0.024894975662231447, 0.024938880920410158, 0.025034271240234374, 0.02516681671142578, 0.025605920791625977, 0.025207071304321288, 0.025183231353759765, 0.025123552322387697, 0.025128320693969728, 0.025129600524902342, 0.02511257553100586, 0.025110143661499024, 0.025172351837158203, 0.02519830322265625, 0.025080095291137694, 0.025103647232055663, 0.025127872467041016, 0.025120351791381838, 0.025036863327026367, 0.025056608200073244, 0.02506742477416992, 0.02517081642150879, 0.025106336593627928, 0.025100383758544922, 0.02506342315673828, 0.025061151504516602, 0.02511894416809082, 0.025133056640625, 0.026820608139038086, 0.025699392318725586, 0.025317888259887695, 0.025186016082763673, 0.024917919158935545, 0.024900352478027344, 0.024901695251464843, 0.024831743240356446, 0.02483430480957031, 0.024882463455200194, 0.024826400756835936, 0.024867008209228516, 0.024817663192749022, 0.024840127944946288, 0.024932416915893554, 0.02488934326171875, 0.02498150444030762, 0.02491187286376953, 0.024965215682983398, 0.02494156837463379, 0.02497952079772949, 0.02489151954650879, 0.024904224395751955, 0.024895807266235352, 0.024950592041015626, 0.024954463958740233, 0.0248939208984375, 0.025014272689819338, 0.024854751586914064, 0.02493417549133301, 0.024864767074584963, 0.024925567626953124, 0.02500876808166504, 0.025022495269775392, 0.02496099281311035, 0.024864479064941405, 0.02511270332336426, 0.025124000549316405, 0.025155967712402343, 0.025073535919189455, 0.025139007568359375, 0.025178592681884767, 0.025172447204589842, 0.025112192153930665, 0.025155744552612304, 0.025096416473388672, 0.025186304092407227, 0.02512387275695801, 0.02520982360839844, 0.02509644889831543, 0.025144287109375, 0.025078559875488283, 0.02510438346862793, 0.02527449607849121, 0.025165695190429688, 0.025076736450195314, 0.02514227294921875, 0.025202495574951172, 0.02521721649169922, 0.02511257553100586, 0.025099775314331055, 0.025094655990600585, 0.025117759704589845, 0.02702889633178711, 0.025748064041137695, 0.025206783294677734, 0.025085344314575195, 0.02486672019958496, 0.02480735969543457, 0.024803712844848634, 0.024742271423339842, 0.024750175476074218, 0.02485443115234375, 0.02492416000366211, 0.02487641525268555, 0.024793472290039063, 0.02481177520751953, 0.024901151657104492, 0.02487481689453125, 0.024844959259033204, 0.02482294464111328, 0.024829984664916992, 0.024807903289794923, 0.024852832794189452, 0.024876352310180663, 0.024914623260498047, 
0.02487868881225586, 0.02492025566101074, 0.024925535202026367, 0.024885887145996093, 0.025233280181884764, 0.024876447677612306, 0.024873952865600586, 0.024923648834228516, 0.024922624588012695, 0.024858015060424805, 0.02487980842590332, 0.024909727096557616, 0.02487500762939453, 0.024904960632324218, 0.025008895874023437, 0.025208831787109375, 0.02515702438354492, 0.02518000030517578, 0.025135040283203125, 0.025078559875488283, 0.025014303207397462, 0.025233407974243165, 0.025014272689819338, 0.025030656814575194, 0.024970687866210938, 0.025072191238403322, 0.025051136016845704, 0.02497884750366211, 0.024959583282470704, 0.02501193618774414, 0.02503273582458496, 0.02508006477355957, 0.02504287910461426, 0.02508585548400879, 0.02506319999694824, 0.025091680526733398, 0.025047967910766602, 0.025085792541503907, 0.02510032081604004, 0.02507939147949219, 0.026817983627319335, 0.025633312225341796, 0.025157663345336916, 0.025030656814575194, 0.024874015808105467, 0.0247490234375, 0.024786367416381835, 0.024819616317749024, 0.024828575134277345, 0.024749664306640624, 0.02475254440307617, 0.02472265625, 0.024805536270141603, 0.024782495498657228, 0.024849376678466796, 0.024752256393432617, 0.024803199768066407, 0.02477670478820801, 0.024797183990478516, 0.02480678367614746, 0.024849023818969727, 0.024897504806518554, 0.024810911178588867, 0.02500262451171875, 0.025083904266357423, 0.024908832550048828, 0.02492313575744629, 0.025295072555541993, 0.024911615371704103, 0.02492736053466797, 0.024931200027465822, 0.024885248184204102, 0.024977407455444335, 0.024970560073852538, 0.024937152862548828, 0.024962528228759766, 0.02497385597229004, 0.025090272903442384, 0.025054784774780275, 0.025063648223876953, 0.025122943878173827, 0.02509779167175293, 0.02503868865966797, 0.02501475143432617, 0.025192447662353516, 0.025184255599975586, 0.02507366371154785, 0.025042943954467774, 0.025050239562988283, 0.025043584823608397, 0.025061632156372072, 0.02501171112060547, 0.024988384246826173, 0.025006048202514647, 0.025040159225463866, 0.025034496307373047, 0.025043743133544922, 0.025182207107543944, 0.025091520309448244, 0.025104543685913087, 0.025127328872680665, 0.02509414482116699, 0.025134464263916016, 0.02712031936645508, 0.0257640323638916, 0.02525292778015137, 0.02502751922607422, 0.02495644760131836, 0.024839744567871094, 0.024800128936767578, 0.024805248260498045, 0.02480342483520508, 0.024747072219848634, 0.024815616607666017, 0.02475632095336914, 0.024797311782836916, 0.024789600372314452, 0.02477280044555664, 0.024778751373291014, 0.024823392868041992, 0.024838560104370116, 0.024812576293945312, 0.02482275199890137, 0.02493440055847168, 0.02491548728942871, 0.024934879302978517, 0.024905920028686523, 0.024850240707397463, 0.024929344177246095, 0.025004383087158202, 0.024928672790527344, 0.024958431243896486, 0.024922752380371095, 0.024999296188354492, 0.024918880462646485, 0.024941535949707033, 0.025020736694335938, 0.024952447891235352, 0.02516476821899414, 0.02505523109436035, 0.02512009620666504, 0.025387680053710938, 0.025234560012817382, 0.02519753646850586, 0.025200544357299806, 0.025130559921264648, 0.02511199951171875, 0.02511510467529297, 0.025061311721801757, 0.02505529594421387, 0.02514384078979492, 0.025151584625244142, 0.025115840911865233, 0.025147775650024413, 0.025116384506225584, 0.025125503540039062, 0.02510793685913086, 0.02515817642211914, 0.025110240936279296, 0.025116960525512697, 0.025169919967651368, 0.02529280090332031, 0.025273920059204102, 0.02519718360900879, 
0.02522438430786133, 0.0252010555267334, 0.0268469123840332, 0.02579484748840332, 0.025317375183105468, 0.02509823989868164, 0.02491187286376953, 0.02488319969177246, 0.024913087844848632, 0.024941280364990236, 0.024776159286499025, 0.024808063507080078, 0.024811519622802734, 0.02488319969177246, 0.024936447143554686, 0.024902719497680664, 0.025015424728393555, 0.024843551635742187, 0.02496361541748047, 0.024809471130371095, 0.024829504013061523, 0.024907360076904295, 0.024894304275512695, 0.02488934326171875, 0.024859935760498046, 0.0248306884765625, 0.024879104614257814, 0.024965087890625, 0.024952863693237303, 0.024993408203125, 0.02500783920288086, 0.024905696868896484, 0.02488595199584961, 0.02487299156188965, 0.024917728424072267, 0.024840448379516603, 0.024893440246582032, 0.02490127944946289, 0.025043296813964843, 0.025052543640136718, 0.025073535919189455, 0.025149311065673828, 0.025172191619873045, 0.025146080017089845, 0.02505107116699219, 0.025020416259765626, 0.025038976669311524, 0.025083711624145508, 0.025012287139892578, 0.025028608322143556, 0.025152767181396484, 0.02504764747619629, 0.025007295608520507, 0.024998880386352538, 0.02514486312866211, 0.025086431503295897, 0.025062688827514647, 0.02502934455871582, 0.025165824890136718, 0.02518016052246094, 0.025141248703002928, 0.025126911163330077, 0.025188480377197266, 0.02517571258544922, 0.025095775604248048, 0.02686566352844238, 0.025819040298461913, 0.02537651252746582, 0.02510793685913086, 0.0250002555847168, 0.02485305595397949, 0.024793088912963866, 0.02485862350463867, 0.024801279067993166, 0.02486390495300293, 0.02481443214416504, 0.02490585517883301, 0.024842079162597657, 0.02483407974243164, 0.024899391174316405, 0.024865184783935547, 0.024939615249633788, 0.02483475112915039, 0.024827104568481445, 0.024873023986816407, 0.02498633575439453, 0.02495052719116211, 0.024877023696899415, 0.025028127670288086, 0.02495916748046875, 0.02488697624206543, 0.024873247146606447, 0.02490230369567871, 0.02494998359680176, 0.02499247932434082, 0.02487276840209961, 0.024899168014526366, 0.024981376647949218, 0.024945375442504882, 0.02490713691711426, 0.024918527603149415, 0.02509020805358887, 0.02517398452758789, 0.025065568923950194, 0.02507561683654785, 0.025231359481811523, 0.02527027130126953, 0.025167680740356444, 0.025213247299194337, 0.02511840057373047, 0.025165023803710936, 0.025131744384765627, 0.0250883846282959, 0.02518412780761719, 0.025178112030029298, 0.02506547164916992, 0.02516377639770508, 0.02505120086669922, 0.02522105598449707, 0.02521446418762207, 0.025340320587158204, 0.025060991287231445, 0.02510220718383789, 0.025112512588500977, 0.02503094482421875, 0.025100671768188476, 0.025300991058349608, 0.025157535552978515, 0.0270328311920166, 0.025862592697143555, 0.025252351760864256, 0.02495052719116211, 0.024934463500976563, 0.024829952239990235, 0.024739839553833007, 0.02480476760864258, 0.02480803108215332, 0.024854528427124024, 0.024771743774414063, 0.024744800567626953, 0.024961023330688475, 0.024885248184204102, 0.02484223937988281, 0.024958976745605467, 0.0248668155670166, 0.02491801643371582, 0.024930335998535155, 0.024979488372802734, 0.024934303283691405, 0.02504297637939453, 0.02503696060180664, 0.025007936477661134, 0.024967199325561525, 0.02493257522583008, 0.02505625534057617, 0.024963264465332032, 0.024869504928588866, 0.02501411247253418, 0.02507379150390625, 0.024882944107055664, 0.024879423141479493, 0.024937599182128907, 0.02498454475402832, 0.025028448104858398, 0.02494063949584961, 
0.025156543731689452, 0.025242687225341797, 0.025261568069458007, 0.02514780807495117, 0.02509427261352539, 0.025094112396240233, 0.025208703994750975, 0.0250446720123291, 0.025026912689208983, 0.025200639724731445, 0.025036672592163085, 0.025161855697631835, 0.025093151092529298, 0.025067615509033202, 0.0251278076171875, 0.025049152374267577, 0.025126848220825195, 0.02528188705444336, 0.02513372802734375, 0.025169984817504883, 0.025071552276611328, 0.025105440139770507, 0.025183231353759765, 0.025234495162963867, 0.025068447113037108, 0.025281696319580077, 0.026836832046508788, 0.0257039680480957, 0.02536716842651367, 0.025024511337280272, 0.024860671997070313, 0.024833856582641603, 0.024774848937988283, 0.024790111541748046, 0.024804256439208985, 0.024911808013916015, 0.024932416915893554, 0.024887296676635744, 0.024829023361206053, 0.024826784133911133, 0.02503215980529785, 0.0247956485748291, 0.024944671630859373, 0.024895328521728516, 0.024957088470458983, 0.02488297653198242, 0.02494691276550293, 0.02492416000366211, 0.02489900779724121, 0.0249204158782959, 0.02488956832885742, 0.02488051223754883, 0.024998111724853514, 0.025029024124145507, 0.024946687698364257, 0.024944639205932616, 0.025021728515625, 0.02501702308654785, 0.025868320465087892, 0.02492019271850586, 0.024973119735717773, 0.024972511291503907, 0.02519945526123047, 0.025182207107543944, 0.025158655166625975, 0.025207199096679688, 0.025188255310058593, 0.025162431716918947, 0.025136383056640624, 0.025205120086669922, 0.025160064697265627, 0.025098175048828126, 0.02499385643005371, 0.02506051254272461, 0.02512982368469238, 0.0251180477142334, 0.024992416381835938, 0.02512281608581543, 0.02517196846008301, 0.025149023056030274, 0.025100704193115234, 0.025176063537597656, 0.02512076759338379, 0.02507366371154785, 0.025032703399658202, 0.025108160018920897, 0.025114944458007812, 0.025073503494262694, 0.025153600692749023, 0.027019296646118164, 0.02572870445251465, 0.025247936248779298, 0.025061471939086914, 0.02489276885986328, 0.02484614372253418, 0.024853151321411134, 0.024966367721557615, 0.02490675163269043, 0.024895456314086913, 0.024917919158935545, 0.024836191177368162, 0.024838239669799804, 0.024835359573364257, 0.025915456771850587, 0.025456735610961914, 0.024867551803588867, 0.024841983795166014, 0.02495392036437988, 0.024906688690185547, 0.02489571189880371, 0.024948511123657226, 0.02490297508239746, 0.024912351608276366, 0.024842111587524415, 0.02489993667602539, 0.02489321517944336, 0.02504863929748535, 0.025012895584106444, 0.025068767547607423, 0.024933151245117188, 0.024999168395996092, 0.0250534725189209, 0.024977888107299805, 0.02503993606567383, 0.025250751495361327, 0.025190528869628907, 0.02516713523864746, 0.025164384841918946, 0.025165632247924806, 0.02526131248474121, 0.02525689506530762, 0.025126911163330077, 0.025049087524414062, 0.025047040939331053, 0.025053024291992188, 0.02497884750366211, 0.025036991119384764, 0.025059904098510742, 0.02508799934387207, 0.02505523109436035, 0.025026496887207032, 0.025190048217773438, 0.02516828727722168, 0.025124864578247072, 0.02507542419433594, 0.025133056640625, 0.02515519905090332, 0.02509244728088379, 0.025014591217041016, 0.02510963249206543, 0.02510867118835449, 0.025156288146972655]",tokens/s,39.91534136810627,, 
float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1006.432256,13915.58656,0.0,13520.338944,13508.0832,s,1,7.23065673828125,7.23065673828125,0.0,7.23065673828125,7.23065673828125,7.23065673828125,7.23065673828125,[7.23065673828125],,kWh,8.440995179156138e-06,9.145939481327352e-07,4.60222590401016e-06,1.3957815031299033e-05,,MB,1360.71168,13930.266624,0.0,13524.533248,11787.729408,s,10,4.1068275451660154,0.4106827545166015,0.005539901845629834,0.412336669921875,0.4148724090576172,0.41568087921142577,0.41632765533447263,"[0.39646453857421876, 0.41363455200195315, 0.4117584228515625, 0.4146927490234375, 0.41122003173828126, 0.41462997436523436, 0.40607705688476564, 0.40894595336914064, 0.41648934936523435, 0.4129149169921875]",tokens/s,623.3522035794455,kWh,1.1856260370501634e-05,1.3075373971009456e-06,7.816406253120523e-06,2.0980204020723102e-05,tokens/kWh,12201978.576906934,MB,1378.271232,13932.363776,0.0,13526.6304,11790.353408,s,10,34.81288720703125,3.4812887207031253,0.0023177707460265234,3.48176611328125,3.4838597412109373,3.483888854980469,3.483912145996094,"[3.479330322265625, 3.47658447265625, 3.478411865234375, 3.4817265625, 3.4818056640625, 3.48391796875, 3.48139599609375, 3.482677978515625, 3.48318310546875, 3.483853271484375]",tokens/s,18.096746651704233,kWh,0.00010191496828449954,1.124156451406639e-05,6.774890419907919e-05,0.0001809054369976451,tokens/kWh,348248.23977413174,,s,630,34.810988906860345,0.05525553794739738,0.0006136812357323722,0.05514700698852539,0.05558548202514649,0.05579898166656494,0.05940476905822754,"[0.05961724853515625, 0.05585667037963867, 0.05495641708374024, 0.054973918914794924, 0.05516556930541992, 0.055015487670898436, 0.05493465423583985, 0.055062496185302734, 0.055241569519042966, 0.054937599182128906, 0.05496035385131836, 0.05514009475708008, 0.05496604919433594, 0.054927616119384765, 0.055135295867919924, 0.05503388977050781, 0.05488323211669922, 0.05559500885009765, 0.05594112014770508, 0.05535081481933594, 0.05513983917236328, 0.05546223831176758, 0.055322528839111325, 0.055203903198242185, 0.05487887954711914, 0.05517644882202148, 0.055026432037353516, 0.054916255950927736, 0.05512483215332031, 0.05528707122802735, 0.0549257926940918, 0.054870529174804686, 0.055056129455566406, 0.054988800048828126, 0.055008575439453124, 0.05533929443359375, 0.05595782470703125, 0.05527305603027344, 0.05509571075439453, 0.0552916145324707, 0.05538444900512695, 0.054953983306884766, 0.055347198486328124, 0.05514035034179687, 0.05493929672241211, 0.05486012649536133, 0.055177215576171876, 0.05527772903442383, 0.05491439819335937, 0.05482342529296875, 0.05513216018676758, 0.05497241592407227, 0.054921215057373046, 0.05536972808837891, 0.055447200775146484, 0.05504441452026367, 0.054986785888671875, 0.0553779182434082, 0.05544550323486328, 0.05499103927612305, 0.055147518157958986, 0.05540742492675781, 0.05504771041870117, 
0.05940633773803711, 0.05566847991943359, 0.05490070343017578, 0.05526556777954102, 0.05530812835693359, 0.054927520751953125, 0.05491059112548828, 0.05497673416137695, 0.05486812973022461, 0.05479811096191406, 0.055089439392089844, 0.05529388809204101, 0.05492531204223633, 0.05482223892211914, 0.05509120178222656, 0.055005504608154294, 0.05515420913696289, 0.05586175918579102, 0.055904575347900394, 0.055320384979248044, 0.05495827102661133, 0.055174720764160155, 0.05532921600341797, 0.05489459228515625, 0.05484112167358399, 0.055129470825195315, 0.054958847045898436, 0.05486774444580078, 0.055032127380371096, 0.05523455810546875, 0.05494716644287109, 0.05482902526855469, 0.05505209732055664, 0.054889152526855466, 0.05501279830932617, 0.055610111236572266, 0.05567859268188476, 0.05517366409301758, 0.05501887893676758, 0.05526124954223633, 0.055063232421875, 0.054905696868896486, 0.05502860641479492, 0.055279071807861326, 0.05497296142578125, 0.05489459228515625, 0.055198848724365236, 0.05523875045776367, 0.054741695404052736, 0.05498275375366211, 0.05521612930297851, 0.054943294525146485, 0.0550629768371582, 0.055465984344482425, 0.05561139297485351, 0.05503939056396484, 0.05493356704711914, 0.05521593475341797, 0.05487225723266602, 0.054779552459716795, 0.05531036758422852, 0.055315296173095704, 0.05494371032714844, 0.05936095809936524, 0.055421886444091795, 0.05492531204223633, 0.05503180694580078, 0.05505023956298828, 0.054928577423095704, 0.05520182418823242, 0.05532956695556641, 0.05497241592407227, 0.05482086563110351, 0.05518947219848633, 0.05514448165893555, 0.054880256652832034, 0.05504771041870117, 0.055366111755371095, 0.05501529693603516, 0.05499871826171875, 0.055837120056152344, 0.05561958312988281, 0.05523865509033203, 0.05508937454223633, 0.055291072845458984, 0.05507132720947266, 0.05492940902709961, 0.05508297729492188, 0.05528579330444336, 0.05537363052368164, 0.05491852951049805, 0.05507564926147461, 0.05480243301391602, 0.05491878509521484, 0.05509772872924805, 0.055201793670654295, 0.05508095932006836, 0.055136127471923826, 0.055441150665283205, 0.055521663665771485, 0.055103488922119144, 0.055054302215576174, 0.05532825469970703, 0.05511017608642578, 0.054916702270507815, 0.0551833267211914, 0.055353279113769534, 0.054927871704101565, 0.054890464782714844, 0.05505961608886719, 0.05485862350463867, 0.05494784164428711, 0.05499699020385742, 0.05527273559570312, 0.055093982696533206, 0.055144447326660156, 0.05544755172729492, 0.055649887084960936, 0.055190975189208985, 0.055008224487304684, 0.05524009704589844, 0.055091102600097655, 0.05490963363647461, 0.055163902282714845, 0.05534822463989258, 0.0551847038269043, 0.059714046478271485, 0.055820064544677736, 0.05488899230957031, 0.05475539016723633, 0.05518297576904297, 0.05488063812255859, 0.05479219055175781, 0.05497151947021484, 0.055214977264404295, 0.05499526214599609, 0.05489753723144531, 0.055021793365478515, 0.055495262145996094, 0.054941631317138674, 0.054982017517089844, 0.05513827133178711, 0.0551759033203125, 0.05552313613891602, 0.05615840148925781, 0.05567027282714844, 0.05519724655151367, 0.05495852661132813, 0.05512857437133789, 0.055277568817138675, 0.05491616058349609, 0.05479849624633789, 0.05514934539794922, 0.055375873565673826, 0.05494169616699219, 0.05501337432861328, 0.055277183532714845, 0.055064414978027346, 0.054996959686279295, 0.055196224212646486, 0.05521408081054688, 0.0551014404296875, 0.05533612823486328, 0.05569823837280274, 0.05528956985473633, 0.05498294448852539, 
0.055332862854003906, 0.05545369720458984, 0.05507823944091797, 0.055253662109375, 0.05513011169433594, 0.05590220642089844, 0.05496201705932617, 0.054876319885253905, 0.055191551208496094, 0.0550010871887207, 0.05515673446655273, 0.055228416442871096, 0.05534668731689453, 0.055062110900878904, 0.055186336517333984, 0.055562015533447265, 0.0552655029296875, 0.055215198516845705, 0.055352222442626955, 0.055473377227783206, 0.05512623977661133, 0.05502828979492187, 0.055201217651367186, 0.06010291290283203, 0.05593088150024414, 0.05529910278320312, 0.055057056427001955, 0.05504460906982422, 0.05524448013305664, 0.055281822204589846, 0.055051265716552736, 0.05487651062011719, 0.05511231994628906, 0.055104705810546876, 0.054945793151855465, 0.05492816162109375, 0.05553974533081055, 0.05508224105834961, 0.05495011138916016, 0.055306785583496096, 0.056299232482910154, 0.05577763366699219, 0.055227775573730466, 0.05527199935913086, 0.05534515380859375, 0.05492150497436524, 0.05487353515625, 0.055210208892822264, 0.05485564804077148, 0.05482432174682617, 0.05512879943847656, 0.05527961730957031, 0.05489664077758789, 0.05491507339477539, 0.055136257171630856, 0.05495107269287109, 0.054843456268310546, 0.05535391998291016, 0.05587071990966797, 0.055389152526855466, 0.05507891082763672, 0.055384159088134766, 0.0555068473815918, 0.05519683074951172, 0.05499580764770508, 0.05527347183227539, 0.05492531204223633, 0.05490073776245117, 0.0551354866027832, 0.055386528015136716, 0.054997344970703126, 0.05479119873046875, 0.05513264083862305, 0.05532460784912109, 0.05499552154541015, 0.055003135681152344, 0.05550080108642578, 0.055264961242675784, 0.05504832077026367, 0.0552163200378418, 0.05546332931518555, 0.05515529632568359, 0.05515468978881836, 0.05513779067993164, 0.05533542251586914, 0.055091007232666016, 0.05929948806762695, 0.055597408294677735, 0.05503334426879883, 0.05501337432861328, 0.05524531173706055, 0.05498470306396484, 0.05496627044677734, 0.056172542572021485, 0.05524070358276367, 0.05496422576904297, 0.05497987365722656, 0.05507670211791992, 0.05491996765136719, 0.05488649749755859, 0.05508444976806641, 0.05526179122924805, 0.055316478729248046, 0.05571379089355469, 0.05575040054321289, 0.05581644821166992, 0.05528985595703125, 0.05513382339477539, 0.055296062469482425, 0.05501996612548828, 0.05490441513061523, 0.05490284729003906, 0.05533510589599609, 0.05503286361694336, 0.054809120178222655, 0.055144927978515626, 0.05556633758544922, 0.055099552154541015, 0.055344993591308594, 0.055223838806152344, 0.05536310577392578, 0.05515363311767578, 0.055504096984863284, 0.05568723297119141, 0.055143009185791014, 0.055037086486816406, 0.05524371337890625, 0.0554598388671875, 0.05535948944091797, 0.055003135681152344, 0.05521408081054688, 0.05516611099243164, 0.05498108673095703, 0.05504025650024414, 0.05517923355102539, 0.0549582405090332, 0.05498400115966797, 0.05532307052612305, 0.05569337463378906, 0.05532281494140625, 0.05517907333374023, 0.05549484634399414, 0.055929985046386715, 0.05517401504516602, 0.055231937408447264, 0.05521648025512695, 0.05544777679443359, 0.05491507339477539, 0.054904129028320314, 0.059627521514892576, 0.0556767692565918, 0.05497052764892578, 0.05488019180297852, 0.05516060638427735, 0.055197982788085936, 0.05492486572265625, 0.05493948745727539, 0.05522419357299805, 0.054878559112548825, 0.05492095947265625, 0.055066558837890626, 0.05529167938232422, 0.055015583038330075, 0.05490969467163086, 0.05519779205322266, 0.05501327896118164, 0.05542399978637695, 
0.055737281799316404, 0.05570284652709961, 0.055199649810791014, 0.05498316955566406, 0.05515708923339844, 0.05543731307983398, 0.05504409790039062, 0.05501542282104492, 0.05514009475708008, 0.05501545715332031, 0.054790367126464845, 0.05498470306396484, 0.055400447845458986, 0.055060256958007814, 0.05492079925537109, 0.055320735931396484, 0.0553026237487793, 0.05527347183227539, 0.05528575897216797, 0.05548441696166992, 0.05544947052001953, 0.055109760284423825, 0.05506588745117187, 0.05534908676147461, 0.05511667251586914, 0.054860897064208984, 0.05529692840576172, 0.05535868835449219, 0.05511862564086914, 0.054986560821533206, 0.055228641510009766, 0.055092929840087894, 0.05489078521728516, 0.05534112167358399, 0.05542019271850586, 0.055295841217041015, 0.055279937744140625, 0.05532291030883789, 0.05561161422729492, 0.05532262420654297, 0.05517014312744141, 0.055371936798095704, 0.055405311584472657, 0.05507049560546875, 0.05506851196289062, 0.06019071960449219, 0.05566320037841797, 0.055144447326660156, 0.05501683044433594, 0.054844032287597655, 0.0551464958190918, 0.05524070358276367, 0.05492531204223633, 0.054988800048828126, 0.05521337509155273, 0.05494441604614258, 0.05493571090698242, 0.054994815826416014, 0.05523427200317383, 0.054968608856201175, 0.054945793151855465, 0.05530112075805664, 0.0558766098022461, 0.05554995346069336, 0.05528937530517578, 0.055292385101318356, 0.05523155212402344, 0.054999969482421876, 0.05509040069580078, 0.05530502319335937, 0.05501353454589844, 0.05490796661376953, 0.055114177703857424, 0.055296417236328124, 0.05494777679443359, 0.05510758590698242, 0.05517926406860352, 0.05503369522094727, 0.05502787017822266, 0.055713024139404294, 0.0558969612121582, 0.05558464050292969, 0.05523984146118164, 0.055331680297851564, 0.0554598388671875, 0.05509120178222656, 0.05505843353271484, 0.05521337509155273, 0.05512406539916992, 0.05495366287231445, 0.0549587516784668, 0.05530035018920899, 0.05519769668579102, 0.05500831985473633, 0.05513478469848633, 0.05532505416870117, 0.05522227096557617, 0.05528575897216797, 0.055578624725341794, 0.055547904968261716, 0.055103103637695314, 0.0551038703918457, 0.055382015228271485, 0.055121150970458985, 0.055027713775634764, 0.05519356918334961, 0.05528656005859375, 0.05510380935668945, 0.05940092849731445, 0.05587148666381836, 0.055621631622314455, 0.054937599182128906, 0.055136257171630856, 0.055300094604492187, 0.05487615966796875, 0.05493091201782226, 0.05524124908447266, 0.05494972610473633, 0.05499100875854492, 0.05506867218017578, 0.05522431945800781, 0.05488412857055664, 0.05482723236083984, 0.05511756896972656, 0.05513804626464844, 0.05545830535888672, 0.05572608184814453, 0.05577276611328125, 0.055468448638916014, 0.05595897674560547, 0.05523091125488281, 0.05520352172851563, 0.05499667358398438, 0.055024158477783205, 0.055150657653808596, 0.0553535041809082, 0.054937599182128906, 0.05505228805541992, 0.05528339385986328, 0.054947807312011716, 0.05489904022216797, 0.05516060638427735, 0.055744735717773435, 0.05527961730957031, 0.05526528167724609, 0.05571526336669922, 0.05554547119140625, 0.05520835113525391, 0.05502620697021485, 0.05521612930297851, 0.05505228805541992, 0.05486796951293945, 0.0550167350769043, 0.055306976318359374, 0.055048030853271486, 0.05509667205810547, 0.055118656158447264, 0.05538137435913086, 0.05515737533569336, 0.05496422576904297, 0.05532876968383789, 0.05533491134643555, 0.05522784042358399, 0.055298465728759766, 0.055691326141357425, 0.05511996841430664, 0.0550645751953125, 
0.055217601776123046, 0.05544976043701172, 0.05510700988769531, 0.05499593734741211, 0.05967871856689453, 0.05602099227905273, 0.055136257171630856, 0.05525299072265625, 0.05501747131347656, 0.05518691253662109, 0.055155136108398437, 0.05559305572509766, 0.05493718338012695, 0.05482947158813477, 0.05523660659790039, 0.055005054473876956, 0.054780033111572264, 0.05514854431152344, 0.055273502349853516, 0.05506083297729492, 0.055283233642578124, 0.05596579360961914, 0.05585715103149414, 0.05543894577026367, 0.055049697875976564, 0.055398399353027344, 0.05519190216064453, 0.054932254791259766, 0.05519955062866211, 0.05531238555908203, 0.05496745681762695, 0.05486678314208984, 0.055166656494140626, 0.055295806884765625, 0.05495663833618164, 0.054929313659667967, 0.055144447326660156, 0.05511782455444336, 0.05533472061157227, 0.05566278457641602, 0.05574166488647461, 0.05528860855102539, 0.05508915328979492, 0.05545574569702148, 0.05543526458740235, 0.05504409790039062, 0.055241790771484375, 0.05528857421875, 0.05504403305053711, 0.05483340835571289, 0.054924510955810545, 0.05527836990356445, 0.054973823547363285, 0.0548741455078125, 0.05515311813354492, 0.05556032180786133, 0.05528115081787109, 0.05519427108764648, 0.0553752326965332, 0.05564873504638672, 0.05530035018920899, 0.05531187057495117, 0.05525324630737305, 0.0551649284362793, 0.05506233596801758, 0.055091392517089846, 0.05539430236816406]",tokens/s,18.097733496902848,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 1020, in __init__ self.transformer = GPTJModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 712, in __init__ self.h = nn.ModuleList([GPTJBlock(config, layer_idx=i) for i in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 712, in self.h = nn.ModuleList([GPTJBlock(config, layer_idx=i) for i in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 492, in __init__ self.mlp = GPTJMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gptj/modeling_gptj.py"", line 472, in __init__ self.fc_in = nn.Linear(embed_dim, intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 94.12 MiB is free. Process 28160 has 14.65 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 2.49 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,902.3488,6993.870848,0.0,6591.348736,6590.657536,s,1,7.7316650390625,7.7316650390625,0.0,7.7316650390625,7.7316650390625,7.7316650390625,7.7316650390625,[7.7316650390625],,kWh,6.249157045840547e-06,6.81981213064745e-07,2.031390514009579e-06,8.962528772914872e-06,,MB,1234.321408,7258.112,0.0,6845.104128,6805.125632,s,10,2.2387210540771485,0.22387210540771485,0.006860980982676506,0.22428342437744142,0.230287255859375,0.23189871673583984,0.23318788543701172,"[0.205762939453125, 0.22159222412109375, 0.2255982666015625, 0.22400172424316406, 0.22380921936035156, 0.22599891662597657, 0.22992915344238282, 0.22456512451171876, 0.22395330810546876, 0.2335101776123047]",tokens/s,1143.510039063482,kWh,6.283555607446845e-06,6.924726440920054e-07,4.172225560000033e-06,1.1148253811538883e-05,tokens/kWh,22963237.501377113,MB,1246.425088,7260.209152,0.0,6847.20128,6805.128192,s,10,17.170465332031252,1.7170465332031248,0.0037074144946325858,1.71831298828125,1.7202278564453124,1.7209889892578125,1.7215978955078124,"[1.710521240234375, 1.7104453125, 1.717341552734375, 1.7161055908203124, 1.7200587158203124, 1.7158358154296875, 1.719595703125, 1.719284423828125, 1.7217501220703124, 1.71952685546875]",tokens/s,36.69091010741243,kWh,5.047899915463414e-05,5.568098191081764e-06,3.353133238059929e-05,8.95784297263152e-05,tokens/kWh,703294.3108344382,,s,630,17.16719076919555,0.027249509157453264,0.0003084332683327531,0.027201295852661133,0.027428953170776368,0.02754782371520996,0.029114681129455568,"[0.02920857620239258, 0.027962623596191408, 0.027306751251220705, 0.02713363265991211, 0.02696633529663086, 0.0268569278717041, 0.026939264297485353, 0.0270032958984375, 0.027158784866333007, 0.02697216033935547, 0.02710323143005371, 0.026866912841796875, 0.027019039154052734, 0.02698137664794922, 0.026959871292114256, 0.027086143493652345, 0.027078880310058593, 0.02702998352050781, 0.02715190315246582, 0.0271549129486084, 0.02708684730529785, 0.027023359298706053, 0.026994688034057617, 0.02690457534790039, 0.026973791122436523, 0.026966367721557617, 0.02702547264099121, 0.027062271118164064, 0.027107328414916993, 0.027037696838378908, 0.027104864120483397, 0.02713360023498535, 0.027085567474365236, 0.027183103561401366, 0.02717695999145508, 0.02717695999145508, 0.02726924705505371, 0.02724995231628418, 0.027183712005615233, 0.027262975692749023, 0.027143999099731444, 0.02703968048095703, 0.02712396812438965, 0.027084800720214845, 0.027229471206665037, 0.027157215118408202, 0.02711347198486328, 0.027127487182617187, 0.027089120864868164, 0.0271092472076416, 0.027039968490600585, 0.027068416595458986, 0.027164831161499023, 0.027126655578613282, 
0.027124704360961913, 0.027152256011962892, 0.0271648006439209, 0.027328096389770507, 0.027172672271728517, 0.027238079071044922, 0.027163551330566405, 0.027172319412231444, 0.02713654327392578, 0.029059200286865233, 0.027767072677612303, 0.027430912017822266, 0.027130943298339844, 0.02695382308959961, 0.026888288497924805, 0.026823423385620118, 0.02681999969482422, 0.026834623336791992, 0.026835872650146485, 0.0268287353515625, 0.026856544494628907, 0.02683798408508301, 0.02689571189880371, 0.026927743911743164, 0.026850784301757812, 0.02692153549194336, 0.02693049621582031, 0.02693129539489746, 0.026992639541625976, 0.02686934471130371, 0.02687283134460449, 0.02697420883178711, 0.026998144149780273, 0.02692531204223633, 0.027044063568115236, 0.027005376815795897, 0.026913503646850585, 0.026970687866210936, 0.027146047592163085, 0.027150976181030274, 0.027090368270874025, 0.027050559997558593, 0.02710688018798828, 0.027177215576171875, 0.027262720108032226, 0.027237920761108397, 0.02731820869445801, 0.027265695571899413, 0.02716703987121582, 0.027224063873291016, 0.027195072174072264, 0.027248960494995117, 0.027235519409179686, 0.027212608337402345, 0.027276863098144533, 0.027064159393310548, 0.02706697654724121, 0.027244543075561522, 0.02732758331298828, 0.027256864547729492, 0.027294303894042967, 0.027201055526733398, 0.02720844841003418, 0.02732646369934082, 0.0273768310546875, 0.027267904281616212, 0.027340799331665038, 0.02733875274658203, 0.02734489631652832, 0.027337728500366212, 0.027339775085449217, 0.027310144424438475, 0.02922528076171875, 0.028089088439941408, 0.027412416458129883, 0.027137535095214844, 0.026980863571166993, 0.0270231990814209, 0.027017375946044923, 0.026922752380371093, 0.026970495223999025, 0.026967487335205077, 0.02702761650085449, 0.026882335662841796, 0.0269434871673584, 0.02712348747253418, 0.02698467254638672, 0.027031551361083983, 0.02706790351867676, 0.02699235153198242, 0.027068832397460937, 0.02701468849182129, 0.027100000381469726, 0.027064319610595702, 0.02701923179626465, 0.02710905647277832, 0.027117952346801758, 0.02715385627746582, 0.027113792419433593, 0.027152896881103516, 0.027629280090332033, 0.027254783630371093, 0.02714419174194336, 0.0271297607421875, 0.027088991165161135, 0.027185152053833008, 0.027309120178222655, 0.027360191345214845, 0.027394048690795897, 0.02736332893371582, 0.02733670425415039, 0.027389951705932617, 0.02728550338745117, 0.027299840927124022, 0.027241535186767578, 0.02724959945678711, 0.02727071952819824, 0.027408832550048827, 0.02761897659301758, 0.02734444808959961, 0.027466527938842772, 0.02727084732055664, 0.027217376708984376, 0.02726323127746582, 0.027226720809936523, 0.02728976058959961, 0.02738582420349121, 0.02736729621887207, 0.027267072677612306, 0.027381759643554687, 0.02735308837890625, 0.027297792434692384, 0.02759065628051758, 0.027289600372314454, 0.02734489631652832, 0.029165567398071288, 0.027844608306884764, 0.027445247650146484, 0.02715238380432129, 0.02704979133605957, 0.027089439392089843, 0.027046720504760743, 0.026866527557373048, 0.02694963264465332, 0.027118783950805664, 0.027009408950805665, 0.027068864822387694, 0.027024831771850586, 0.02709766387939453, 0.02703900718688965, 0.026954463958740234, 0.027068288803100585, 0.027117055892944338, 0.02703580856323242, 0.027044319152832032, 0.0269816951751709, 0.02692915153503418, 0.027212480545043945, 0.027043840408325196, 0.027088895797729492, 0.02714182472229004, 0.027171104431152344, 0.02714147186279297, 0.02710780715942383, 
0.02711356735229492, 0.02712588882446289, 0.027033599853515625, 0.02709708786010742, 0.027112543106079103, 0.02733353614807129, 0.02736073684692383, 0.027298240661621093, 0.02723961639404297, 0.027226207733154296, 0.027177791595458984, 0.02716057586669922, 0.0271824951171875, 0.027118335723876952, 0.027200767517089844, 0.027300128936767577, 0.027287263870239258, 0.028184736251831054, 0.027277759552001953, 0.02735103988647461, 0.027225183486938476, 0.027244543075561522, 0.027198368072509766, 0.027189504623413085, 0.027268255233764648, 0.027226720809936523, 0.027285375595092774, 0.027407615661621094, 0.027429759979248045, 0.027482112884521483, 0.02749833679199219, 0.0274303035736084, 0.027312671661376953, 0.027383712768554686, 0.029056671142578126, 0.02809231948852539, 0.0275479679107666, 0.02731430435180664, 0.027148191452026366, 0.027002975463867186, 0.027006912231445312, 0.026961376190185547, 0.027017759323120116, 0.026984512329101564, 0.02714419174194336, 0.027166015625, 0.027130048751831056, 0.027044095993041993, 0.02710758399963379, 0.027058176040649414, 0.027076608657836915, 0.02712291145324707, 0.027484960556030273, 0.02711142349243164, 0.027092992782592775, 0.02723347282409668, 0.027165311813354492, 0.02717305564880371, 0.027094688415527344, 0.027172927856445313, 0.02742448043823242, 0.027142240524291993, 0.027117151260375977, 0.027099039077758787, 0.027160671234130858, 0.027085599899291993, 0.027129247665405275, 0.02733647918701172, 0.0273437442779541, 0.027329759597778322, 0.02749932861328125, 0.02735923194885254, 0.027420543670654298, 0.027272352218627928, 0.02740678405761719, 0.027724096298217774, 0.027195615768432616, 0.027242496490478517, 0.027271360397338868, 0.027303743362426757, 0.027267072677612306, 0.027432159423828126, 0.027980512619018554, 0.027295167922973634, 0.027251039505004883, 0.02727497673034668, 0.02727174377441406, 0.02728691291809082, 0.027336864471435546, 0.027231775283813476, 0.027376415252685547, 0.02741196823120117, 0.027474111557006835, 0.027339231491088866, 0.02738096046447754, 0.027368223190307617, 0.027387903213500975, 0.029067264556884766, 0.027836191177368165, 0.027463712692260743, 0.02726691246032715, 0.02712816047668457, 0.026995967864990235, 0.02694790458679199, 0.027057823181152345, 0.02712451171875, 0.027010368347167968, 0.02707321548461914, 0.026979808807373048, 0.02699078369140625, 0.0270728645324707, 0.027080703735351562, 0.02712166404724121, 0.02707587242126465, 0.02703603172302246, 0.02702351951599121, 0.027017248153686522, 0.02718022346496582, 0.027069408416748045, 0.02711347198486328, 0.027090431213378906, 0.02708531188964844, 0.027107328414916993, 0.027181055068969725, 0.027136032104492187, 0.027150304794311523, 0.02707164764404297, 0.02702012825012207, 0.027121152877807617, 0.027070976257324218, 0.027141759872436524, 0.027224031448364258, 0.027498239517211913, 0.027464351654052734, 0.02736947250366211, 0.027398143768310547, 0.027283231735229493, 0.02734716796875, 0.027299840927124022, 0.027271167755126953, 0.027230207443237304, 0.027242528915405274, 0.0272260799407959, 0.027370943069458007, 0.027158496856689453, 0.027165279388427735, 0.027240447998046875, 0.027232255935668945, 0.027262975692749023, 0.027243808746337892, 0.027284032821655275, 0.027229663848876953, 0.02727801513671875, 0.027303936004638672, 0.027254783630371093, 0.027352895736694336, 0.027277088165283202, 0.02735500717163086, 0.027371616363525392, 0.027373760223388673, 0.029223615646362305, 0.02812313652038574, 0.027559455871582032, 0.027306463241577147, 
0.02715443229675293, 0.02715648078918457, 0.027200511932373047, 0.02703984069824219, 0.027116384506225586, 0.027133951187133788, 0.0272609920501709, 0.027222015380859374, 0.027047840118408203, 0.027021408081054688, 0.027084640502929688, 0.02716454315185547, 0.027090591430664064, 0.027221887588500977, 0.027044639587402344, 0.027142112731933593, 0.027228160858154295, 0.0271231689453125, 0.027109792709350586, 0.027191423416137697, 0.027152448654174804, 0.027123647689819334, 0.027222015380859374, 0.027119264602661133, 0.027210079193115234, 0.027241535186767578, 0.02711244773864746, 0.027217088699340822, 0.02712588882446289, 0.027349472045898438, 0.027331872940063475, 0.027454336166381835, 0.027373567581176757, 0.027447296142578126, 0.027441152572631834, 0.027288639068603515, 0.027214111328125, 0.027445472717285157, 0.02736172866821289, 0.027314048767089844, 0.027230335235595704, 0.027282527923583984, 0.027294624328613282, 0.027215167999267577, 0.027300352096557616, 0.027397439956665038, 0.02737241554260254, 0.027295743942260742, 0.027398048400878908, 0.027261024475097657, 0.0273768310546875, 0.027226943969726563, 0.02719968032836914, 0.02730169677734375, 0.027387903213500975, 0.02735308837890625, 0.02732796859741211, 0.027232799530029297, 0.02730803108215332, 0.02919219207763672, 0.028078079223632812, 0.0275230712890625, 0.027225727081298827, 0.027142528533935548, 0.02709199905395508, 0.027031999588012695, 0.027124256134033204, 0.02713113594055176, 0.027097856521606446, 0.027162431716918945, 0.027024959564208983, 0.027148927688598633, 0.02711689567565918, 0.027144224166870116, 0.027119359970092773, 0.02719833564758301, 0.027053056716918947, 0.027160768508911134, 0.02698303985595703, 0.027101375579833983, 0.02716057586669922, 0.027031551361083983, 0.02697395133972168, 0.027001087188720702, 0.02716057586669922, 0.027082752227783204, 0.027198623657226563, 0.027157344818115235, 0.02715648078918457, 0.02716262435913086, 0.027172864913940428, 0.027243839263916016, 0.027165376663208007, 0.027482112884521483, 0.02749235153198242, 0.027453535079956053, 0.0273767032623291, 0.027410335540771484, 0.027525728225708007, 0.027373952865600584, 0.027262943267822266, 0.027235616683959962, 0.027257280349731447, 0.027285791397094725, 0.027256832122802735, 0.027303936004638672, 0.027299840927124022, 0.0273305606842041, 0.027378847122192383, 0.027343711853027343, 0.027205408096313475, 0.027248287200927736, 0.027284032821655275, 0.02727302360534668, 0.027281600952148436, 0.027434015274047853, 0.02737455940246582, 0.02793471908569336, 0.027201536178588868, 0.027389951705932617, 0.027338815689086915, 0.027356895446777343, 0.029134048461914062, 0.028009471893310548, 0.02750054359436035, 0.027178432464599608, 0.02706489562988281, 0.027191295623779296, 0.027084800720214845, 0.02711961555480957, 0.027043455123901366, 0.02783270454406738, 0.026990591049194337, 0.027131904602050783, 0.02709503936767578, 0.027159616470336913, 0.027173824310302734, 0.02706768035888672, 0.02711174392700195, 0.027095455169677735, 0.02712166404724121, 0.027080095291137696, 0.027193952560424804, 0.02718243217468262, 0.027144031524658205, 0.027226367950439454, 0.027232704162597657, 0.02724051284790039, 0.027268224716186524, 0.02722502326965332, 0.027256832122802735, 0.02731340789794922, 0.027325183868408202, 0.027239583969116212, 0.027213760375976562, 0.027335584640502928, 0.02739596748352051, 0.027383935928344726, 0.02754764747619629, 0.027455488204956056, 0.027505983352661134, 0.02733299255371094, 0.02737388801574707, 0.0273973445892334, 
0.027413280487060546, 0.02733465576171875, 0.027258880615234377, 0.027249727249145508, 0.027179967880249022, 0.027264703750610353, 0.027257152557373047, 0.02731007957458496, 0.027215871810913086, 0.027287551879882813, 0.0273305606842041, 0.02738761520385742, 0.0274803524017334, 0.027322368621826174, 0.027983871459960938, 0.02733875274658203, 0.02737923240661621, 0.027333087921142578, 0.027375328063964845, 0.02732681655883789, 0.02740163230895996, 0.029177152633666992, 0.028002592086791993, 0.027438720703125, 0.02719375991821289, 0.027183488845825197, 0.02707865524291992, 0.027107328414916993, 0.027090944290161133, 0.027064031600952148, 0.02710966491699219, 0.02711043167114258, 0.027038719177246092, 0.027127775192260742, 0.0271278076171875, 0.02713599967956543, 0.02712544059753418, 0.027126079559326173, 0.027236352920532225, 0.027174911499023437, 0.027146240234375, 0.027187391281127928, 0.027223680496215822, 0.0271976318359375, 0.02716374397277832, 0.027150272369384765, 0.02713692855834961, 0.02721526336669922, 0.02718377685546875, 0.02724857521057129, 0.027176095962524415, 0.0271430721282959, 0.027064096450805663, 0.027162431716918945, 0.027302303314208985, 0.02735513687133789, 0.027392127990722655, 0.027575519561767577, 0.027407007217407228, 0.0273756160736084, 0.027402240753173827, 0.02730169677734375, 0.027357376098632813, 0.027441152572631834, 0.027184223175048827, 0.027281375885009767, 0.027277599334716796, 0.02725119972229004, 0.02729145622253418, 0.027255136489868163, 0.027236352920532225, 0.02738492774963379, 0.02725161552429199, 0.027254783630371093, 0.02715769577026367, 0.027251520156860352, 0.027335935592651368, 0.027400960922241212, 0.027428863525390625, 0.027389568328857423, 0.027420223236083983, 0.027402784347534178, 0.02738377571105957, 0.027402240753173827]",tokens/s,36.697908730091044,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,889.491456,11766.9888,0.0,11364.466688,11346.760704,s,1,7.131568359375,7.131568359375,0.0,7.131568359375,7.131568359375,7.131568359375,7.131568359375,[7.131568359375],,kWh,7.270566158361665e-06,7.950021588458523e-07,4.214725594003932e-06,1.228029391121145e-05,,MB,1215.393792,12209.487872,0.0,11796.48,11746.412544,s,10,3.7226983337402344,0.3722698333740234,0.006646695702203998,0.37384545898437505,0.37664725341796873,0.37690944213867184,0.3771191931152344,"[0.3530875549316406, 0.3735396423339844, 0.37186837768554687, 0.37415127563476563, 0.3715611572265625, 0.377171630859375, 0.3732493896484375, 0.37536489868164064, 0.37611541748046873, 
0.3765889892578125]",tokens/s,687.6732333634837,kWh,1.078500566354137e-05,1.1894091145518128e-06,7.205174414929136e-06,1.9179589193022317e-05,tokens/kWh,13347522.588916283,MB,1220.665344,12314.345472,0.0,11901.3376,11861.462528,s,10,33.010776611328126,3.3010776611328128,0.0024202063206580165,3.301094970703125,3.3031136474609375,3.3046907348632812,3.305952404785156,"[3.3009365234375, 3.298852783203125, 3.302305908203125, 3.299298828125, 3.296798583984375, 3.30051708984375, 3.30276318359375, 3.30125341796875, 3.306267822265625, 3.301782470703125]",tokens/s,19.084676722928304,kWh,9.644511080896223e-05,1.0638167738516128e-05,6.396904919106731e-05,0.00017105232773854568,tokens/kWh,368308.3465329733,,s,630,33.008018081665085,0.05239367949470642,0.00026876527422206874,0.0523701114654541,0.052641157150268555,0.05274155139923096,0.053587378501892095,"[0.053467262268066404, 0.05259468841552734, 0.05210726547241211, 0.0521069107055664, 0.05206367874145508, 0.05187424087524414, 0.05204207992553711, 0.052021377563476565, 0.052170753479003906, 0.052120960235595704, 0.05214608001708984, 0.052165374755859376, 0.05212307357788086, 0.05206419372558594, 0.052423263549804686, 0.052023265838623045, 0.05214368057250977, 0.05212758255004883, 0.05223199844360352, 0.052232574462890625, 0.05247740936279297, 0.05220403289794922, 0.05222355270385742, 0.05228227233886719, 0.05216435241699219, 0.05221769714355469, 0.052276897430419925, 0.05217359924316406, 0.05222601699829101, 0.05223628616333008, 0.052400127410888675, 0.05226444625854492, 0.052402687072753903, 0.05245132827758789, 0.05278083038330078, 0.052219486236572264, 0.05267638397216797, 0.05249513626098633, 0.05268239974975586, 0.05272172927856445, 0.05249468612670898, 0.0525002555847168, 0.05256009674072266, 0.05245542526245117, 0.05333196640014649, 0.0524505615234375, 0.052558238983154294, 0.052738399505615235, 0.0526154556274414, 0.05237936019897461, 0.05239603042602539, 0.05240563201904297, 0.05273382568359375, 0.05256256103515625, 0.05249241638183594, 0.0525656623840332, 0.05253548812866211, 0.052477760314941405, 0.05251100921630859, 0.052445247650146486, 0.05256732940673828, 0.05248467254638672, 0.052545310974121094, 0.053950431823730466, 0.05299817657470703, 0.052157760620117184, 0.05210416030883789, 0.05232099151611328, 0.05200588989257812, 0.052142017364501955, 0.05199087905883789, 0.05250604629516602, 0.05217718505859375, 0.05207247924804687, 0.05193929672241211, 0.05207376098632813, 0.051995361328125, 0.05204377746582031, 0.05250614547729492, 0.052128223419189455, 0.0521556167602539, 0.05232515335083008, 0.052348926544189454, 0.052456863403320314, 0.052234848022460936, 0.05223014450073242, 0.052219905853271485, 0.052274238586425784, 0.05222086334228516, 0.05217279815673828, 0.05215436935424805, 0.05214352035522461, 0.052134494781494144, 0.05225267028808594, 0.05211545562744141, 0.05248329544067383, 0.05235539245605469, 0.05243747329711914, 0.05233868789672851, 0.05250825500488281, 0.0524681282043457, 0.052661918640136716, 0.05257865524291992, 0.052475135803222654, 0.052388607025146486, 0.052496673583984375, 0.05230118560791015, 0.052349281311035153, 0.052236640930175784, 0.05265683364868164, 0.05231078338623047, 0.05236342239379883, 0.0522891845703125, 0.05233910369873047, 0.05242044830322266, 0.0523897590637207, 0.0523737907409668, 0.052512767791748044, 0.05236928176879883, 0.05246543884277344, 0.052533599853515626, 0.05255372619628906, 0.05252710342407227, 0.05255987167358398, 0.052508991241455076, 0.05278857421875, 0.05360134506225586, 
0.05259740829467773, 0.05229916763305664, 0.05231430435180664, 0.052230785369873044, 0.05204377746582031, 0.05222195053100586, 0.052098304748535156, 0.0523056640625, 0.05230080032348633, 0.05279334259033203, 0.05230195236206055, 0.052236160278320315, 0.0522342414855957, 0.052279102325439454, 0.05213391876220703, 0.05235273742675781, 0.05224604797363281, 0.052550174713134765, 0.05238822555541992, 0.052367359161376956, 0.052256576538085936, 0.052399391174316405, 0.05221263885498047, 0.05219942474365234, 0.05213183975219727, 0.05221376037597656, 0.05219750213623047, 0.052174560546875, 0.05209308624267578, 0.05223164749145508, 0.052107105255126955, 0.05249472045898437, 0.05216697692871094, 0.05238784027099609, 0.05229347229003906, 0.05258665466308594, 0.052409374237060546, 0.05254857635498047, 0.05257235336303711, 0.05251891326904297, 0.05247078323364258, 0.0525302734375, 0.05241231918334961, 0.0533502082824707, 0.052400127410888675, 0.052531200408935545, 0.05241856002807617, 0.05256777572631836, 0.052499839782714844, 0.05246815872192383, 0.0524409294128418, 0.05264051055908203, 0.052448799133300784, 0.052608863830566406, 0.052543998718261715, 0.052536865234375, 0.05255420684814453, 0.052588542938232424, 0.05263564682006836, 0.05254963302612305, 0.052354881286621094, 0.05239241409301758, 0.053678081512451174, 0.052506622314453126, 0.05219708633422852, 0.052098751068115234, 0.05213859176635742, 0.05203148651123047, 0.05226700973510742, 0.05201087951660156, 0.052112510681152344, 0.052329120635986326, 0.05220793533325195, 0.05232271957397461, 0.05213081741333008, 0.05213043212890625, 0.05215958404541016, 0.05193820953369141, 0.05212588882446289, 0.05223199844360352, 0.052637248992919924, 0.05247609710693359, 0.05239955139160156, 0.05214896011352539, 0.052143775939941406, 0.052171070098876955, 0.052797569274902346, 0.052307968139648435, 0.052117504119873044, 0.05214940643310547, 0.05210198211669922, 0.05211119842529297, 0.05217001724243164, 0.05203580856323242, 0.05236163330078125, 0.05232255935668945, 0.05232355117797852, 0.05223904037475586, 0.05258044815063476, 0.05242675018310547, 0.05260083389282227, 0.05242582321166992, 0.05230995178222656, 0.05230486297607422, 0.05242879867553711, 0.052391071319580075, 0.05239590454101563, 0.05240243148803711, 0.05252921676635742, 0.052364128112792965, 0.05245708847045898, 0.0522806396484375, 0.052418785095214845, 0.05243766403198242, 0.052649696350097655, 0.052486431121826174, 0.05254336166381836, 0.052611198425292965, 0.0526703987121582, 0.05259798431396484, 0.052663135528564456, 0.052848575592041015, 0.05252217483520508, 0.052504833221435544, 0.05252774429321289, 0.05381516647338867, 0.05258345413208008, 0.052018657684326175, 0.0520621452331543, 0.05208089447021484, 0.05203401565551758, 0.05230940628051758, 0.05215686416625977, 0.05225289535522461, 0.05224972915649414, 0.05213455963134766, 0.05219689559936523, 0.052062782287597656, 0.05203318405151367, 0.05201103973388672, 0.05204006576538086, 0.05226448059082031, 0.05218352127075195, 0.052421985626220705, 0.05240063858032227, 0.05237491226196289, 0.05222054290771484, 0.052170753479003906, 0.05226211166381836, 0.05222480010986328, 0.05218243026733398, 0.05228774261474609, 0.0521671028137207, 0.0522088623046875, 0.05208544158935547, 0.05229363250732422, 0.052187137603759766, 0.05236940765380859, 0.052143966674804684, 0.05225795364379883, 0.052351646423339844, 0.05245142364501953, 0.05231340789794922, 0.052541664123535156, 0.05235580825805664, 0.05247795104980469, 0.05234483337402344, 0.05233388900756836, 
0.05230438232421875, 0.052393470764160156, 0.05229228973388672, 0.05249801635742188, 0.05240873718261719, 0.05241142272949219, 0.05231919860839844, 0.052408321380615235, 0.05235315322875977, 0.052365184783935544, 0.05233571243286133, 0.05252188873291016, 0.05246156692504883, 0.05253078460693359, 0.05251119995117187, 0.05256800079345703, 0.0525305290222168, 0.052515167236328125, 0.052451648712158204, 0.05243494415283203, 0.05372121429443359, 0.05249718475341797, 0.05213974380493164, 0.05219356918334961, 0.05212364959716797, 0.051889183044433594, 0.05214249420166016, 0.052160606384277344, 0.052294113159179687, 0.05210639953613281, 0.052122398376464846, 0.05218928146362305, 0.05203782272338867, 0.052078369140625, 0.052125694274902344, 0.05206582260131836, 0.052292064666748045, 0.05224595260620117, 0.05247647857666016, 0.05231779098510742, 0.052986270904541014, 0.05226700973510742, 0.052223201751708984, 0.05218511962890625, 0.05232489776611328, 0.052574432373046875, 0.05219532775878906, 0.0521748161315918, 0.05221196746826172, 0.052324127197265625, 0.05225471878051758, 0.05217484664916992, 0.05228047943115234, 0.05220403289794922, 0.05240662384033203, 0.052363105773925785, 0.05267209625244141, 0.05243961715698242, 0.05274240112304687, 0.05249782562255859, 0.05251721572875977, 0.052422782897949216, 0.05252102279663086, 0.05239609527587891, 0.05244432067871094, 0.05239868927001953, 0.05297868728637695, 0.052388126373291016, 0.052394336700439456, 0.052332225799560546, 0.05243360137939453, 0.05237081527709961, 0.05244112014770508, 0.05233929443359375, 0.05253324890136719, 0.05259667205810547, 0.05277939224243164, 0.05256537628173828, 0.05255171203613281, 0.052576320648193356, 0.05254780960083008, 0.05241446304321289, 0.05257948684692383, 0.053553184509277346, 0.05257747268676758, 0.05224256134033203, 0.05225846481323242, 0.052313087463378906, 0.052393985748291017, 0.052372703552246096, 0.05217897415161133, 0.05214799880981445, 0.05220425415039062, 0.052185569763183594, 0.05228297424316406, 0.05222169494628906, 0.052337089538574216, 0.052233440399169925, 0.05203638458251953, 0.052136096954345706, 0.05223392105102539, 0.052471710205078126, 0.052506881713867186, 0.05242265701293945, 0.0522342414855957, 0.05234640121459961, 0.05216304016113281, 0.05225471878051758, 0.05221955108642578, 0.05234118270874023, 0.05225839996337891, 0.05230419158935547, 0.052217857360839844, 0.05222915267944336, 0.052242912292480466, 0.05274051284790039, 0.05250467300415039, 0.052593761444091794, 0.05240889739990234, 0.05253887939453125, 0.05252387237548828, 0.05258444976806641, 0.05279743957519531, 0.052803489685058595, 0.05250620651245117, 0.05247436904907227, 0.05239363098144531, 0.05260323333740234, 0.05235302352905274, 0.052534656524658205, 0.05257484817504883, 0.052432769775390624, 0.05241446304321289, 0.05246783828735352, 0.05247180938720703, 0.052416511535644535, 0.0525140495300293, 0.052685569763183594, 0.05258441543579102, 0.05258038330078125, 0.052408321380615235, 0.052506622314453126, 0.05254969787597656, 0.052534496307373044, 0.052347614288330076, 0.05252710342407227, 0.053510143280029294, 0.05251891326904297, 0.05214617538452149, 0.05232015991210937, 0.05221696090698242, 0.05203839874267578, 0.05212732696533203, 0.0521214714050293, 0.052150527954101564, 0.0521794548034668, 0.052235774993896485, 0.05220403289794922, 0.05216841506958008, 0.052211360931396486, 0.052243392944335935, 0.052125377655029295, 0.0521864013671875, 0.05222848129272461, 0.05251824188232422, 0.05234950256347656, 0.05251116943359375, 
0.05229270553588867, 0.05214022445678711, 0.05214051055908203, 0.05218953704833985, 0.05227715301513672, 0.05232844924926758, 0.05218918228149414, 0.052221630096435545, 0.052263233184814455, 0.05228752136230469, 0.05227718353271484, 0.05240630340576172, 0.052395137786865234, 0.052527999877929686, 0.05241856002807617, 0.05266214370727539, 0.05263167953491211, 0.052646976470947265, 0.05256016159057617, 0.05248678588867187, 0.05245750427246094, 0.05263759994506836, 0.05243913650512695, 0.05257660675048828, 0.05230160140991211, 0.05243686294555664, 0.05242009735107422, 0.052558433532714846, 0.05242256164550781, 0.05242390441894531, 0.05240911865234375, 0.052448577880859375, 0.052592670440673825, 0.052576190948486326, 0.052613857269287106, 0.05265817642211914, 0.052547393798828126, 0.05248863983154297, 0.05249919891357422, 0.052550655364990234, 0.05272364807128906, 0.05254099273681641, 0.053550689697265626, 0.052628257751464846, 0.052184734344482425, 0.05217884826660156, 0.05215999984741211, 0.052278209686279296, 0.05404249572753906, 0.05203318405151367, 0.05221993637084961, 0.05216912078857422, 0.05204899215698242, 0.052111873626708986, 0.052195777893066404, 0.052217857360839844, 0.05211545562744141, 0.051904510498046875, 0.052133888244628904, 0.05221760177612305, 0.05236252975463867, 0.052323295593261716, 0.052291584014892575, 0.052299774169921875, 0.052266654968261717, 0.05218255996704101, 0.052300609588623044, 0.052238014221191405, 0.052571582794189456, 0.0522289924621582, 0.05228265762329101, 0.052177024841308595, 0.05232905578613281, 0.05221305465698242, 0.05245750427246094, 0.052350975036621096, 0.05246035385131836, 0.05240137481689453, 0.052670814514160155, 0.05255606460571289, 0.05262745666503906, 0.05271174240112304, 0.052657569885253906, 0.052544864654541015, 0.05266236877441406, 0.052519775390625, 0.05254963302612305, 0.05271283340454101, 0.05258111953735352, 0.052703102111816405, 0.052596736907958984, 0.05256192016601562, 0.052523006439208986, 0.05259206390380859, 0.05265033721923828, 0.052607200622558595, 0.0526929931640625, 0.05286105728149414, 0.05282393646240234, 0.05276671981811523, 0.05268848037719726, 0.05265609741210937, 0.052738014221191405, 0.05261155319213867, 0.052967422485351565, 0.05364070510864258, 0.052408191680908205, 0.052058975219726564, 0.05212575912475586, 0.05202534484863281, 0.052033729553222656, 0.0521541748046875, 0.052004863739013675, 0.05202860641479492, 0.05207328033447266, 0.05212188720703125, 0.052164321899414064, 0.05234611129760742, 0.05239833450317383, 0.052273662567138675, 0.052176895141601565, 0.05234703826904297, 0.052346721649169925, 0.05247590255737305, 0.052555774688720705, 0.05300387191772461, 0.0522608642578125, 0.05222032165527344, 0.05216460800170898, 0.05229363250732422, 0.05228688049316406, 0.05267241668701172, 0.05221753692626953, 0.05227417755126953, 0.052152320861816405, 0.05238784027099609, 0.05216255950927735, 0.052762622833251956, 0.05232976150512696, 0.05247235107421875, 0.052366657257080076, 0.05249523162841797, 0.052596031188964845, 0.052674785614013675, 0.052475872039794924, 0.05246966552734375, 0.05235772705078125, 0.0525167350769043, 0.05232857513427734, 0.052434207916259766, 0.05235356903076172, 0.05253753662109375, 0.05249980926513672, 0.05242537689208984, 0.05243904113769531, 0.05247180938720703, 0.052647937774658204, 0.05247590255737305, 0.052485374450683596, 0.052603134155273436, 0.052607681274414064, 0.05276176071166992, 0.05254006576538086, 0.05262691116333008, 0.05247235107421875, 0.052580352783203124, 
0.05245542526245117, 0.05239388656616211]",tokens/s,19.086271658035294,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 106.12 MiB is free. Process 168604 has 14.63 GiB memory in use. Of the allocated memory 14.52 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, 
layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 272, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 232.12 MiB is free. Process 83335 has 14.51 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in 
load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 536.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 140.12 MiB is free. Process 71320 has 14.60 GiB memory in use. Of the allocated memory 14.49 GiB is allocated by PyTorch, and 1.57 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,738.77504,1148.125184,0.0,752.877568,713.93792,s,1,7.34036962890625,7.34036962890625,0.0,7.34036962890625,7.34036962890625,7.34036962890625,7.34036962890625,[7.34036962890625],,kWh,6.353637062502078e-06,6.932980891981484e-07,1.972779356002735e-06,9.019714507702962e-06,,MB,1052.758016,1190.068224,0.0,784.334848,638.877696,s,18,0.33536745643615723,0.01863152535756429,0.0004424272807504113,0.01849524784088135,0.018742918586730956,0.01913710832595825,0.020132650852203365,"[0.02038153648376465, 0.01852057647705078, 0.018917503356933593, 0.01836672019958496, 0.018438175201416017, 0.018667007446289064, 0.018447296142578125, 0.018466527938842774, 0.0186680965423584, 0.0185446720123291, 0.018389856338500977, 0.018467391967773437, 0.01844771194458008, 0.01845568084716797, 0.018569536209106445, 0.018547935485839842, 0.018469919204711915, 0.0186013126373291]",tokens/s,13740.152514998752,kWh,6.889110478183946e-07,7.597375514928903e-08,4.5554740322169947e-07,1.220432206189383e-06,tokens/kWh,209761753.8292616,MB,1063.030784,1215.234048,0.0,809.500672,638.880256,s,18,9.91459100341797,0.5508106113009983,0.0024000763314876027,0.5507142639160156,0.5530488464355469,0.554253955078125,0.556790341796875,"[0.5496165161132812, 0.5463087768554687, 0.5512454223632812, 0.5495008544921876, 0.5516490478515625, 0.5506685180664063, 0.5485482177734375, 0.5524202270507812, 0.5505003051757813, 0.5536944580078125, 0.5495416259765625, 0.5477716674804688, 0.550760009765625, 0.5574244384765625, 0.5520377807617187, 0.55111181640625, 0.5490191650390625, 0.5527721557617188]",tokens/s,114.37688146783496,kWh,1.5814841655654132e-05,1.744113168838617e-06,7.741326438222773e-06,2.5300281262715512e-05,tokens/kWh,2490090.8944771993,,s,1134,9.909957987785342,0.008738940024502064,0.00017457159636791476,0.008700208187103271,0.008826022624969482,0.008934936332702637,0.009401976852416994,"[0.008698080062866211, 0.008691583633422852, 0.00869820785522461, 0.008882080078125, 0.008676639556884766, 0.008678208351135254, 0.008724384307861328, 0.008626079559326172, 0.008737183570861817, 0.008652288436889649, 0.008652928352355958, 0.008659008026123047, 0.008684543609619141, 0.01020201587677002, 0.008724896430969239, 0.008742591857910157, 0.00905446434020996, 0.008732416152954102, 0.008682815551757813, 0.008698559761047364, 0.0086364164352417, 0.00865449619293213, 0.008669343948364258, 0.008659135818481446, 0.008666848182678223, 0.008692000389099121, 0.008702272415161133, 0.008726207733154297, 0.008689472198486328, 0.008659135818481446, 0.008650752067565918, 0.008712191581726075, 0.008689663887023925, 0.008630271911621093, 0.00865824031829834, 0.008696767807006836, 
0.008634367942810058, 0.008652544021606446, 0.008642560005187988, 0.008699040412902832, 0.00867801570892334, 0.008753376007080078, 0.008646656036376953, 0.008667455673217774, 0.00867625617980957, 0.008809599876403808, 0.008865440368652344, 0.008675328254699707, 0.008622079849243165, 0.008738816261291504, 0.008706048011779785, 0.008652159690856934, 0.008657535552978516, 0.008615936279296875, 0.008647775650024414, 0.008620960235595703, 0.008732192039489746, 0.008659423828125, 0.00866870403289795, 0.008726207733154297, 0.008808256149291992, 0.008696767807006836, 0.008715680122375488, 0.008609663963317872, 0.008662079811096192, 0.008642784118652344, 0.008667872428894044, 0.008633824348449707, 0.00865123176574707, 0.008599616050720215, 0.00871833610534668, 0.008601344108581543, 0.008675583839416504, 0.008651040077209472, 0.00862169647216797, 0.008681568145751953, 0.008665087699890137, 0.008603872299194336, 0.0087957124710083, 0.008638655662536621, 0.008653887748718262, 0.008728575706481934, 0.008774335861206056, 0.008697343826293944, 0.008635168075561523, 0.00862003231048584, 0.008642848014831544, 0.00863372802734375, 0.00868342399597168, 0.008702400207519532, 0.008648256301879882, 0.008607295989990235, 0.008640992164611816, 0.008669088363647461, 0.00866543960571289, 0.00863212776184082, 0.008634495735168456, 0.008662464141845704, 0.008659520149230958, 0.008668479919433593, 0.008686495780944823, 0.008627264022827149, 0.008707008361816405, 0.00872163200378418, 0.008699711799621581, 0.00865817642211914, 0.00868937587738037, 0.008644864082336426, 0.008660736083984375, 0.008640512466430664, 0.008648703575134278, 0.008685055732727051, 0.008741087913513183, 0.008677536010742188, 0.008675456047058105, 0.008609919548034669, 0.00873574447631836, 0.00874294376373291, 0.0086843843460083, 0.00869711971282959, 0.008676063537597657, 0.008671232223510742, 0.008640768051147461, 0.0086812162399292, 0.00870969581604004, 0.008673727989196777, 0.008753472328186036, 0.008741184234619141, 0.008696288108825683, 0.00866703987121582, 0.008685759544372559, 0.008642368316650391, 0.008667136192321777, 0.008686816215515137, 0.008765343666076661, 0.008622976303100586, 0.008666560173034668, 0.00866323184967041, 0.008681856155395507, 0.008630335807800293, 0.008644543647766113, 0.008699071884155274, 0.008700736045837402, 0.00862003231048584, 0.008673279762268067, 0.009494112014770508, 0.009800095558166504, 0.00884489631652832, 0.008710528373718262, 0.00870809555053711, 0.008605728149414063, 0.008642560005187988, 0.008629983901977539, 0.00878211212158203, 0.00865609645843506, 0.008667584419250488, 0.00862831974029541, 0.008659296035766602, 0.008830880165100098, 0.008689663887023925, 0.008662783622741699, 0.008651007652282714, 0.00865510368347168, 0.008740480422973634, 0.008760512351989746, 0.008694720268249512, 0.008656319618225097, 0.008684096336364746, 0.00864633560180664, 0.008671551704406739, 0.008724703788757324, 0.010385184288024903, 0.008725855827331543, 0.00872659206390381, 0.008747808456420899, 0.008634176254272461, 0.008720383644104004, 0.008636287689208984, 0.008752287864685059, 0.00866806411743164, 0.008665151596069336, 0.008656031608581543, 0.00886569595336914, 0.008694399833679199, 0.008632991790771484, 0.00869331169128418, 0.00864470386505127, 0.008679360389709472, 0.00864249610900879, 0.00875276756286621, 0.00867414379119873, 0.008666496276855468, 0.008718976020812988, 0.008902239799499511, 0.008694175720214845, 0.008732735633850098, 0.008734656333923339, 0.008696096420288086, 0.008684864044189453, 
0.008642368316650391, 0.008657504081726074, 0.00867948818206787, 0.008652735710144042, 0.008668864250183105, 0.008835071563720704, 0.008702143669128418, 0.008659071922302246, 0.008625696182250977, 0.008686047554016113, 0.008634367942810058, 0.008699904441833496, 0.008650752067565918, 0.008647711753845214, 0.008690655708312988, 0.008704000473022461, 0.008652704238891602, 0.008661087989807128, 0.008671520233154296, 0.008670944213867188, 0.008675328254699707, 0.008675328254699707, 0.008648320198059082, 0.00865932846069336, 0.008644736289978028, 0.008820608139038085, 0.008935040473937988, 0.008777088165283203, 0.008713088035583495, 0.00883456039428711, 0.008708736419677735, 0.00869375991821289, 0.008703871726989746, 0.008654175758361816, 0.008674079895019532, 0.00874067211151123, 0.00866323184967041, 0.009287232398986817, 0.008870335578918457, 0.008737792015075683, 0.009143296241760255, 0.008714048385620118, 0.008720576286315918, 0.00871628761291504, 0.008715744018554688, 0.008659487724304199, 0.008679424285888672, 0.008673088073730469, 0.008613408088684082, 0.008657055854797363, 0.00873475170135498, 0.008648832321166993, 0.008685695648193359, 0.008648672103881837, 0.00860598373413086, 0.008652192115783691, 0.008644063949584961, 0.008736672401428223, 0.008721376419067383, 0.008652576446533203, 0.008675552368164063, 0.008694080352783204, 0.008680928230285644, 0.008893664360046388, 0.008812864303588868, 0.008886367797851562, 0.008795040130615234, 0.008746368408203125, 0.008751423835754394, 0.008688768386840821, 0.0087193603515625, 0.008697728157043456, 0.008607744216918945, 0.008626175880432128, 0.008650943756103516, 0.008595264434814454, 0.008658080101013184, 0.008618687629699707, 0.008630016326904296, 0.008603167533874512, 0.008622976303100586, 0.008671232223510742, 0.008697855949401855, 0.00880025577545166, 0.008697855949401855, 0.008666272163391114, 0.008687520027160644, 0.008754112243652344, 0.00870809555053711, 0.008707167625427246, 0.00911248016357422, 0.00913590431213379, 0.009410783767700196, 0.008752703666687012, 0.009255007743835449, 0.009028160095214843, 0.008700991630554199, 0.008677184104919433, 0.008731552124023437, 0.008716480255126953, 0.008653696060180665, 0.008883135795593262, 0.008728384017944336, 0.008720576286315918, 0.008689727783203125, 0.008714591979980469, 0.0088635835647583, 0.00866431999206543, 0.008649215698242188, 0.008687007904052734, 0.008632767677307128, 0.008691616058349609, 0.008930624008178711, 0.00931935977935791, 0.008648287773132325, 0.00869212818145752, 0.008721376419067383, 0.008748543739318848, 0.008776191711425782, 0.008681568145751953, 0.008664383888244628, 0.008671839714050293, 0.008796159744262694, 0.008671296119689942, 0.009133855819702148, 0.009384096145629883, 0.008789183616638184, 0.008675264358520509, 0.008706784248352051, 0.008691871643066406, 0.008704000473022461, 0.008773504257202148, 0.00873635196685791, 0.00868188762664795, 0.008755007743835449, 0.008630271911621093, 0.008755616188049316, 0.00883193588256836, 0.008815232276916505, 0.008640864372253417, 0.008683520317077637, 0.008707136154174804, 0.008681471824645997, 0.008689696311950684, 0.0086495361328125, 0.008681535720825195, 0.008681119918823242, 0.009169280052185058, 0.00870195198059082, 0.008738847732543945, 0.008724448204040527, 0.008710432052612304, 0.008768671989440919, 0.008714495658874511, 0.008708415985107422, 0.008691007614135743, 0.008769280433654785, 0.008684479713439941, 0.008664095878601074, 0.008647647857666016, 0.008705375671386718, 0.008643232345581054, 
0.008740863800048827, 0.008797344207763671, 0.008686431884765625, 0.008689663887023925, 0.008665087699890137, 0.008697504043579102, 0.008742783546447754, 0.00869219207763672, 0.008742400169372559, 0.008665599822998048, 0.008764800071716308, 0.008716927528381348, 0.008697919845581055, 0.008728575706481934, 0.00869164752960205, 0.00870911979675293, 0.008741184234619141, 0.008748703956604004, 0.008663776397705079, 0.008716416358947755, 0.008709280014038086, 0.008657088279724121, 0.008667872428894044, 0.008675519943237304, 0.008654591560363769, 0.008670944213867188, 0.00870246410369873, 0.008672287940979004, 0.008679295539855957, 0.008751999855041505, 0.00876255989074707, 0.008733504295349121, 0.008691712379455567, 0.008697792053222657, 0.008619839668273926, 0.008723999977111816, 0.008642463684082032, 0.008661824226379395, 0.008646976470947266, 0.008711872100830078, 0.008720383644104004, 0.00870025634765625, 0.008941216468811035, 0.008806400299072266, 0.008678624153137207, 0.008639264106750488, 0.008660991668701172, 0.008652799606323243, 0.00863759994506836, 0.008694175720214845, 0.00872697639465332, 0.00867363166809082, 0.008660639762878418, 0.008664799690246581, 0.008762751579284668, 0.0089017915725708, 0.00872815990447998, 0.008644767761230469, 0.00863167953491211, 0.008661631584167481, 0.008653887748718262, 0.008648832321166993, 0.008678208351135254, 0.008697855949401855, 0.00871628761291504, 0.008667136192321777, 0.008742303848266601, 0.00871235179901123, 0.008658368110656739, 0.008653823852539062, 0.008671232223510742, 0.008684543609619141, 0.008710304260253906, 0.008747679710388184, 0.008715776443481446, 0.00883296012878418, 0.008755904197692871, 0.008796223640441895, 0.008785280227661132, 0.008718976020812988, 0.008659456253051758, 0.008712160110473632, 0.008642463684082032, 0.00867369556427002, 0.008699999809265137, 0.008798239707946777, 0.009000831604003907, 0.008699904441833496, 0.00870304012298584, 0.008713151931762695, 0.008767487525939942, 0.008724224090576172, 0.00869753646850586, 0.00861676788330078, 0.008732416152954102, 0.008691167831420899, 0.009269791603088379, 0.0088985595703125, 0.008934880256652832, 0.00874300765991211, 0.009200160026550292, 0.008714143753051757, 0.008722304344177246, 0.008681247711181641, 0.008741151809692383, 0.00867743968963623, 0.0090316801071167, 0.00883619213104248, 0.008673600196838379, 0.008826656341552734, 0.008817472457885743, 0.008742912292480469, 0.008709759712219238, 0.008759519577026368, 0.008824543952941894, 0.008744735717773438, 0.008778656005859375, 0.00875875186920166, 0.008847647666931153, 0.008689984321594239, 0.008806079864501952, 0.008881759643554688, 0.008724864006042481, 0.008747039794921875, 0.008780063629150391, 0.008802016258239747, 0.008742912292480469, 0.00871395206451416, 0.008726719856262208, 0.008757087707519531, 0.00878559970855713, 0.00868614387512207, 0.008861311912536621, 0.008755616188049316, 0.008732352256774902, 0.008701631546020508, 0.008633952140808105, 0.008737664222717285, 0.008646783828735351, 0.008611519813537598, 0.008708415985107422, 0.00869375991821289, 0.008730624198913574, 0.008639871597290039, 0.008695839881896972, 0.008638400077819824, 0.008661664009094239, 0.008689951896667481, 0.008695520401000977, 0.00868556785583496, 0.00877558422088623, 0.008662943840026855, 0.008679519653320313, 0.00868883228302002, 0.008747039794921875, 0.008684415817260743, 0.008667136192321777, 0.008648703575134278, 0.008683103561401367, 0.008687168121337891, 0.008694399833679199, 0.00881481647491455, 0.008728128433227539, 
0.00866547203063965, 0.008769599914550782, 0.009103360176086426, 0.009074687957763672, 0.00872009563446045, 0.008709728240966797, 0.008729280471801758, 0.008700991630554199, 0.00870851230621338, 0.00870639991760254, 0.008650943756103516, 0.00869375991821289, 0.00872447967529297, 0.008669183731079102, 0.008629759788513184, 0.00870851230621338, 0.008652864456176758, 0.00864412784576416, 0.008675840377807617, 0.008761183738708496, 0.008701248168945313, 0.00884928035736084, 0.00873788833618164, 0.008722304344177246, 0.008804351806640624, 0.008790016174316406, 0.008814623832702637, 0.008734687805175782, 0.00870195198059082, 0.008742912292480469, 0.008761343955993652, 0.008734720230102539, 0.008642560005187988, 0.008723999977111816, 0.009145088195800781, 0.00908463954925537, 0.008781248092651368, 0.008755776405334472, 0.00872447967529297, 0.008740768432617188, 0.008677472114562988, 0.008670880317687988, 0.00865056037902832, 0.009156607627868652, 0.008736767768859864, 0.008677696228027344, 0.008808128356933594, 0.008667263984680176, 0.008900480270385742, 0.009539584159851074, 0.009084927558898925, 0.00992255973815918, 0.008873984336853028, 0.008998335838317871, 0.008981056213378906, 0.008668767929077148, 0.009142687797546387, 0.008814047813415528, 0.00870412826538086, 0.008677791595458985, 0.008761343955993652, 0.008679424285888672, 0.008642080307006837, 0.008675104141235351, 0.008632479667663574, 0.008738752365112304, 0.008659711837768555, 0.009184703826904297, 0.008749823570251464, 0.008816287994384766, 0.008741024017333985, 0.008648544311523438, 0.008730624198913574, 0.008666943550109864, 0.008648896217346191, 0.008736031532287598, 0.008680159568786622, 0.008642560005187988, 0.008699904441833496, 0.008650239944458007, 0.008671039581298829, 0.008685407638549804, 0.00869871997833252, 0.008683327674865723, 0.008720767974853516, 0.008716032028198243, 0.008682911872863769, 0.00871836757659912, 0.008786463737487793, 0.008996959686279296, 0.008769536018371582, 0.008845312118530273, 0.008841216087341308, 0.008728575706481934, 0.008709343910217285, 0.008629023551940918, 0.00871401596069336, 0.008657119750976562, 0.008722016334533692, 0.008675935745239258, 0.008691807746887208, 0.008656736373901366, 0.008678367614746094, 0.00865167999267578, 0.008695520401000977, 0.008687040328979493, 0.008715968132019043, 0.008660863876342774, 0.008664671897888183, 0.008655200004577637, 0.008671744346618653, 0.008660991668701172, 0.008673279762268067, 0.008646656036376953, 0.008648703575134278, 0.008699071884155274, 0.008710975646972657, 0.008700096130371093, 0.008674367904663086, 0.00865775966644287, 0.00866643238067627, 0.008688223838806153, 0.008707903861999511, 0.008644800186157226, 0.008652959823608398, 0.008641599655151367, 0.008626976013183595, 0.008634367942810058, 0.008648703575134278, 0.008663040161132812, 0.008737919807434082, 0.008663488388061523, 0.008630080223083496, 0.008664735794067383, 0.008757216453552246, 0.008723039627075196, 0.008679967880249023, 0.008689472198486328, 0.00866425609588623, 0.00862502384185791, 0.008693280220031739, 0.008685376167297363, 0.008698464393615723, 0.008667488098144532, 0.00996224021911621, 0.008827872276306152, 0.008762463569641114, 0.008686495780944823, 0.008676608085632325, 0.008757599830627441, 0.008694111824035644, 0.008728832244873046, 0.008697759628295899, 0.008720288276672363, 0.008777728080749512, 0.008711423873901367, 0.008687423706054688, 0.008738911628723145, 0.008921055793762208, 0.00872332763671875, 0.00876307201385498, 0.008667455673217774, 
0.008653887748718262, 0.008671903610229492, 0.008671839714050293, 0.008699584007263183, 0.008658944129943847, 0.009164799690246582, 0.008681119918823242, 0.008656319618225097, 0.008641087532043457, 0.008650655746459962, 0.008650848388671875, 0.008652799606323243, 0.00867734432220459, 0.008826911926269531, 0.008736319541931152, 0.008686016082763672, 0.008773152351379394, 0.008690143585205078, 0.008804351806640624, 0.008812543869018554, 0.008732447624206543, 0.008687840461730958, 0.00872652816772461, 0.008689663887023925, 0.008668959617614746, 0.00863696002960205, 0.008649824142456054, 0.00864089584350586, 0.008638688087463379, 0.008638463973999023, 0.008639967918395997, 0.008644255638122558, 0.008642815589904785, 0.008663071632385253, 0.008704863548278808, 0.008678912162780762, 0.008671487808227539, 0.008779680252075196, 0.008693856239318848, 0.008683103561401367, 0.008630463600158692, 0.008669407844543457, 0.00865446376800537, 0.00862758445739746, 0.008608768463134766, 0.0086364164352417, 0.008703712463378907, 0.008679712295532226, 0.008742527961730957, 0.008713791847229004, 0.008697664260864258, 0.00862217617034912, 0.008717311859130859, 0.008677280426025391, 0.008634367942810058, 0.008680831909179688, 0.008702400207519532, 0.008765151977539062, 0.008722911834716797, 0.00863424015045166, 0.008650879859924316, 0.008690688133239746, 0.00872755241394043, 0.008785920143127441, 0.008750144004821778, 0.008723135948181152, 0.008705344200134277, 0.008743583679199219, 0.008720255851745606, 0.008722880363464355, 0.008681471824645997, 0.008765439987182617, 0.008658944129943847, 0.008689023971557618, 0.008698592185974121, 0.00868956756591797, 0.008722271919250488, 0.008648863792419434, 0.00875868797302246, 0.008708703994750976, 0.008660191535949708, 0.008678175926208497, 0.008796159744262694, 0.008812543869018554, 0.008685407638549804, 0.008699392318725586, 0.008691967964172364, 0.008704416275024414, 0.008714240074157715, 0.008680512428283691, 0.008671584129333497, 0.008667743682861329, 0.00870524787902832, 0.008790271759033203, 0.008933024406433105, 0.008696127891540527, 0.008714816093444825, 0.00874227237701416, 0.008771552085876464, 0.008739487648010254, 0.008787967681884766, 0.008747008323669434, 0.008721792221069335, 0.008648544311523438, 0.00915724754333496, 0.008734880447387695, 0.00878489589691162, 0.00879923152923584, 0.008749055862426757, 0.008689056396484375, 0.008704192161560058, 0.00867574405670166, 0.008703328132629395, 0.008698528289794921, 0.008706048011779785, 0.008763392448425293, 0.008671232223510742, 0.008767487525939942, 0.00873686408996582, 0.008830880165100098, 0.008761343955993652, 0.008727968215942383, 0.008690048217773437, 0.008669407844543457, 0.008677056312561035, 0.008741184234619141, 0.008757247924804687, 0.008802304267883301, 0.008796031951904298, 0.008814720153808594, 0.00894156837463379, 0.008722432136535644, 0.008757247924804687, 0.008699904441833496, 0.008785920143127441, 0.008763392448425293, 0.008773344039916992, 0.008734496116638183, 0.008761024475097657, 0.008696640014648437, 0.00870195198059082, 0.008675680160522461, 0.00872208023071289, 0.0087259521484375, 0.008700480461120605, 0.008695136070251465, 0.008639007568359376, 0.009274944305419922, 0.008983103752136231, 0.008775679588317872, 0.00874931240081787, 0.008703871726989746, 0.008734272003173829, 0.008701760292053223, 0.008728960037231445, 0.009248895645141602, 0.008755200386047364, 0.008740863800048827, 0.008785056114196778, 0.008864607810974121, 0.008822208404541015, 0.008833600044250487, 
0.008851072311401368, 0.008782591819763184, 0.00876095962524414, 0.008680992126464844, 0.008655327796936035, 0.00870582389831543, 0.008786144256591797, 0.008744895935058594, 0.00865657615661621, 0.008702336311340332, 0.008768832206726074, 0.009179840087890625, 0.008752991676330566, 0.0087041597366333, 0.008759296417236329, 0.008664480209350586, 0.00868000030517578, 0.008674464225769042, 0.008713088035583495, 0.008699392318725586, 0.008768320083618164, 0.008701631546020508, 0.008654879570007325, 0.00868553638458252, 0.008654848098754882, 0.00862003231048584, 0.008652799606323243, 0.008660127639770508, 0.008702239990234374, 0.011135295867919922, 0.010840448379516601, 0.009742207527160644, 0.008853504180908203, 0.008776000022888183, 0.008764512062072754, 0.00890991973876953, 0.008865504264831544, 0.008788064002990722, 0.008849504470825196, 0.00884175968170166, 0.00892956829071045, 0.009129376411437988, 0.008984928131103515, 0.008810400009155273, 0.008923199653625489, 0.008782048225402832, 0.008818400382995606, 0.008751040458679199, 0.008735136032104492, 0.008787487983703613, 0.008739295959472657, 0.008899616241455079, 0.009001952171325683, 0.009338208198547363, 0.008702624320983886, 0.00867363166809082, 0.00871718406677246, 0.00870684814453125, 0.008773632049560547, 0.008690912246704102, 0.008710944175720214, 0.008669535636901855, 0.00865449619293213, 0.008728704452514648, 0.008757311820983887, 0.008621888160705566, 0.008665184020996093, 0.008668831825256348, 0.008653056144714356, 0.008662464141845704, 0.008671808242797851, 0.008687616348266602, 0.008673279762268067, 0.008763360023498536, 0.008656448364257812, 0.008682208061218262, 0.00873036766052246, 0.008697855949401855, 0.00868131160736084, 0.008771648406982422, 0.00872047996520996, 0.008707615852355957, 0.008739295959472657, 0.008755552291870116, 0.008741888046264648, 0.008678048133850098, 0.008719967842102052, 0.00869212818145752, 0.008695808410644532, 0.008695808410644532, 0.008704000473022461, 0.00866425609588623, 0.008694016456604003, 0.0086626558303833, 0.008659903526306151, 0.00869983959197998, 0.008804415702819824, 0.008710016250610351, 0.00876195240020752, 0.008700032234191895, 0.008693535804748536, 0.008710463523864747, 0.008685759544372559, 0.00877344036102295, 0.008837120056152344, 0.008814592361450196, 0.008820735931396484, 0.008764639854431152, 0.008843296051025391, 0.008741632461547851, 0.008661151885986328, 0.008692959785461425, 0.008855744361877442, 0.008849856376647949, 0.008804320335388183, 0.008713824272155762, 0.008706496238708497, 0.008683520317077637, 0.008714240074157715, 0.008849344253540038, 0.008749119758605957, 0.008742719650268556, 0.008776960372924804, 0.0087192964553833, 0.008790111541748047, 0.00880787181854248, 0.008720767974853516, 0.008728320121765137, 0.008800607681274414, 0.008744959831237792, 0.008706048011779785, 0.00877558422088623, 0.008708191871643066, 0.008677120208740235, 0.00866329574584961, 0.008699904441833496, 0.00869375991821289, 0.008704000473022461, 0.008733823776245117, 0.008712832450866698, 0.008757216453552246, 0.008779775619506837, 0.008734304428100586, 0.008667712211608886, 0.008683520317077637, 0.008768863677978515, 0.008782719612121582, 0.008766528129577636, 0.008754015922546387, 0.00870025634765625, 0.00872822380065918, 0.008785920143127441, 0.008747008323669434, 0.00903104019165039, 0.008726688385009765, 0.008694016456604003, 0.008666367530822753, 0.008696800231933593, 0.008701087951660157, 0.008657055854797363, 0.008656607627868653, 0.008722432136535644, 0.008699135780334473, 
0.00865766429901123, 0.008726207733154297, 0.00867465591430664, 0.008717280387878418, 0.008759263992309571, 0.008703519821166992, 0.008710368156433106, 0.008736800193786622, 0.008720895767211915, 0.008700927734375, 0.008780384063720703, 0.008732288360595704, 0.00877622413635254, 0.008730527877807617, 0.008713695526123047, 0.008661631584167481, 0.008658495903015137, 0.008632287979125977, 0.008645248413085938, 0.008641535758972169, 0.008712127685546875, 0.008642911911010742, 0.00868188762664795, 0.008749152183532715, 0.008713919639587403, 0.00864627170562744, 0.008704832077026367, 0.008669343948364258, 0.00866425609588623, 0.00869167995452881, 0.008653440475463868, 0.008696063995361328, 0.008643360137939453, 0.008644991874694825, 0.008732512474060058, 0.008759455680847168, 0.008757344245910645, 0.008767999649047852, 0.00870195198059082, 0.008763392448425293, 0.008687199592590332, 0.008701760292053223, 0.008680031776428223, 0.008695584297180176, 0.008773088455200195, 0.008917023658752441, 0.008832927703857421, 0.008719136238098144, 0.00872441577911377, 0.008675200462341309, 0.008699711799621581, 0.008628640174865723, 0.00870195198059082, 0.008649951934814453, 0.008632831573486328, 0.008692000389099121, 0.008679424285888672, 0.008769536018371582, 0.00893337631225586, 0.008797504425048829, 0.00875984001159668, 0.00874512004852295, 0.008780832290649415, 0.00867420768737793, 0.008654303550720215, 0.008691871643066406, 0.00866528034210205, 0.008700160026550293, 0.008738783836364747, 0.008714271545410155, 0.008692735671997071, 0.00870297622680664, 0.00867734432220459, 0.008642208099365235, 0.008868224143981933, 0.009127903938293457, 0.008665120124816895, 0.00869702434539795, 0.008713343620300292, 0.008679360389709472, 0.008670975685119629, 0.008644831657409669, 0.008664863586425781, 0.008652799606323243, 0.008673536300659179, 0.008965888023376466, 0.009224191665649414, 0.009723360061645508, 0.008761759757995605, 0.008790143966674805, 0.008863455772399902, 0.008805695533752442, 0.00877462387084961, 0.008822688102722168, 0.008876128196716309, 0.008775391578674317, 0.00880668830871582, 0.008660991668701172, 0.00868934440612793, 0.008677696228027344, 0.009142271995544434, 0.008873984336853028, 0.008713215827941894, 0.008743840217590332, 0.008795647621154786, 0.008777664184570313, 0.008776351928710938, 0.008728575706481934, 0.00871833610534668, 0.008730208396911621, 0.008808863639831544, 0.008779775619506837, 0.00899071979522705, 0.008815936088562012, 0.008655072212219238, 0.00874953556060791, 0.008668383598327637, 0.008608736038208008, 0.008633184432983398, 0.008602527618408204, 0.00867081642150879, 0.008732704162597657, 0.008681728363037109]",tokens/s,114.43035393265318,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1067.393024,3727.622144,0.0,3332.374528,3314.861056,s,1,6.993041015625,6.993041015625,0.0,6.993041015625,6.993041015625,6.993041015625,6.993041015625,[6.993041015625],,kWh,4.096283800011709e-06,4.442444997861467e-07,1.0897230940040692e-06,5.6302513938019245e-06,,MB,1366.413312,3796.82816,0.0,3391.094784,2593.689088,s,10,0.7121671447753907,0.07121671447753905,0.0021358946542407754,0.07064505386352539,0.07195755157470703,0.07473421554565429,0.0769555467224121,"[0.07751087951660156, 0.07007782745361328, 0.07067266845703125, 0.07134051513671875, 0.07031696319580077, 0.07069990539550781, 0.07061743927001952, 0.06981353759765625, 0.07080863952636719, 0.07030876922607422]",tokens/s,3594.66175711798,kWh,2.450450445868076e-06,2.701050560885326e-07,1.625707319083287e-06,4.346262821039895e-06,tokens/kWh,58901177.986919105,MB,1377.97632,3796.82816,0.0,3391.094784,2593.691648,s,10,10.720811645507812,1.0720811645507813,0.0023244832450649497,1.0722510986328126,1.0747999267578126,1.0751251220703124,1.0753852783203124,"[1.072836181640625, 1.0710458984375, 1.071342529296875, 1.06953369140625, 1.073463134765625, 1.0672841796875, 1.0754503173828125, 1.0747276611328125, 1.071666015625, 1.0734620361328124]",tokens/s,58.76420749020246,kWh,3.086898568371642e-05,3.404526461774013e-06,1.932455944111689e-05,5.359807158660731e-05,tokens/kWh,1175415.4232620185,,s,630,10.71847644615174,0.01701345467643132,0.0002638609085937025,0.016952816009521483,0.017211798858642578,0.017431530952453613,0.018231530761718753,"[0.017162143707275392, 0.017008895874023437, 0.017020448684692383, 0.017037792205810545, 0.016983360290527345, 0.01684550476074219, 0.018285823822021485, 0.0185229434967041, 0.017147136688232423, 0.017078720092773437, 0.016912895202636717, 0.01687923240661621, 0.016838783264160155, 0.016914432525634765, 0.016885759353637696, 0.016857120513916017, 0.01685910415649414, 0.016885311126708984, 0.016877727508544924, 0.01688096046447754, 0.016872415542602538, 0.016873472213745116, 0.016906240463256835, 0.016895999908447267, 0.01683251190185547, 0.01681100845336914, 0.01704025650024414, 0.017124704360961914, 0.016837408065795898, 0.01685481643676758, 0.0167511043548584, 0.01767366409301758, 0.016918943405151366, 0.016895872116088867, 0.016877504348754884, 0.017053760528564454, 0.017088512420654296, 0.016873056411743165, 0.016910751342773436, 0.016765216827392578, 0.016823680877685546, 0.016856992721557617, 0.01684524726867676, 0.016944543838500976, 0.01764575958251953, 0.01689411163330078, 0.01691606330871582, 0.016956096649169923, 0.017012704849243165, 0.016904191970825197, 0.017111040115356444, 0.017276927947998046, 0.01689798355102539, 0.016939071655273436, 0.01711065673828125, 0.017021312713623046, 0.01699603271484375, 0.017608863830566406, 0.01706175994873047, 0.01706422424316406, 0.016919904708862305, 0.017054367065429687, 0.01700249671936035, 0.017088287353515624, 0.01712873649597168, 0.017014848709106446, 0.016910816192626955, 0.016990623474121093, 0.01683203125, 0.01686307144165039, 0.016898303985595702, 0.01683683204650879, 0.016796031951904298, 0.016873119354248046, 0.016773248672485353, 0.01677926445007324, 0.016949247360229493, 0.01679769515991211, 0.01677497673034668, 0.01681167984008789, 0.016799648284912108, 0.01681635284423828, 0.016875295639038085, 0.016951936721801758, 0.016861183166503906, 0.0170383358001709, 0.016938079833984376, 0.016890880584716796, 0.01682934379577637, 0.016911359786987306, 0.01700796890258789, 0.01710588836669922, 
0.016889823913574218, 0.01707596778869629, 0.01687855911254883, 0.017074176788330078, 0.01710678482055664, 0.01711529541015625, 0.01690630340576172, 0.017009695053100585, 0.016995296478271485, 0.01688979148864746, 0.0169749755859375, 0.016941280364990235, 0.016906911849975587, 0.016916479110717773, 0.017264352798461915, 0.017006879806518556, 0.017008127212524413, 0.016977792739868165, 0.017068416595458984, 0.017027200698852538, 0.017505727767944335, 0.018825439453125, 0.01720163154602051, 0.01717452812194824, 0.017072128295898437, 0.016901567459106447, 0.017054271697998048, 0.016908287048339844, 0.016859136581420898, 0.01743667221069336, 0.016982015609741212, 0.01681315231323242, 0.016851295471191408, 0.017053407669067384, 0.017352287292480468, 0.017213823318481446, 0.01698649597167969, 0.016977727890014647, 0.01711942481994629, 0.016994144439697264, 0.016973983764648436, 0.01694060707092285, 0.017017087936401366, 0.01685523223876953, 0.016868383407592773, 0.01688265609741211, 0.01679974365234375, 0.01693440055847168, 0.016953472137451173, 0.016944799423217773, 0.016760704040527343, 0.016912992477416993, 0.01693926429748535, 0.017176544189453125, 0.01700864028930664, 0.016979999542236328, 0.01683456039428711, 0.016855039596557618, 0.017040864944458008, 0.016959232330322267, 0.016957504272460937, 0.016998880386352538, 0.016861440658569336, 0.01688310432434082, 0.017054048538208008, 0.01688822364807129, 0.01692860794067383, 0.01695292854309082, 0.017039072036743163, 0.016966335296630858, 0.01716223907470703, 0.016910144805908203, 0.017058015823364258, 0.016839967727661134, 0.016902816772460937, 0.016957599639892577, 0.016940576553344727, 0.01694937515258789, 0.01694905662536621, 0.01687388801574707, 0.016920576095581053, 0.016824640274047852, 0.016878719329833983, 0.017070240020751953, 0.01698371124267578, 0.01682908821105957, 0.016975648880004884, 0.016970048904418944, 0.01697148895263672, 0.016988128662109376, 0.016935232162475587, 0.018765823364257812, 0.017854463577270507, 0.017028863906860352, 0.016984319686889647, 0.016895999908447267, 0.016886783599853517, 0.017741952896118164, 0.017288991928100586, 0.01700864028930664, 0.017000127792358398, 0.016959999084472658, 0.01700364875793457, 0.01699955177307129, 0.017019647598266602, 0.016988895416259767, 0.0168056640625, 0.017234432220458985, 0.016954944610595702, 0.016957183837890626, 0.01677996826171875, 0.016920576095581053, 0.016873023986816407, 0.016836544036865235, 0.016793535232543944, 0.016917055130004882, 0.01689708709716797, 0.016787647247314453, 0.016818944931030273, 0.016902143478393555, 0.016895999908447267, 0.016926496505737305, 0.016850175857543944, 0.0168187198638916, 0.016902271270751952, 0.01722604751586914, 0.017261760711669922, 0.016902656555175782, 0.017154367446899414, 0.016911680221557618, 0.017000736236572264, 0.017051519393920897, 0.01699279975891113, 0.01687286376953125, 0.01721161651611328, 0.016789888381958006, 0.016788671493530274, 0.016834463119506836, 0.017046432495117187, 0.01692790412902832, 0.017034080505371092, 0.01722777557373047, 0.017524736404418945, 0.016945152282714843, 0.016961536407470702, 0.01693075180053711, 0.016926336288452148, 0.016905824661254884, 0.01688400077819824, 0.016888383865356446, 0.017089920043945314, 0.01692326354980469, 0.016872512817382813, 0.0168785285949707, 0.01680998420715332, 0.016888832092285155, 0.016879648208618165, 0.016912960052490236, 0.01694895935058594, 0.017002847671508788, 0.017095840454101563, 0.01706831932067871, 0.017340864181518555, 0.017454336166381836, 
0.017425247192382812, 0.01726063919067383, 0.017088415145874024, 0.017003807067871093, 0.01694588851928711, 0.017014080047607422, 0.01690284729003906, 0.016904319763183594, 0.017176448822021486, 0.016823583602905274, 0.016972448348999022, 0.016939071655273436, 0.016986112594604492, 0.016970943450927735, 0.016827199935913088, 0.01721343994140625, 0.01692995262145996, 0.016989023208618163, 0.017110944747924805, 0.017008735656738282, 0.017485183715820314, 0.01711782455444336, 0.01695692825317383, 0.0168985595703125, 0.016908031463623047, 0.0167491512298584, 0.01684659194946289, 0.016950239181518556, 0.01705615997314453, 0.016904415130615233, 0.01684867286682129, 0.017150623321533203, 0.01716211128234863, 0.0176680965423584, 0.017086271286010743, 0.017008832931518555, 0.016907648086547853, 0.016992832183837892, 0.01692086410522461, 0.016885536193847656, 0.016977344512939453, 0.01691507148742676, 0.01701241683959961, 0.01695359992980957, 0.016965631484985352, 0.01690118408203125, 0.016922815322875977, 0.017041696548461913, 0.017117664337158202, 0.01695974349975586, 0.017038816452026366, 0.017066015243530273, 0.01694540786743164, 0.01713497543334961, 0.016960128784179688, 0.01701273536682129, 0.01704550361633301, 0.017382591247558594, 0.016911455154418945, 0.01764761543273926, 0.01740297508239746, 0.017488224029541016, 0.017306175231933594, 0.01707119941711426, 0.01700489616394043, 0.017271551132202148, 0.016974815368652342, 0.016962303161621093, 0.017143903732299806, 0.016867328643798828, 0.016979167938232422, 0.017083168029785156, 0.016902143478393555, 0.01725164794921875, 0.0168209285736084, 0.0169881591796875, 0.01680179214477539, 0.016900096893310547, 0.0167587833404541, 0.016948928833007814, 0.016740671157836916, 0.016881664276123046, 0.0168407039642334, 0.016961536407470702, 0.01674684715270996, 0.01698371124267578, 0.01678335952758789, 0.016952352523803713, 0.016798688888549806, 0.016990207672119142, 0.016777215957641603, 0.016967872619628906, 0.01679100799560547, 0.016879552841186522, 0.017084831237792968, 0.016860383987426758, 0.016712480545043946, 0.01682636833190918, 0.01695475196838379, 0.0168986873626709, 0.016784704208374024, 0.017022655487060546, 0.01696460723876953, 0.016910335540771485, 0.01682636833190918, 0.01700422477722168, 0.01689628791809082, 0.016961631774902345, 0.016698368072509767, 0.01687443161010742, 0.016707584381103514, 0.016895999908447267, 0.016863231658935548, 0.016910144805908203, 0.016877056121826172, 0.01691103935241699, 0.01681216049194336, 0.01690611267089844, 0.016699392318725585, 0.016905824661254884, 0.016728479385375975, 0.016885759353637696, 0.01700432014465332, 0.017111808776855468, 0.017114912033081055, 0.0169836483001709, 0.01712169647216797, 0.017119232177734374, 0.016905824661254884, 0.017237600326538087, 0.01706812858581543, 0.016851680755615234, 0.01695267105102539, 0.01678607940673828, 0.016982015609741212, 0.01700249671936035, 0.01684889602661133, 0.01688175964355469, 0.017076480865478517, 0.016944799423217773, 0.01702707290649414, 0.01701888084411621, 0.01723187255859375, 0.017043455123901367, 0.017043455123901367, 0.017131296157836914, 0.01755683135986328, 0.017105152130126953, 0.01697443199157715, 0.016916032791137695, 0.016921056747436523, 0.016902143478393555, 0.016941055297851563, 0.01703321647644043, 0.017194208145141603, 0.01698896026611328, 0.01694438362121582, 0.016880512237548828, 0.01695270347595215, 0.01694099235534668, 0.016893760681152344, 0.01695635223388672, 0.01697977638244629, 0.01700387191772461, 0.016992416381835938, 
0.016927232742309572, 0.01713705635070801, 0.017265247344970702, 0.017508352279663086, 0.01807974433898926, 0.018163711547851562, 0.017157503128051758, 0.01704972839355469, 0.017009151458740233, 0.01696950340270996, 0.016953567504882812, 0.016879615783691407, 0.016979072570800783, 0.016876224517822266, 0.016882080078125, 0.017008159637451174, 0.01690230369567871, 0.016910240173339842, 0.016883232116699218, 0.018102272033691406, 0.017280960083007814, 0.017482015609741212, 0.01710214424133301, 0.01697862434387207, 0.017018848419189454, 0.016967424392700194, 0.0169003849029541, 0.017005695343017577, 0.01705459213256836, 0.017047552108764647, 0.016961503982543945, 0.017036575317382813, 0.01710358428955078, 0.01714793586730957, 0.01695916748046875, 0.016933183670043945, 0.01686300849914551, 0.01700227165222168, 0.017451135635375977, 0.016933183670043945, 0.016891904830932617, 0.01688310432434082, 0.0169150390625, 0.017074079513549806, 0.016920671463012696, 0.017147424697875977, 0.016952159881591797, 0.01709017562866211, 0.01690608024597168, 0.01695318412780762, 0.01695689582824707, 0.016906944274902344, 0.016877248764038087, 0.017095136642456054, 0.016896095275878906, 0.01694095993041992, 0.017184576034545897, 0.01717398452758789, 0.016932928085327148, 0.016915103912353516, 0.016846847534179688, 0.016850944519042968, 0.016855039596557618, 0.016952512741088867, 0.016795679092407225, 0.01689654350280762, 0.01740825653076172, 0.017287168502807617, 0.017092607498168946, 0.0171495361328125, 0.016951711654663085, 0.017032896041870117, 0.01684739112854004, 0.016957216262817383, 0.016813247680664063, 0.016931808471679688, 0.01688969612121582, 0.016947200775146484, 0.016866336822509764, 0.016980960845947267, 0.017077600479125977, 0.019362144470214844, 0.017840383529663086, 0.01703731155395508, 0.017160160064697266, 0.01695952033996582, 0.016891487121582033, 0.016945568084716797, 0.016987199783325194, 0.016917280197143555, 0.016983327865600587, 0.01703206443786621, 0.016879423141479492, 0.016803232192993164, 0.016965728759765625, 0.016910528182983397, 0.016953855514526366, 0.0168221435546875, 0.016867456436157228, 0.01769267272949219, 0.01684889602661133, 0.017002016067504882, 0.0168637752532959, 0.016842079162597657, 0.0170863037109375, 0.01688652801513672, 0.016852991104125976, 0.016990207672119142, 0.016951295852661134, 0.016941055297851563, 0.016913631439208984, 0.017300128936767578, 0.016919967651367187, 0.017071935653686525, 0.016878240585327147, 0.016764575958251954, 0.016847455978393554, 0.016862592697143554, 0.016741247177124025, 0.016856191635131836, 0.01686582374572754, 0.01683875274658203, 0.016881824493408203, 0.017014144897460937, 0.017144096374511718, 0.01708255958557129, 0.017163328170776367, 0.017036224365234377, 0.01702681541442871, 0.016859743118286134, 0.016997568130493163, 0.017264799118041994, 0.01694259262084961, 0.017031007766723633, 0.016933855056762696, 0.016829439163208008, 0.017023231506347655, 0.01685990333557129, 0.016891904830932617, 0.017325279235839843, 0.017867551803588868, 0.01825923156738281, 0.017187488555908202, 0.016965696334838867, 0.016995391845703124, 0.01692153549194336, 0.017119232177734374, 0.01697782325744629, 0.017025119781494142, 0.016969728469848632, 0.017104415893554686, 0.017133216857910156, 0.01704217529296875, 0.017985599517822266, 0.017065248489379882, 0.017263328552246094, 0.016977920532226562, 0.01703763198852539, 0.0171393928527832, 0.01700249671936035, 0.01691472053527832, 0.01694281578063965, 0.016938304901123045, 0.017031871795654296, 
0.016934656143188478, 0.017055999755859374, 0.01702707290649414, 0.01691267204284668, 0.01683363151550293, 0.016818815231323243, 0.01697996711730957, 0.016910688400268555, 0.016848543167114257, 0.017967071533203125, 0.016968896865844726, 0.01687228775024414, 0.016871423721313478, 0.01691263961791992, 0.016882720947265624, 0.017005279541015626, 0.01693065643310547, 0.01680838394165039, 0.016884544372558593, 0.016886688232421874, 0.0169649600982666, 0.016908544540405274, 0.017230239868164063, 0.017323040008544923, 0.017036256790161134, 0.016947200775146484, 0.016989568710327148, 0.016938911437988282, 0.016947263717651366, 0.017218015670776368, 0.016898239135742187, 0.017090240478515626, 0.016884031295776366, 0.016855039596557618, 0.01691200065612793, 0.01694246482849121, 0.01696767997741699, 0.016836896896362304, 0.016980703353881837, 0.01884569549560547, 0.0169432315826416, 0.016834400177001954, 0.016889888763427733, 0.016951295852661134, 0.016914112091064453]",tokens/s,58.77701025561237,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in 
get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", 
line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 289, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 150.12 MiB is free. Process 150680 has 14.59 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.43 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 
3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 1248, in __init__ self.transformer = FalconModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in __init__ self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 722, in __init__ self.mlp = FalconMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 697, in __init__ self.dense_h_to_4h = FalconLinear(hidden_size, config.ffn_hidden_size, bias=config.bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 3.29 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.05 GiB is free. Process 202207 has 13.69 GiB memory in use. Of the allocated memory 13.57 GiB is allocated by PyTorch, and 1.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,742.764544,6193.872896,0.0,5798.62528,5774.685184,s,1,7.30814697265625,7.30814697265625,0.0,7.30814697265625,7.30814697265625,7.30814697265625,7.30814697265625,[7.30814697265625],,kWh,4.32576990416654e-06,4.700026609990046e-07,2.377224124001931e-06,7.172996689167475e-06,,MB,1047.277568,6206.455808,0.0,5800.722432,5525.736448,s,10,2.3927240905761713,0.23927240905761718,0.009610978361010573,0.2413498229980469,0.24558556365966797,0.24645138931274413,0.24714404983520508,"[0.2117969207763672, 0.2372667236328125, 0.2394752655029297, 0.24016172790527343, 0.2419894714355469, 0.24731721496582032, 0.24520982360839844, 0.24539315795898436, 0.24071017456054689, 
0.2434036102294922]",tokens/s,1069.9102374915062,kWh,6.58644672194437e-06,7.263188448296269e-07,4.35650965804443e-06,1.1669275224818428e-05,tokens/kWh,21937952.020836264,MB,1051.738112,6208.55296,0.0,5802.819584,5623.660032,s,10,17.640743408203125,1.7640743408203126,0.002191758134030772,1.76315966796875,1.7671169555664061,1.767554815673828,1.7679051037597657,"[1.76262060546875, 1.7623641357421875, 1.7670196533203124, 1.761020751953125, 1.7628199462890626, 1.7648861083984375, 1.76799267578125, 1.7661329345703125, 1.76238720703125, 1.7634993896484374]",tokens/s,35.71278065906472,kWh,5.2048872468055586e-05,5.739458893656275e-06,3.445896583875587e-05,9.224729720046773e-05,tokens/kWh,682946.8386818011,,s,630,17.638176202774062,0.02799710508376833,0.00031267521339151144,0.02794905662536621,0.02817639675140381,0.028387158775329588,0.029773395805358892,"[0.029443679809570314, 0.028571552276611328, 0.027992576599121095, 0.027808864593505858, 0.02789686393737793, 0.027770912170410156, 0.027739200592041015, 0.027689472198486328, 0.027746591567993164, 0.027676671981811524, 0.02773401641845703, 0.02784867286682129, 0.027787296295166016, 0.02779961585998535, 0.02803296089172363, 0.0278154239654541, 0.02775059127807617, 0.02770102310180664, 0.02777142333984375, 0.027777023315429687, 0.027808927536010743, 0.028105567932128907, 0.02825356864929199, 0.028070240020751952, 0.027837823867797852, 0.02789878463745117, 0.02776678466796875, 0.027826175689697266, 0.027846656799316406, 0.02788675117492676, 0.02789401626586914, 0.027859264373779297, 0.027896095275878906, 0.027935007095336913, 0.02852854347229004, 0.02844633674621582, 0.028176511764526367, 0.028033151626586914, 0.027996095657348632, 0.02821126365661621, 0.028190656661987303, 0.028203008651733398, 0.027992063522338868, 0.028090368270874022, 0.027992095947265625, 0.027918560028076172, 0.02821865653991699, 0.0281246395111084, 0.027947488784790038, 0.02786886405944824, 0.027888479232788085, 0.027936767578125, 0.02787942314147949, 0.027903711318969727, 0.027988256454467772, 0.02800614356994629, 0.027918687820434572, 0.0279215030670166, 0.027949312210083007, 0.028006399154663086, 0.02790166473388672, 0.027915103912353516, 0.027994400024414064, 0.02965711975097656, 0.028714656829833984, 0.028100479125976564, 0.027877824783325195, 0.02780182456970215, 0.02776655960083008, 0.02771046447753906, 0.02781110382080078, 0.027743711471557617, 0.027674720764160155, 0.027699359893798826, 0.02783251190185547, 0.027789119720458985, 0.027770559310913087, 0.027732383728027343, 0.02778009605407715, 0.027673664093017577, 0.027684000015258788, 0.027780832290649413, 0.02774963188171387, 0.027659488677978517, 0.027762304306030272, 0.027835264205932617, 0.028010240554809572, 0.027754751205444336, 0.02776412773132324, 0.027880224227905273, 0.027821887969970704, 0.02782931137084961, 0.027871295928955077, 0.027858911514282228, 0.027855775833129884, 0.027867136001586915, 0.02799523162841797, 0.028023712158203123, 0.028037120819091797, 0.028163871765136718, 0.02815407943725586, 0.02814361572265625, 0.028098880767822267, 0.028174144744873047, 0.02891526412963867, 0.028252096176147462, 0.028002687454223633, 0.027975679397583008, 0.027850751876831056, 0.02790768051147461, 0.0280231990814209, 0.027973440170288084, 0.02785708808898926, 0.0279552001953125, 0.028215295791625978, 0.028062816619873046, 0.02811996841430664, 0.028019968032836913, 0.028074464797973632, 0.027971872329711912, 0.028000448226928713, 0.02789561653137207, 0.027987968444824218, 0.028006399154663086, 
0.028024927139282226, 0.02810665512084961, 0.03003664016723633, 0.029128608703613282, 0.028288192749023437, 0.028132223129272462, 0.028026687622070313, 0.02781724739074707, 0.0277410888671875, 0.02798124885559082, 0.027775552749633788, 0.027807743072509765, 0.027850656509399413, 0.027868640899658202, 0.027816287994384764, 0.027851200103759764, 0.027843679428100586, 0.02793343925476074, 0.02778748893737793, 0.027842336654663086, 0.027757984161376953, 0.02784111976623535, 0.027804927825927736, 0.02794099235534668, 0.027890304565429687, 0.02783395195007324, 0.02781430435180664, 0.027852832794189455, 0.027858911514282228, 0.027879520416259764, 0.02803049659729004, 0.029004159927368163, 0.028035072326660155, 0.027971263885498046, 0.027991840362548828, 0.028027423858642576, 0.02816409683227539, 0.028089792251586913, 0.02815648078918457, 0.028012544631958007, 0.028051456451416015, 0.028039167404174805, 0.028008159637451173, 0.027990304946899414, 0.02792857551574707, 0.027978784561157228, 0.02790483283996582, 0.027936927795410155, 0.028025983810424804, 0.027967552185058593, 0.027942880630493164, 0.027911008834838866, 0.027987968444824218, 0.028458400726318358, 0.028442975997924804, 0.028080127716064454, 0.028004608154296874, 0.0279837760925293, 0.028059776306152345, 0.028100448608398436, 0.02808844757080078, 0.027977567672729492, 0.028049631118774412, 0.028123071670532226, 0.028211200714111328, 0.029952224731445314, 0.028782495498657225, 0.02820355224609375, 0.02788483238220215, 0.027914016723632813, 0.0277142391204834, 0.027641855239868163, 0.027703296661376952, 0.027711488723754882, 0.02772377586364746, 0.027820287704467775, 0.027768287658691406, 0.027730207443237304, 0.027835968017578126, 0.027783008575439454, 0.027781280517578125, 0.027790943145751954, 0.027753343582153322, 0.027891328811645508, 0.02777052879333496, 0.028027584075927734, 0.027891712188720705, 0.02778508758544922, 0.027773056030273437, 0.02775654411315918, 0.02789583969116211, 0.02777225685119629, 0.027763328552246093, 0.027799264907836914, 0.027834144592285157, 0.02783078384399414, 0.02777529525756836, 0.02786604881286621, 0.027984607696533204, 0.028221376419067384, 0.028030815124511718, 0.02806809616088867, 0.02802284812927246, 0.027966623306274415, 0.02812291145324707, 0.028035167694091798, 0.02793769645690918, 0.027918399810791014, 0.027848224639892578, 0.027873952865600585, 0.027918079376220702, 0.02802284812927246, 0.02807539176940918, 0.027909791946411133, 0.02795564842224121, 0.027988447189331054, 0.028108192443847657, 0.027880096435546876, 0.027940959930419923, 0.027905696868896483, 0.027959487915039063, 0.027989023208618163, 0.028122079849243163, 0.02800828742980957, 0.027977888107299804, 0.027999359130859373, 0.028023679733276366, 0.02803264045715332, 0.029866783142089844, 0.02874563217163086, 0.02847145652770996, 0.028131519317626953, 0.02785251235961914, 0.02781337547302246, 0.02802943992614746, 0.027842336654663086, 0.02774822425842285, 0.027828575134277344, 0.02788688087463379, 0.027909055709838867, 0.02772764778137207, 0.02775859260559082, 0.02788262367248535, 0.02782912063598633, 0.027840511322021484, 0.028063743591308594, 0.027953216552734375, 0.028078144073486327, 0.027840639114379884, 0.02779680061340332, 0.027750688552856444, 0.027821344375610353, 0.02782912063598633, 0.027817983627319336, 0.027786815643310547, 0.02776054382324219, 0.027836671829223635, 0.027836704254150392, 0.02774220848083496, 0.02778688049316406, 0.028008832931518554, 0.028057600021362306, 0.02813337516784668, 0.028106752395629882, 
0.028106719970703124, 0.027991167068481447, 0.028171167373657227, 0.02812495994567871, 0.02795132827758789, 0.02789580726623535, 0.027906047821044923, 0.027891712188720705, 0.02822524833679199, 0.02794268798828125, 0.02802070426940918, 0.02795779228210449, 0.027884960174560547, 0.027902143478393555, 0.0278799991607666, 0.027905887603759765, 0.02796134376525879, 0.02793471908569336, 0.02796134376525879, 0.027850879669189452, 0.027968896865844726, 0.028123647689819335, 0.02794495964050293, 0.027979904174804688, 0.02797350311279297, 0.027917728424072266, 0.028029472351074218, 0.029911008834838868, 0.02967747116088867, 0.028435968399047853, 0.028311391830444337, 0.027951871871948242, 0.02782841682434082, 0.02768227195739746, 0.02777948760986328, 0.027704832077026367, 0.0277774715423584, 0.02774233627319336, 0.027694976806640625, 0.027792383193969726, 0.02776166343688965, 0.02774220848083496, 0.02773606491088867, 0.027812864303588865, 0.027823104858398437, 0.027686975479125978, 0.0278035831451416, 0.02829120063781738, 0.02816806411743164, 0.02779136085510254, 0.02792188835144043, 0.027773471832275392, 0.027768831253051757, 0.027844608306884764, 0.02790809631347656, 0.0278853759765625, 0.027888896942138672, 0.027929536819458006, 0.027830272674560546, 0.027899232864379883, 0.028008319854736327, 0.0279968318939209, 0.02810086441040039, 0.02799203109741211, 0.02801888084411621, 0.028101919174194336, 0.028114463806152342, 0.027998559951782225, 0.028013120651245116, 0.02815999984741211, 0.028128992080688475, 0.028018848419189453, 0.028219520568847658, 0.02811087989807129, 0.028026847839355468, 0.028016767501831054, 0.02800774383544922, 0.02793734359741211, 0.027983871459960938, 0.027926528930664062, 0.028039167404174805, 0.028026847839355468, 0.027977983474731447, 0.027981216430664063, 0.028245471954345704, 0.0279715518951416, 0.027988927841186523, 0.027987167358398436, 0.027995136260986327, 0.02799007987976074, 0.029646879196166993, 0.02871891212463379, 0.02826678466796875, 0.028129152297973633, 0.027922271728515625, 0.02784787178039551, 0.027771871566772462, 0.027811840057373048, 0.027785152435302735, 0.027886655807495116, 0.02798899269104004, 0.027795583724975585, 0.027916160583496094, 0.028022783279418945, 0.02794643211364746, 0.027846656799316406, 0.0279205436706543, 0.027876064300537108, 0.027932544708251954, 0.027846527099609376, 0.02783843231201172, 0.027876895904541017, 0.02795155143737793, 0.027932191848754884, 0.027951583862304688, 0.027985183715820313, 0.027863807678222656, 0.02784272003173828, 0.027879104614257813, 0.027855039596557617, 0.027789375305175782, 0.027862207412719726, 0.02793747138977051, 0.027992063522338868, 0.0281529598236084, 0.028181375503540038, 0.028096511840820314, 0.028042816162109376, 0.028105152130126952, 0.028077791213989258, 0.02948739242553711, 0.028118528366088868, 0.028002815246582033, 0.028010751724243162, 0.027967039108276366, 0.028028703689575194, 0.028020479202270507, 0.028015264511108397, 0.028071327209472655, 0.028127359390258788, 0.028112895965576173, 0.028071392059326173, 0.028103967666625977, 0.02812928009033203, 0.028184288024902342, 0.028209152221679686, 0.028160032272338868, 0.028196832656860352, 0.02798703956604004, 0.02809116744995117, 0.0281396484375, 0.028188928604125977, 0.028210880279541016, 0.02993731117248535, 0.028737024307250978, 0.02841846466064453, 0.0281112003326416, 0.027872383117675783, 0.027909088134765624, 0.027746368408203125, 0.02787708854675293, 0.02776038360595703, 0.027898208618164062, 0.027870912551879883, 
0.027785728454589844, 0.02788140869140625, 0.027875328063964845, 0.027867136001586915, 0.027737632751464843, 0.027818464279174806, 0.02779862403869629, 0.027819936752319335, 0.02787833595275879, 0.02790928077697754, 0.028007328033447267, 0.02794905662536621, 0.027891712188720705, 0.027881471633911133, 0.027897823333740236, 0.02779743957519531, 0.027879680633544922, 0.02908361625671387, 0.02802060890197754, 0.02798201560974121, 0.02798182487487793, 0.027983680725097656, 0.028057600021362306, 0.02823097610473633, 0.02813164710998535, 0.028037504196166994, 0.02817638397216797, 0.02811635208129883, 0.02801318359375, 0.028001855850219727, 0.028045759201049805, 0.02797772789001465, 0.027950815200805664, 0.02808665657043457, 0.028022687911987306, 0.028006336212158204, 0.027955263137817384, 0.028005504608154298, 0.02795814323425293, 0.028006399154663086, 0.02797590446472168, 0.02808399963378906, 0.0279564151763916, 0.027960128784179687, 0.02794905662536621, 0.028080127716064454, 0.028026880264282225, 0.027960447311401366, 0.02800320053100586, 0.027987968444824218, 0.027983007431030275, 0.028234592437744142, 0.029812576293945313, 0.02880953598022461, 0.02834889602661133, 0.0279117431640625, 0.028043712615966797, 0.027821760177612304, 0.027699520111083984, 0.02779136085510254, 0.02775654411315918, 0.027942783355712892, 0.027795743942260743, 0.027720575332641603, 0.02779849624633789, 0.027813791275024414, 0.027770975112915038, 0.02777907180786133, 0.02775654411315918, 0.027703296661376952, 0.027796863555908203, 0.027800384521484374, 0.027821887969970704, 0.027797504425048827, 0.02782339286804199, 0.027810495376586915, 0.027821439743041992, 0.027744672775268556, 0.02777891159057617, 0.02778972816467285, 0.027897151947021484, 0.02782908821105957, 0.02794713592529297, 0.027846015930175782, 0.027864479064941407, 0.027986656188964842, 0.02833430480957031, 0.02815795135498047, 0.028065792083740236, 0.028176223754882813, 0.028078208923339842, 0.02802403259277344, 0.028213920593261718, 0.028141727447509767, 0.02792959976196289, 0.027988832473754884, 0.028078559875488282, 0.028057376861572267, 0.028037023544311524, 0.02795699119567871, 0.027955455780029295, 0.027998207092285156, 0.02790934371948242, 0.027950944900512694, 0.027899999618530274, 0.02792108726501465, 0.027936735153198243, 0.027914560317993165, 0.02795039939880371, 0.027946624755859375, 0.027973663330078124, 0.028000383377075194, 0.027994911193847657, 0.02800873565673828, 0.028097248077392577, 0.029820928573608397, 0.028690240859985353, 0.028274879455566407, 0.02795929527282715, 0.02796771240234375, 0.027823904037475585, 0.027788736343383788, 0.027786111831665038, 0.027862720489501953, 0.027941024780273438, 0.02780143928527832, 0.027711488723754882, 0.02775449562072754, 0.02775449562072754, 0.027686464309692384, 0.02778067207336426, 0.027896703720092772, 0.027856992721557616, 0.02784675216674805, 0.02789356803894043, 0.02775040054321289, 0.027803712844848633, 0.0278035831451416, 0.027850751876831056, 0.027805152893066405, 0.027872800827026367, 0.02778828811645508, 0.02794291114807129, 0.027966720581054687, 0.027931392669677733, 0.027945119857788085, 0.027912031173706053, 0.027933952331542968, 0.027919103622436523, 0.02814156723022461, 0.02806915283203125, 0.028011232376098632, 0.02809196853637695, 0.028127359390258788, 0.02801020812988281, 0.0279736328125, 0.028240480422973634, 0.028536159515380858, 0.02796406364440918, 0.028086271286010742, 0.02806537628173828, 0.028131872177124023, 0.028052671432495117, 0.028035776138305664, 
0.027901952743530273, 0.027937919616699218, 0.02795814323425293, 0.02795484733581543, 0.027935071945190428, 0.027962656021118165, 0.028005119323730468, 0.027985887527465822, 0.02798201560974121, 0.027914079666137695, 0.02796544075012207, 0.02799190330505371, 0.02798195266723633, 0.028090368270874022]",tokens/s,35.717978591285224,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 512, in __init__ self.mlp = MistralMLP(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 152, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 170.12 MiB is free. Process 176875 has 14.57 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = 
cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 757, in __init__ self.block_sparse_moe = MixtralSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in __init__ self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 672, in __init__ self.w3 = nn.Linear(self.hidden_dim, self.ffn_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 178593 has 14.74 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 9.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 384, in __init__ self.fc2 = nn.Linear(config.ffn_dim, self.embed_dim, bias=config.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight 
= Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 784.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 210.12 MiB is free. Process 122953 has 14.53 GiB memory in use. Of the allocated memory 14.41 GiB is allocated by PyTorch, and 5.21 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1262, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in 
__init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 802, in __init__ self.mlp = Qwen2MoeSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 740, in __init__ [Qwen2MoeMLP(config, intermediate_size=config.moe_intermediate_size) for _ in range(self.num_experts)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 740, in [Qwen2MoeMLP(config, intermediate_size=config.moe_intermediate_size) for _ in range(self.num_experts)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 349, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 12.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 80332 has 14.73 GiB memory in use. Of the allocated memory 14.62 GiB is allocated by PyTorch, and 948.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.616,6174.998528,0.0,5779.750912,5773.960192,s,1,7.478220703125,7.478220703125,0.0,7.478220703125,7.478220703125,7.478220703125,7.478220703125,[7.478220703125],,kWh,1.0453589358333676e-05,1.1408213825045796e-06,3.244724817996758e-06,1.4839135558835014e-05,,MB,1107.259392,6491.66848,0.0,6085.935104,6038.345728,s,10,2.3825533142089843,0.23825533142089844,0.014006455705070683,0.24009297180175782,0.2507077835083008,0.25461110916137697,0.2577337696838379,"[0.20144749450683594, 0.23908642578125, 0.25851443481445313, 0.23458921813964845, 0.2365148468017578, 0.23665673828125, 0.24212879943847657, 0.24109951782226563, 0.24267546081542968, 0.2498403778076172]",tokens/s,1074.4775299393157,kWh,6.354509738043421e-06,7.007880555138613e-07,4.220492506826212e-06,1.1275790300383495e-05,tokens/kWh,22703508.417613383,MB,1112.174592,6512.64,0.0,6106.906624,6086.544896,s,10,18.115085937499998,1.8115085937500002,0.005597813361466063,1.812888427734375,1.8164951049804687,1.817097100830078,1.8175786975097656,"[1.81152734375, 1.8122493896484375, 1.81234716796875, 1.79936083984375, 1.8026717529296874, 1.816361328125, 1.8176990966796875, 1.8134296875, 1.8143582763671875, 1.8150810546875]",tokens/s,34.7776434610138,kWh,5.342013054278995e-05,5.892050214093037e-06,3.532256690537347e-05,9.463474766225645e-05,tokens/kWh,665717.4194075285,,s,630,18.111963876724243,0.028749149010673394,0.00032102976876612315,0.028713279724121094,0.028936253356933592,0.02910829677581787,0.030612848529815676,"[0.030595104217529298, 0.029626304626464844, 0.02889936065673828, 0.028631040573120117, 0.028559232711791994, 0.02851024055480957, 0.028401599884033204, 0.028483039855957033, 0.028536991119384767, 0.028654111862182616, 0.028512063980102538, 0.028549312591552734, 0.028524351119995118, 0.0285861759185791, 0.028477439880371092, 0.028545024871826172, 0.02865679931640625, 0.02864121627807617, 0.028562335968017577, 0.02863420867919922, 0.028593055725097655, 0.028645376205444335, 0.02865558433532715, 0.0285098876953125, 0.02873583984375, 0.02873958396911621, 0.028579839706420897, 0.028545024871826172, 0.028762304306030273, 0.02869638442993164, 0.02867193603515625, 0.02862598419189453, 0.028791135787963867, 0.02885446357727051, 0.029012672424316405, 0.029253408432006835, 0.02896076774597168, 0.02882361602783203, 0.02870582389831543, 0.028725599288940428, 0.02872991943359375, 0.02873311996459961, 0.028721471786499024, 0.02878384017944336, 0.029506336212158202, 0.028700672149658202, 0.02862051200866699, 0.028732831954956056, 0.028803232192993165, 0.02866044807434082, 0.028728992462158202, 0.02878044891357422, 0.0287891845703125, 0.028665855407714845, 0.02880512046813965, 
0.028827648162841796, 0.028753919601440428, 0.028787776947021483, 0.02870163154602051, 0.02872319984436035, 0.02877804756164551, 0.02876051139831543, 0.028659711837768553, 0.03077734375, 0.029468576431274415, 0.02901545524597168, 0.028737695693969726, 0.028594720840454103, 0.02845462417602539, 0.028565792083740233, 0.028653568267822265, 0.028620800018310546, 0.02851020812988281, 0.028553279876708984, 0.028450752258300783, 0.02874163246154785, 0.028549375534057616, 0.028566272735595703, 0.02852454376220703, 0.028439552307128906, 0.028645376205444335, 0.02858393669128418, 0.028635135650634767, 0.029360128402709962, 0.028440704345703127, 0.02865510368347168, 0.028489728927612305, 0.028670335769653322, 0.028756128311157227, 0.028819135665893555, 0.02862710380554199, 0.028683712005615234, 0.028764736175537108, 0.02868364715576172, 0.028713600158691406, 0.02884105682373047, 0.028831647872924804, 0.02899660873413086, 0.028845695495605467, 0.028809600830078125, 0.028833791732788085, 0.028876863479614257, 0.0287455997467041, 0.02876131248474121, 0.028876991271972657, 0.028716896057128908, 0.02872812843322754, 0.02876825523376465, 0.02879078483581543, 0.028712959289550782, 0.028704191207885744, 0.028803712844848634, 0.028663423538208006, 0.029017728805541994, 0.028786687850952147, 0.028726112365722655, 0.028719167709350586, 0.02877622413635254, 0.02875587272644043, 0.02867625617980957, 0.028847936630249024, 0.028702367782592775, 0.028706304550170897, 0.028779327392578127, 0.028672224044799806, 0.028718624114990234, 0.030757152557373046, 0.029655008316040038, 0.0289168643951416, 0.0286965446472168, 0.028613536834716798, 0.02853232002258301, 0.028477439880371092, 0.02859663963317871, 0.028487871170043946, 0.028628608703613282, 0.028555391311645507, 0.02861369514465332, 0.02871603202819824, 0.02860851287841797, 0.028497919082641602, 0.02874982452392578, 0.028564992904663085, 0.02863088035583496, 0.02856230354309082, 0.028636255264282227, 0.0286276798248291, 0.028499935150146486, 0.028649471282958985, 0.028495872497558594, 0.02877628707885742, 0.02866796875, 0.02862499237060547, 0.02854300880432129, 0.028601823806762697, 0.02864348793029785, 0.028832096099853516, 0.0289619197845459, 0.02867852783203125, 0.028746240615844725, 0.028991615295410156, 0.02888431930541992, 0.028897216796875, 0.028938848495483397, 0.028901567459106447, 0.02872096061706543, 0.028695552825927735, 0.02874060821533203, 0.02873958396911621, 0.02872643280029297, 0.028750688552856445, 0.02880496025085449, 0.028778175354003906, 0.028952255249023437, 0.028695327758789062, 0.028696575164794923, 0.028775903701782228, 0.02884048080444336, 0.028911615371704103, 0.028761695861816407, 0.028723615646362305, 0.028788288116455077, 0.028758144378662108, 0.028752031326293944, 0.028839647293090822, 0.02888035202026367, 0.0287938232421875, 0.02877644729614258, 0.028685375213623045, 0.03062009620666504, 0.029390335083007812, 0.028874847412109376, 0.0286110725402832, 0.028839807510375976, 0.028520448684692383, 0.028624832153320314, 0.02842425537109375, 0.02831974411010742, 0.028358047485351562, 0.028416608810424803, 0.028362720489501954, 0.028734560012817382, 0.028719680786132813, 0.028440959930419923, 0.028411903381347657, 0.028321727752685547, 0.028325727462768555, 0.028401887893676758, 0.028424192428588867, 0.028334079742431642, 0.02836070442199707, 0.028493791580200194, 0.028436288833618165, 0.028451040267944337, 0.02838755226135254, 0.02844179153442383, 0.028400224685668947, 0.02844879913330078, 0.02843235206604004, 0.028380767822265625, 
0.028420511245727538, 0.02853273582458496, 0.02852604866027832, 0.02861520004272461, 0.0286167049407959, 0.028645376205444335, 0.028661184310913086, 0.028543039321899413, 0.028561920166015626, 0.02850115203857422, 0.02863395118713379, 0.02860633659362793, 0.028502143859863282, 0.028516063690185545, 0.02846134376525879, 0.02853455924987793, 0.02849184036254883, 0.028581504821777345, 0.028512575149536132, 0.028503456115722657, 0.028537696838378906, 0.028534751892089844, 0.028508159637451173, 0.02856550407409668, 0.02852659225463867, 0.028559392929077148, 0.02858540725708008, 0.028515871047973634, 0.028503040313720703, 0.0285614070892334, 0.028506111145019532, 0.02854092788696289, 0.03032268714904785, 0.029396991729736328, 0.028836992263793944, 0.02857865524291992, 0.028452896118164064, 0.028420095443725587, 0.02842624092102051, 0.028428287506103517, 0.028382463455200194, 0.028475263595581054, 0.028424543380737306, 0.028367391586303713, 0.028331104278564452, 0.0283472957611084, 0.028293184280395508, 0.0283155517578125, 0.028356639862060547, 0.02840166473388672, 0.028411712646484375, 0.028553407669067384, 0.028516351699829103, 0.028442367553710938, 0.02852275276184082, 0.028422143936157225, 0.028516351699829103, 0.028512256622314453, 0.028411359786987306, 0.02845052719116211, 0.028619039535522462, 0.028447263717651366, 0.028416000366210937, 0.02846723175048828, 0.028601696014404297, 0.028617216110229493, 0.028594303131103515, 0.028669279098510744, 0.028667680740356444, 0.028697471618652343, 0.028639232635498047, 0.028688640594482423, 0.02860972785949707, 0.028648000717163086, 0.028620351791381837, 0.02852396774291992, 0.02857472038269043, 0.028747615814208986, 0.028518272399902345, 0.028549407958984373, 0.028530048370361327, 0.028576160430908205, 0.02873776054382324, 0.02886182403564453, 0.028750463485717772, 0.02871500778198242, 0.02880512046813965, 0.028848031997680663, 0.028772447586059572, 0.028794879913330077, 0.028682239532470705, 0.02875801658630371, 0.028778495788574218, 0.028780256271362305, 0.028712575912475585, 0.030640703201293945, 0.029507104873657226, 0.029047264099121093, 0.028697599411010744, 0.028621824264526367, 0.028603904724121092, 0.028588672637939454, 0.02862054443359375, 0.02874380874633789, 0.02859519958496094, 0.028601343154907227, 0.028528640747070313, 0.028720703125, 0.028700992584228514, 0.028668031692504883, 0.02855731201171875, 0.028589920043945314, 0.0286474552154541, 0.02854310417175293, 0.028639488220214844, 0.028702720642089844, 0.028691680908203124, 0.028705535888671876, 0.0285565128326416, 0.02870044708251953, 0.02874473571777344, 0.028661407470703126, 0.028905567169189454, 0.028627967834472655, 0.028678720474243163, 0.028684352874755858, 0.02891200065612793, 0.028886560440063477, 0.028807647705078127, 0.028837631225585938, 0.030287519454956054, 0.029019744873046874, 0.028933120727539063, 0.029001728057861328, 0.028932096481323243, 0.028825599670410155, 0.028853952407836916, 0.02880031967163086, 0.028937376022338868, 0.028833887100219727, 0.028753919601440428, 0.02876323127746582, 0.028742591857910157, 0.02886627197265625, 0.028816928863525392, 0.028818111419677734, 0.028763935089111327, 0.028709056854248047, 0.028763711929321287, 0.028954879760742187, 0.028862464904785157, 0.028717056274414062, 0.02894233512878418, 0.02876006317138672, 0.02875596809387207, 0.02880732727050781, 0.02886783981323242, 0.028951135635375977, 0.030656063079833984, 0.029681791305541994, 0.02915564727783203, 0.028720191955566406, 0.02862710380554199, 0.028588991165161132, 
0.028495712280273436, 0.02861392021179199, 0.028531423568725588, 0.028665855407714845, 0.028633087158203126, 0.028606464385986328, 0.029478912353515626, 0.02856550407409668, 0.028663808822631837, 0.028633216857910156, 0.028682111740112304, 0.028923904418945313, 0.028708864212036132, 0.028672000885009766, 0.02879859161376953, 0.028688928604125977, 0.028673887252807617, 0.02857094383239746, 0.028655519485473634, 0.029012767791748047, 0.028733440399169922, 0.028733503341674804, 0.028714527130126954, 0.028709280014038087, 0.0287825927734375, 0.028733440399169922, 0.02881331253051758, 0.02879692840576172, 0.02893414306640625, 0.028964864730834962, 0.029110111236572266, 0.029060991287231445, 0.02896473693847656, 0.029018527984619142, 0.028778495788574218, 0.02893337631225586, 0.02881203269958496, 0.028753919601440428, 0.02876825523376465, 0.028857471466064454, 0.02878758430480957, 0.028738912582397462, 0.02880169677734375, 0.028739072799682616, 0.028825824737548827, 0.028934431076049805, 0.028850175857543944, 0.02872684860229492, 0.028784576416015624, 0.028750335693359375, 0.02878054428100586, 0.02921881675720215, 0.029187103271484376, 0.029010879516601563, 0.02887068748474121, 0.028901344299316407, 0.028798847198486327, 0.031053024291992186, 0.029621023178100586, 0.02892361640930176, 0.028692768096923827, 0.028693824768066405, 0.028622623443603515, 0.028660640716552735, 0.02848988723754883, 0.02865340805053711, 0.028633087158203126, 0.02853887939453125, 0.028680192947387696, 0.02871603202819824, 0.028683263778686522, 0.028594175338745118, 0.02853856086730957, 0.02858425521850586, 0.028565216064453124, 0.028623424530029296, 0.028616512298583984, 0.028611936569213868, 0.028652095794677736, 0.028704767227172853, 0.028737535476684572, 0.02875116729736328, 0.028801727294921874, 0.028692384719848633, 0.02873103904724121, 0.028690784454345704, 0.028818815231323243, 0.028666591644287108, 0.02874367904663086, 0.02874367904663086, 0.028719104766845704, 0.02885171127319336, 0.028936128616333007, 0.028833984375, 0.028780927658081056, 0.029017087936401367, 0.028976127624511717, 0.028754240036010743, 0.028755136489868164, 0.028692447662353515, 0.02868262481689453, 0.02879280090332031, 0.028709056854248047, 0.02872319984436035, 0.02879078483581543, 0.028716543197631835, 0.02869487953186035, 0.028782655715942383, 0.028711008071899413, 0.02874163246154785, 0.02870681571960449, 0.02874928092956543, 0.0287805118560791, 0.028797183990478516, 0.02879017639160156, 0.02885932731628418, 0.028712928771972655, 0.0288719367980957, 0.02888547134399414, 0.028766496658325195, 0.030526592254638673, 0.029479808807373047, 0.029003231048583985, 0.028942880630493165, 0.029142047882080076, 0.028761056900024413, 0.028556800842285155, 0.028531200408935548, 0.028622528076171876, 0.028804927825927733, 0.028688320159912108, 0.028561983108520508, 0.028618751525878908, 0.028593887329101564, 0.028575807571411132, 0.028621248245239258, 0.028642208099365234, 0.02862387275695801, 0.02857088088989258, 0.02860915184020996, 0.02855116844177246, 0.028674047470092775, 0.028682016372680665, 0.028688608169555666, 0.028802976608276368, 0.02873049545288086, 0.028707807540893554, 0.02872438430786133, 0.028696767807006834, 0.028701343536376954, 0.028680192947387696, 0.028675680160522462, 0.02886697578430176, 0.028886272430419923, 0.029305791854858397, 0.02901740837097168, 0.02906675148010254, 0.02893926429748535, 0.028710079193115235, 0.02879756736755371, 0.02877440071105957, 0.02882374382019043, 0.02879897689819336, 0.028757440567016603, 
0.028681888580322265, 0.028758943557739256, 0.02877644729614258, 0.02872319984436035, 0.028716064453125, 0.028703104019165038, 0.028754528045654298, 0.028711999893188477, 0.028713920593261718, 0.028762111663818358, 0.02878463935852051, 0.028856447219848633, 0.02884752082824707, 0.028783071517944337, 0.02873244857788086, 0.0288306884765625, 0.02888243293762207, 0.02881177520751953, 0.028704927444458007, 0.030879743576049806, 0.029616128921508788, 0.028861919403076173, 0.028768415451049804, 0.02875430488586426, 0.028737312316894532, 0.028647647857666016, 0.02851353645324707, 0.02863795280456543, 0.028633087158203126, 0.028542335510253907, 0.028621440887451173, 0.028727296829223634, 0.028667903900146483, 0.02856345558166504, 0.028555200576782225, 0.028687551498413087, 0.028683135986328125, 0.02856982421875, 0.02890118408203125, 0.028624319076538087, 0.028623615264892578, 0.028785760879516602, 0.028652128219604493, 0.028747167587280274, 0.028667808532714844, 0.028625024795532226, 0.02852681541442871, 0.028748224258422852, 0.028729343414306642, 0.02873075294494629, 0.028727903366088867, 0.028852256774902343, 0.028917024612426758, 0.029137632369995118, 0.0291060791015625, 0.028868703842163085, 0.028955839157104493, 0.028883935928344727, 0.0288623046875, 0.028753919601440428, 0.028792224884033202, 0.02872368049621582, 0.028729215621948242, 0.02879283142089844, 0.028700159072875975, 0.028875520706176758, 0.028786304473876954, 0.0288221435546875, 0.028912384033203124, 0.028869632720947266, 0.028778495788574218, 0.0287825927734375, 0.028800287246704102, 0.028885631561279296, 0.028855808258056642, 0.028774463653564453, 0.02878873634338379, 0.028858688354492186, 0.028833919525146485, 0.02874991989135742, 0.02874163246154785, 0.028747264862060546]",tokens/s,34.783638278431845,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,817.729536,6174.998528,0.0,5779.750912,5773.960192,s,1,7.57930224609375,7.57930224609375,0.0,7.57930224609375,7.57930224609375,7.57930224609375,7.57930224609375,[7.57930224609375],,kWh,1.031787790832368e-05,1.1308835693605839e-06,4.427225764006004e-06,1.587598724169027e-05,,MB,1109.594112,6491.66848,0.0,6085.935104,6038.345728,s,10,2.3674476165771483,0.23674476165771483,0.012145732333113422,0.24003164672851562,0.24437044525146484,0.2444778022766113,0.2445636878967285,"[0.20128944396972656, 0.23908291625976563, 0.2431251220703125, 0.24098037719726562, 0.24434658813476562, 0.2358023681640625, 0.23840220642089843, 0.23757420349121094, 0.2422592315673828, 0.2445851593017578]",tokens/s,1081.3333237342094,kWh,6.383036748641098e-06,7.038957026354253e-07,4.221482845782736e-06,1.130841529705926e-05,tokens/kWh,22638008.357065953,MB,1114.431488,6512.64,0.0,6106.906624,6086.544896,s,10,18.189046997070317,1.8189046997070313,0.002654217950322588,1.818224365234375,1.8220148559570313,1.8222690246582032,1.8224723596191408,"[1.8154215087890626, 1.818479248046875, 
1.817969482421875, 1.815954345703125, 1.822523193359375, 1.8219407958984375, 1.8219583740234375, 1.8213411865234375, 1.81760107421875, 1.8158577880859375]",tokens/s,34.63622916041029,kWh,5.362022793344189e-05,5.914190468179385e-06,3.549668781761704e-05,9.503110621923831e-05,tokens/kWh,662940.8254457017,,s,630,18.185909311294548,0.028866522716340565,0.0002932959952372305,0.028806336402893068,0.029048799324035642,0.029152521705627443,0.030631099720001224,"[0.030439424514770507, 0.029577215194702147, 0.029061119079589845, 0.028778495788574218, 0.028741247177124025, 0.028761888504028322, 0.028666080474853514, 0.02862076759338379, 0.028575328826904296, 0.028547903060913087, 0.0285980167388916, 0.028608768463134766, 0.02877440071105957, 0.02878054428100586, 0.028684255599975585, 0.028745376586914062, 0.02866009521484375, 0.028655616760253907, 0.028535839080810546, 0.028687328338623048, 0.028677791595458985, 0.028649791717529297, 0.02860611152648926, 0.028621055603027343, 0.028801151275634766, 0.02874336051940918, 0.028725568771362304, 0.028696128845214844, 0.028682687759399413, 0.02873958396911621, 0.028685407638549806, 0.028771167755126954, 0.02884819221496582, 0.02881657600402832, 0.028826175689697267, 0.02907321548461914, 0.029012224197387696, 0.029159616470336915, 0.02888915252685547, 0.028945760726928713, 0.02879756736755371, 0.028757440567016603, 0.028709407806396484, 0.02875094413757324, 0.028721887588500975, 0.028811456680297852, 0.028807167053222657, 0.028862464904785157, 0.02883737564086914, 0.028744192123413087, 0.02875596809387207, 0.028755359649658203, 0.028785247802734375, 0.028887359619140626, 0.028808544158935547, 0.028736991882324217, 0.02885264015197754, 0.028909536361694337, 0.028930559158325195, 0.028878400802612305, 0.028897727966308594, 0.02893414306640625, 0.028735231399536133, 0.030709760665893555, 0.029738239288330078, 0.029081663131713866, 0.02891436767578125, 0.02869862365722656, 0.02870681571960449, 0.02865344047546387, 0.028776575088500976, 0.028708864212036132, 0.02870681571960449, 0.028622848510742187, 0.028516351699829103, 0.028704767227172853, 0.028659711837768553, 0.028740863800048828, 0.02864566421508789, 0.028596704483032226, 0.02865337562561035, 0.028733631134033204, 0.02871500778198242, 0.02873107147216797, 0.028655935287475585, 0.028940288543701172, 0.028618080139160156, 0.028738016128540038, 0.028700544357299806, 0.028721471786499024, 0.028708192825317384, 0.028711584091186522, 0.028905536651611326, 0.02885215950012207, 0.028872480392456056, 0.02888630485534668, 0.028923072814941407, 0.028931615829467773, 0.028964160919189453, 0.0289719352722168, 0.029073408126831055, 0.028901056289672853, 0.028836160659790038, 0.029100032806396486, 0.02894643211364746, 0.028848127365112306, 0.028817407608032225, 0.028824800491333007, 0.02875276756286621, 0.028757312774658202, 0.028815967559814453, 0.02894643211364746, 0.028888479232788086, 0.029149599075317383, 0.0289334716796875, 0.02890982437133789, 0.028840192794799803, 0.028915327072143556, 0.028838367462158204, 0.02889753532409668, 0.028831584930419922, 0.028825664520263673, 0.028724319458007814, 0.028959743499755858, 0.02883500862121582, 0.028834432601928712, 0.03068511962890625, 0.029756959915161134, 0.029282848358154298, 0.028835840225219726, 0.02872025680541992, 0.028674079895019532, 0.02870467185974121, 0.02870377540588379, 0.028702463150024414, 0.028508031845092773, 0.028570112228393556, 0.028591903686523437, 0.02866489601135254, 0.028709823608398438, 0.028765663146972657, 0.028669567108154298, 
0.02871776008605957, 0.02872038459777832, 0.028729343414306642, 0.028711904525756837, 0.02872319984436035, 0.028667104721069335, 0.02876032066345215, 0.02870662307739258, 0.028752031326293944, 0.02892857551574707, 0.0287457275390625, 0.028843551635742187, 0.02880988883972168, 0.02877129554748535, 0.028709344863891602, 0.0287379207611084, 0.028872735977172853, 0.028924928665161134, 0.02897977638244629, 0.029137504577636718, 0.029220672607421876, 0.02905824089050293, 0.028986175537109374, 0.028895360946655273, 0.028732959747314452, 0.028706592559814455, 0.028667455673217775, 0.028799808502197266, 0.02874387168884277, 0.028805343627929688, 0.02884940719604492, 0.028914207458496093, 0.029234399795532228, 0.028875551223754882, 0.02874387168884277, 0.028743488311767578, 0.028805152893066406, 0.028767391204833983, 0.02881990432739258, 0.028968448638916015, 0.02888172721862793, 0.028921920776367186, 0.028880447387695313, 0.028848575592041015, 0.028753183364868165, 0.028771360397338866, 0.028781728744506838, 0.030577407836914063, 0.029626367568969726, 0.02905023956298828, 0.028690240859985353, 0.028605247497558595, 0.028618335723876953, 0.02858345603942871, 0.02854092788696289, 0.028541120529174804, 0.028641408920288085, 0.02858451271057129, 0.028628992080688476, 0.028626623153686522, 0.028542911529541016, 0.028655391693115234, 0.028834400177001954, 0.029147136688232423, 0.028675743103027344, 0.028739936828613283, 0.02869171142578125, 0.02874448013305664, 0.028728736877441406, 0.02874220848083496, 0.02871839904785156, 0.028764928817749023, 0.028713983535766603, 0.028719968795776367, 0.028622943878173827, 0.02855740737915039, 0.028655519485473634, 0.02880512046813965, 0.029430784225463868, 0.028821535110473633, 0.028967008590698243, 0.029035392761230468, 0.029057024002075195, 0.02898054313659668, 0.029007808685302734, 0.028895999908447264, 0.028915712356567383, 0.028823551177978517, 0.028794591903686523, 0.02874163246154785, 0.028737823486328126, 0.028807039260864257, 0.028784767150878906, 0.028716928482055665, 0.028749952316284178, 0.028794879913330077, 0.028692480087280273, 0.028769472122192382, 0.028774528503417968, 0.028954399108886718, 0.028743743896484375, 0.029012960433959963, 0.028806880950927736, 0.028739744186401368, 0.028835840225219726, 0.028784576416015624, 0.028813119888305663, 0.028815616607666017, 0.02879283142089844, 0.028725248336791992, 0.030593952178955077, 0.029666879653930663, 0.029003488540649415, 0.028742271423339842, 0.028512479782104493, 0.02851171112060547, 0.028549184799194337, 0.02873788833618164, 0.028784255981445312, 0.02875164794921875, 0.02887548828125, 0.0287903995513916, 0.028780799865722656, 0.02874982452392578, 0.02874982452392578, 0.028705888748168946, 0.0287425594329834, 0.028738847732543947, 0.028797664642333985, 0.028907520294189453, 0.028778495788574218, 0.02873139190673828, 0.02877961540222168, 0.02874435234069824, 0.028806720733642578, 0.02895270347595215, 0.028800575256347657, 0.02876927947998047, 0.028827360153198242, 0.028879295349121092, 0.028856351852416993, 0.028955968856811523, 0.028903776168823243, 0.029568960189819336, 0.02915760040283203, 0.02908723258972168, 0.029053440093994142, 0.029060096740722657, 0.029025407791137697, 0.02904051208496094, 0.02901580810546875, 0.02898150444030762, 0.028868160247802734, 0.0290263671875, 0.02889971160888672, 0.028953855514526367, 0.028871423721313478, 0.028932096481323243, 0.02893519973754883, 0.028953088760375976, 0.028901439666748047, 0.028901792526245116, 0.028929407119750977, 0.028918399810791015, 
0.028999584197998047, 0.029114463806152343, 0.028911808013916015, 0.028935583114624023, 0.028879072189331053, 0.028952224731445313, 0.02897769546508789, 0.02884422492980957, 0.02901100730895996, 0.03085094451904297, 0.029772287368774415, 0.02911884880065918, 0.028860416412353516, 0.028786687850952147, 0.028706464767456055, 0.028651456832885742, 0.028719520568847655, 0.02874163246154785, 0.028817407608032225, 0.0287554874420166, 0.02874361610412598, 0.028776384353637694, 0.02867897605895996, 0.02874959945678711, 0.02868230438232422, 0.028721216201782227, 0.028815231323242187, 0.02895462417602539, 0.02900480079650879, 0.028824703216552734, 0.028760223388671874, 0.028751392364501954, 0.028686527252197266, 0.028844127655029295, 0.028983455657958984, 0.0288656005859375, 0.02892870330810547, 0.02881926345825195, 0.028952768325805664, 0.028825408935546876, 0.028833663940429688, 0.02903481674194336, 0.029097471237182617, 0.029097663879394532, 0.02904863929748535, 0.02896691131591797, 0.028903839111328124, 0.02891961669921875, 0.02899843215942383, 0.028893184661865235, 0.028821504592895508, 0.02890713691711426, 0.02888742446899414, 0.0287457275390625, 0.028825599670410155, 0.02891302490234375, 0.028805952072143554, 0.02890150451660156, 0.028868288040161134, 0.02880102348327637, 0.0287903995513916, 0.029024639129638673, 0.02887641525268555, 0.02891542434692383, 0.028961599349975584, 0.028888927459716798, 0.029097984313964844, 0.028868608474731446, 0.028903423309326173, 0.029041791915893556, 0.028922752380371095, 0.028901632308959962, 0.030714591979980468, 0.029648000717163087, 0.029075872421264647, 0.028886655807495117, 0.028729440689086914, 0.028695552825927735, 0.028726911544799803, 0.02872902488708496, 0.02876153564453125, 0.028768543243408204, 0.028693376541137697, 0.028737152099609375, 0.028776351928710937, 0.028639167785644532, 0.028799327850341797, 0.028674079895019532, 0.028730911254882814, 0.02870524787902832, 0.028753440856933595, 0.028715551376342772, 0.028815391540527344, 0.02885843276977539, 0.028814912796020508, 0.02879747200012207, 0.029027999877929686, 0.028864608764648438, 0.02880886459350586, 0.028797279357910155, 0.028792032241821287, 0.02879280090332031, 0.02886079978942871, 0.028859935760498046, 0.028881824493408204, 0.028917760848999025, 0.02926313591003418, 0.029061855316162108, 0.029056768417358398, 0.028960639953613282, 0.029030784606933594, 0.02899715232849121, 0.028813791275024415, 0.028919807434082033, 0.028809215545654295, 0.028796096801757813, 0.02899795150756836, 0.028789247512817383, 0.028983327865600587, 0.028959808349609376, 0.02896784019470215, 0.028817407608032225, 0.028957984924316407, 0.02887343978881836, 0.028899328231811523, 0.028900928497314453, 0.029280832290649414, 0.028904800415039063, 0.028936256408691408, 0.02892608070373535, 0.028896799087524416, 0.02902931213378906, 0.028900447845458983, 0.028959520339965822, 0.029071104049682616, 0.030795743942260742, 0.02978019142150879, 0.029081375122070312, 0.028708864212036132, 0.02876438331604004, 0.028681024551391602, 0.028731679916381835, 0.028720928192138673, 0.028724128723144532, 0.028729055404663088, 0.028780799865722656, 0.028673152923583984, 0.028824480056762695, 0.02868751907348633, 0.02873619270324707, 0.02869660758972168, 0.028712671279907228, 0.02871548843383789, 0.028738943099975586, 0.028777023315429688, 0.028770303726196288, 0.028896480560302733, 0.028803871154785155, 0.02879897689819336, 0.028763551712036133, 0.0291549129486084, 0.028758495330810548, 0.02883404731750488, 0.02885865592956543, 
0.02874982452392578, 0.028858367919921874, 0.028931615829467773, 0.029001792907714843, 0.029075872421264647, 0.029042688369750977, 0.02900105667114258, 0.02899193572998047, 0.029013919830322265, 0.029339967727661134, 0.02894220733642578, 0.02875200080871582, 0.028896896362304688, 0.0288439998626709, 0.028815263748168944, 0.02886092758178711, 0.028923904418945313, 0.028762111663818358, 0.028915552139282225, 0.02895894432067871, 0.028728256225585936, 0.028913951873779296, 0.028887775421142577, 0.028960639953613282, 0.028829471588134765, 0.028954656600952148, 0.028872415542602538, 0.028938720703125, 0.02891788864135742, 0.028932319641113282, 0.02882966423034668, 0.029035743713378907, 0.028971616744995116, 0.02885807991027832, 0.0308175048828125, 0.029647712707519532, 0.029142976760864258, 0.028788000106811523, 0.028797504425048828, 0.029034656524658205, 0.02894438362121582, 0.028645376205444335, 0.02871500778198242, 0.028604415893554686, 0.02869001579284668, 0.028695968627929686, 0.028744991302490235, 0.028663520812988282, 0.028657663345336915, 0.02855891227722168, 0.029085535049438477, 0.029118751525878905, 0.02866217613220215, 0.02873436737060547, 0.028682432174682616, 0.02910700798034668, 0.029087039947509767, 0.028707168579101563, 0.028788223266601562, 0.028746591567993165, 0.02869990348815918, 0.028713056564331055, 0.028719423294067382, 0.0287542724609375, 0.028735488891601563, 0.028747039794921873, 0.028809471130371092, 0.02895414352416992, 0.028978111267089844, 0.0289234561920166, 0.02882431983947754, 0.02889491271972656, 0.028823392868041992, 0.028789024353027343, 0.028915679931640625, 0.028835744857788087, 0.028684160232543946, 0.02876198387145996, 0.028762367248535155, 0.028730880737304686, 0.028708736419677736, 0.028756607055664064, 0.028758079528808593, 0.028676031112670898, 0.02879283142089844, 0.02877440071105957, 0.02877449607849121, 0.02871900749206543, 0.02883516883850098, 0.02883407974243164, 0.028976543426513672, 0.02878767967224121, 0.028833791732788085, 0.028694271087646484, 0.028782880783081055, 0.028914688110351562, 0.02878563117980957, 0.030646272659301758, 0.029595008850097658, 0.02903654479980469, 0.028760704040527343, 0.028604415893554686, 0.028506111145019532, 0.028635168075561525, 0.028770559310913085, 0.028714719772338866, 0.02867967987060547, 0.028690944671630858, 0.028563232421875, 0.028610847473144532, 0.028565023422241213, 0.028729759216308593, 0.02867571258544922, 0.028573631286621094, 0.02864975929260254, 0.028586143493652343, 0.02857164764404297, 0.02872425651550293, 0.028675039291381835, 0.028907520294189453, 0.029618175506591796, 0.028811103820800783, 0.02866102409362793, 0.028671871185302733, 0.02863747215270996, 0.028848575592041015, 0.028739871978759764, 0.028704767227172853, 0.02880499267578125, 0.02879302406311035, 0.028821439743041993, 0.028876800537109375, 0.0289751033782959, 0.028887231826782225, 0.02889299201965332, 0.02892812728881836, 0.02879270362854004, 0.028729343414306642, 0.02876380729675293, 0.02867030334472656, 0.028686656951904296, 0.028714208602905272, 0.02873788833618164, 0.028745023727416993, 0.028723552703857423, 0.028799455642700197, 0.028688383102416993, 0.02877408027648926, 0.028733024597167967, 0.028727584838867188, 0.028706335067749025, 0.028859296798706056, 0.028938175201416016, 0.028977216720581053, 0.029085695266723634, 0.028866336822509765, 0.028893407821655274, 0.02896294403076172, 0.02900099182128906, 0.028803647994995116]",tokens/s,34.642205083951,, 
float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 560.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 20591 has 14.73 GiB memory in use. Of the allocated memory 14.62 GiB is allocated by PyTorch, and 1.67 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in 
range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 717, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 322.12 MiB is free. Process 45056 has 14.42 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 13.04 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in 
load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 706, in __init__ self.model = XGLMModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 518, in __init__ self.layers = nn.ModuleList([XGLMDecoderLayer(config) for _ in range(config.num_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 518, in self.layers = nn.ModuleList([XGLMDecoderLayer(config) for _ in range(config.num_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 372, in __init__ self.fc1 = nn.Linear(self.embed_dim, config.ffn_dim) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 128.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 78.12 MiB is free. Process 130629 has 14.66 GiB memory in use. Of the allocated memory 14.54 GiB is allocated by PyTorch, and 12.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 706, in __init__ self.model = XGLMModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 518, in __init__ self.layers = nn.ModuleList([XGLMDecoderLayer(config) for _ in range(config.num_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 518, in self.layers = nn.ModuleList([XGLMDecoderLayer(config) for _ in range(config.num_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 352, in __init__ self.self_attn = XGLMAttention( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 220, in __init__ self.q_proj = nn.Linear(embed_dim, embed_dim, bias=bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 56.12 MiB is free. Process 132978 has 14.68 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 4.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 272, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 232.12 MiB is free. Process 68277 has 14.51 GiB memory in use. Of the allocated memory 14.40 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 896.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 342.12 MiB is free. Process 165757 has 14.40 GiB memory in use. Of the allocated memory 14.29 GiB is allocated by PyTorch, and 1.75 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = 
DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in 
range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 896.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 598.12 MiB is free. Process 171822 has 14.15 GiB memory in use. Of the allocated memory 14.04 GiB is allocated by PyTorch, and 1.81 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 717, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 206221 has 14.71 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 4.20 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 657, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 324, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * 
self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 238.12 MiB is free. Process 74328 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you 
passed . Fix one of those so they match! ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 272, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ 
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.50 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.36 GiB is free. Process 77316 has 13.38 GiB memory in use. Of the allocated memory 13.27 GiB is allocated by PyTorch, and 1.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 711, in __init__ self.transformer = CodeGenModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 472, 
in __init__ self.h = nn.ModuleList([CodeGenBlock(config, layer_idx=i) for i in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 472, in self.h = nn.ModuleList([CodeGenBlock(config, layer_idx=i) for i in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 303, in __init__ self.mlp = CodeGenMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 282, in __init__ self.fc_out = nn.Linear(intermediate_size, embed_dim) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 520.12 MiB is free. Process 91415 has 14.23 GiB memory in use. Of the allocated memory 14.11 GiB is allocated by PyTorch, and 2.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,791.126016,3950.903296,0.0,3548.381184,3467.969536,s,1,7.6725634765625,7.6725634765625,0.0,7.6725634765625,7.6725634765625,7.6725634765625,7.6725634765625,[7.6725634765625],,kWh,4.699402516644113e-06,5.101462436096725e-07,9.758341139864957e-07,6.185382874240282e-06,,MB,1149.448192,3961.389056,0.0,3548.381184,3412.88448,s,10,1.0057109069824217,0.10057109069824217,0.0010864820470759373,0.10095800018310547,0.10144194564819337,0.10172946815490723,0.10195948616027832,"[0.1012562255859375, 0.10076537322998047, 0.09821481323242187, 0.1020169906616211, 0.10089315032958984, 0.10113209533691406, 0.09979235076904297, 0.09923900604248047, 0.10102285003662109, 0.1013780517578125]",tokens/s,2545.463097025699,kWh,3.189216312409167e-06,3.5149405175521295e-07,2.119624280239222e-06,5.660334644403602e-06,tokens/kWh,45227007.95669534,MB,1161.281536,3963.486208,0.0,3550.478336,3457.675776,s,10,15.605126708984375,1.5605126708984376,0.0061019349175773655,1.5613223876953124,1.5681363769531251,1.569254345703125,1.570148720703125,"[1.567887939453125, 1.549548583984375, 1.5643330078125, 1.5622947998046874, 1.553847412109375, 1.56256494140625, 1.5603499755859376, 1.55485498046875, 1.570372314453125, 
1.55907275390625]",tokens/s,40.371347938962174,kWh,4.542605653009322e-05,5.010390008728334e-06,2.6361120726560256e-05,7.67975672653818e-05,tokens/kWh,820338.4852321831,,s,630,15.601213703155521,0.024763831274850028,0.0007549158128838469,0.02459724807739258,0.02503782444000244,0.02567691135406494,0.029364444084167485,"[0.025075712203979493, 0.024434688568115235, 0.02461052894592285, 0.024885536193847656, 0.02471023941040039, 0.024754079818725586, 0.024859392166137695, 0.02969625663757324, 0.024983264923095702, 0.026003583908081055, 0.02488912010192871, 0.025158016204833985, 0.02473097610473633, 0.02452547264099121, 0.024623104095458984, 0.024623071670532227, 0.025431999206542967, 0.0247743034362793, 0.02469113540649414, 0.024635391235351564, 0.024541088104248047, 0.0246744327545166, 0.024459232330322267, 0.02441526412963867, 0.025390047073364258, 0.02609286308288574, 0.024724159240722656, 0.024666112899780275, 0.024842048645019533, 0.02476176071166992, 0.024654624938964843, 0.024428543090820314, 0.02475142478942871, 0.024707775115966796, 0.024565759658813476, 0.02458176040649414, 0.024752511978149414, 0.02468659210205078, 0.02467020797729492, 0.024410112380981445, 0.02484223937988281, 0.024575136184692384, 0.025107295989990234, 0.024852479934692383, 0.024360960006713867, 0.024588287353515623, 0.024518655776977538, 0.024754175186157225, 0.025331104278564453, 0.028881088256835937, 0.025743200302124025, 0.02468057632446289, 0.024435136795043947, 0.024336320877075195, 0.024463424682617186, 0.024541183471679686, 0.024584192276000977, 0.02452275276184082, 0.024375295639038085, 0.024260608673095704, 0.024197120666503907, 0.024423648834228515, 0.02429385566711426, 0.024841567993164063, 0.02446633529663086, 0.02467635154724121, 0.024602624893188478, 0.02438470458984375, 0.02455129623413086, 0.024347423553466797, 0.024463199615478517, 0.02450604820251465, 0.024480384826660158, 0.024373247146606446, 0.024360960006713867, 0.024803007125854492, 0.024692224502563476, 0.024647775650024413, 0.02435964775085449, 0.024552576065063475, 0.024586431503295897, 0.024586944580078124, 0.02439116859436035, 0.024364608764648438, 0.02449488067626953, 0.02440003204345703, 0.026861568450927735, 0.027504159927368165, 0.024783327102661134, 0.02467196846008301, 0.024312095642089845, 0.024217599868774413, 0.024543231964111328, 0.024308736801147462, 0.024368127822875976, 0.024269887924194336, 0.024465728759765625, 0.024459903717041015, 0.02489097595214844, 0.02450604820251465, 0.02439139175415039, 0.024468416213989257, 0.024741376876831055, 0.02453356742858887, 0.02475382423400879, 0.02454153633117676, 0.02453913688659668, 0.024399808883666992, 0.024325504302978515, 0.02452534484863281, 0.024435903549194334, 0.024497343063354493, 0.024336160659790038, 0.02441651153564453, 0.02454092788696289, 0.0243240966796875, 0.024340032577514648, 0.02451705551147461, 0.024535039901733398, 0.024584192276000977, 0.02464358329772949, 0.02433843231201172, 0.024534591674804686, 0.024431039810180664, 0.024707071304321288, 0.02465996742248535, 0.02507161521911621, 0.02443059158325195, 0.02490777587890625, 0.027892927169799804, 0.024922847747802734, 0.02451875114440918, 0.024383487701416014, 0.024260608673095704, 0.02429680061340332, 0.024684736251831055, 0.024586559295654297, 0.024393856048583986, 0.024352800369262694, 0.0245166072845459, 0.02470035171508789, 0.024717887878417968, 0.024755231857299803, 0.024730592727661132, 0.02470297622680664, 0.02469273567199707, 0.024489856719970702, 0.024524255752563475, 0.024615583419799806, 
0.02470297622680664, 0.024563072204589843, 0.02465996742248535, 0.024665952682495117, 0.02491254425048828, 0.024807552337646484, 0.02458624076843262, 0.02507366371154785, 0.02509004783630371, 0.02484329605102539, 0.02477564811706543, 0.02453913688659668, 0.024670143127441407, 0.02461087989807129, 0.024435903549194334, 0.0244703369140625, 0.02471116828918457, 0.027258880615234377, 0.026703872680664063, 0.024688640594482423, 0.025036575317382813, 0.02558118438720703, 0.02503536033630371, 0.024694496154785157, 0.024961248397827148, 0.024553152084350587, 0.024480127334594728, 0.02512281608581543, 0.024912927627563478, 0.024636383056640624, 0.024647584915161135, 0.024643423080444336, 0.02466633605957031, 0.02488528060913086, 0.024922111511230468, 0.0248668155670166, 0.024616960525512696, 0.024664064407348633, 0.024614431381225585, 0.02448348808288574, 0.02472287940979004, 0.024352672576904297, 0.024385568618774413, 0.02447830390930176, 0.02445110321044922, 0.02447488021850586, 0.02443561553955078, 0.0242739200592041, 0.024601408004760742, 0.02462518310546875, 0.024426496505737305, 0.02469696044921875, 0.024485343933105468, 0.024628671646118164, 0.02478179168701172, 0.02955264091491699, 0.024721088409423827, 0.02475654411315918, 0.0245166072845459, 0.024487007141113282, 0.024451648712158203, 0.024486400604248046, 0.024645471572875978, 0.024285184860229493, 0.024185888290405272, 0.024445920944213866, 0.024815071105957032, 0.02417695999145508, 0.024318239212036134, 0.024575935363769532, 0.02472297668457031, 0.02475382423400879, 0.02474844741821289, 0.02472297668457031, 0.024631872177124023, 0.0245599365234375, 0.024636608123779297, 0.024554304122924805, 0.024465408325195313, 0.024665536880493163, 0.02467897605895996, 0.02493574333190918, 0.024852575302124022, 0.024539743423461914, 0.024506368637084962, 0.024579103469848634, 0.0247674560546875, 0.024498176574707032, 0.024403968811035157, 0.024333856582641603, 0.029166048049926757, 0.024564735412597655, 0.024534015655517577, 0.02503628730773926, 0.024697343826293947, 0.024475648880004884, 0.024784448623657227, 0.029481407165527343, 0.024580095291137697, 0.0245534725189209, 0.024509824752807618, 0.024435327529907225, 0.0243240966796875, 0.024785247802734375, 0.024344255447387695, 0.024485471725463868, 0.024529151916503907, 0.024434240341186523, 0.02568079948425293, 0.024637439727783202, 0.02430771255493164, 0.024237152099609374, 0.02429635238647461, 0.02507366371154785, 0.02436262321472168, 0.024380863189697264, 0.024482751846313478, 0.024306751251220702, 0.024267711639404298, 0.02408857536315918, 0.024453119277954103, 0.024664064407348633, 0.024546495437622072, 0.02440889549255371, 0.024568960189819335, 0.024259231567382813, 0.02447145652770996, 0.02393120002746582, 0.024540864944458007, 0.02454764747619629, 0.024655872344970704, 0.024544960021972657, 0.024348735809326172, 0.024332256317138673, 0.02467849540710449, 0.029505727767944336, 0.024715263366699217, 0.024825183868408204, 0.024475488662719726, 0.024280960083007813, 0.0244335994720459, 0.02447702407836914, 0.02474153518676758, 0.024664384841918945, 0.024562112808227538, 0.024549631118774413, 0.02441811180114746, 0.024746143341064453, 0.02501840019226074, 0.025126911163330077, 0.02472742462158203, 0.02472153663635254, 0.024512191772460938, 0.024496448516845702, 0.024907455444335938, 0.024704992294311525, 0.02453334426879883, 0.024467456817626954, 0.02473369598388672, 0.024707008361816406, 0.024787008285522463, 0.0244256649017334, 0.02480620765686035, 0.0245534725189209, 0.02429747200012207, 
0.025887935638427735, 0.02509619140625, 0.02511257553100586, 0.024700576782226563, 0.024501728057861327, 0.02478323173522949, 0.025055583953857423, 0.02752118492126465, 0.0263657283782959, 0.024406240463256835, 0.02467136001586914, 0.024460159301757812, 0.02451251220703125, 0.02418070411682129, 0.024383520126342772, 0.024227840423583984, 0.024506368637084962, 0.024397823333740236, 0.02441347122192383, 0.02445795249938965, 0.024526847839355468, 0.024944639205932616, 0.0267325439453125, 0.02455740737915039, 0.02460892868041992, 0.024600576400756836, 0.024793088912963866, 0.024442880630493165, 0.024436735153198243, 0.024551424026489257, 0.024571903228759767, 0.024582143783569335, 0.02466815948486328, 0.02515283203125, 0.0253221435546875, 0.02453126335144043, 0.02464739227294922, 0.02456787109375, 0.024717248916625977, 0.024483840942382814, 0.024952831268310546, 0.024906911849975587, 0.024607583999633788, 0.02483795166015625, 0.024375232696533203, 0.02451481628417969, 0.02437126350402832, 0.024547264099121092, 0.025050464630126952, 0.027230432510375976, 0.026714176177978517, 0.02462656021118164, 0.024581119537353514, 0.024528095245361328, 0.02455129623413086, 0.024335264205932617, 0.024453119277954103, 0.024576000213623047, 0.02439129638671875, 0.02449446487426758, 0.024526527404785156, 0.024508928298950194, 0.024741695404052733, 0.02455958366394043, 0.024817472457885743, 0.024710559844970705, 0.02488604736328125, 0.02488319969177246, 0.027297632217407226, 0.025170080184936522, 0.024729600906372072, 0.024705024719238283, 0.02448182487487793, 0.024887264251708983, 0.024366624832153322, 0.025672159194946288, 0.025528255462646483, 0.024526559829711914, 0.024242176055908202, 0.02408892822265625, 0.024137727737426756, 0.024465408325195313, 0.024573951721191405, 0.024233888626098633, 0.024223424911499022, 0.024401344299316407, 0.024562143325805665, 0.029279743194580078, 0.024356895446777344, 0.02432646369934082, 0.026278560638427734, 0.02675833511352539, 0.02456659126281738, 0.02464963150024414, 0.024848480224609375, 0.024655872344970704, 0.024622623443603515, 0.024684640884399416, 0.024756607055664064, 0.024551424026489257, 0.02479865646362305, 0.02467897605895996, 0.02453708839416504, 0.02445475196838379, 0.0245119686126709, 0.024503391265869142, 0.024508256912231446, 0.024388864517211915, 0.02442848014831543, 0.024308544158935547, 0.024680608749389647, 0.024504159927368162, 0.024321088790893553, 0.024064960479736327, 0.023986175537109376, 0.024638912200927735, 0.024911615371704103, 0.024527679443359374, 0.024489984512329102, 0.02467331123352051, 0.02448601531982422, 0.02460348892211914, 0.024502143859863282, 0.02448601531982422, 0.02480441665649414, 0.024767423629760744, 0.024481599807739257, 0.025036800384521486, 0.024480960845947267, 0.024339263916015624, 0.02443878364562988, 0.024409759521484376, 0.02445142364501953, 0.02473695945739746, 0.024968000411987306, 0.024705024719238283, 0.024680448532104493, 0.024600576400756836, 0.024592384338378907, 0.024366975784301758, 0.024884672164916993, 0.024797887802124025, 0.02472550392150879, 0.024647680282592774, 0.024415519714355467, 0.024349407196044923, 0.024581151962280272, 0.02452556800842285, 0.024617183685302736, 0.024449024200439453, 0.02442572784423828, 0.02446931266784668, 0.024304576873779297, 0.02438041687011719, 0.024412511825561523, 0.024707744598388672, 0.02448601531982422, 0.02454310417175293, 0.025014272689819338, 0.02469478416442871, 0.02451443290710449, 0.024739967346191407, 0.02482585525512695, 0.02445699119567871, 
0.024627424240112303, 0.024655872344970704, 0.024977407455444335, 0.02962227249145508, 0.024766239166259765, 0.02479692840576172, 0.02443721580505371, 0.02450864028930664, 0.0244815673828125, 0.024780799865722656, 0.02467430305480957, 0.02488934326171875, 0.024732831954956055, 0.024739839553833007, 0.02452771186828613, 0.024479488372802734, 0.02442470359802246, 0.024594240188598633, 0.024467647552490233, 0.024696832656860353, 0.024475584030151366, 0.02439993667602539, 0.024446624755859375, 0.02476255989074707, 0.02435081672668457, 0.0243569278717041, 0.024979232788085937, 0.024524288177490236, 0.024682239532470705, 0.024546239852905275, 0.02433843231201172, 0.02550783920288086, 0.026433536529541016, 0.024627199172973634, 0.02465318489074707, 0.024707712173461915, 0.024788991928100586, 0.02494054412841797, 0.024788991928100586, 0.02470911979675293, 0.027146015167236328, 0.027191455841064454, 0.024491392135620117, 0.024772544860839844, 0.02472012710571289, 0.024567808151245117, 0.024642976760864257, 0.024535648345947264, 0.02469478416442871, 0.024999935150146483, 0.024987648010253907, 0.024481792449951172, 0.024465408325195313, 0.02456166458129883, 0.024647903442382813, 0.02466383934020996, 0.024363008499145508, 0.024382944107055663, 0.024434528350830077, 0.024758560180664062, 0.024473215103149416, 0.024412960052490235, 0.024333696365356445, 0.024576416015625, 0.02488137626647949, 0.02468659210205078, 0.024620960235595703, 0.024963008880615235, 0.024496288299560548, 0.024823808670043947, 0.024700927734375, 0.025223167419433593, 0.024815616607666017, 0.02467795181274414, 0.025255615234375, 0.024895679473876952, 0.02474220848083496, 0.02472287940979004, 0.024953664779663084, 0.025206783294677734, 0.02505891227722168, 0.024944223403930665, 0.02967635154724121, 0.024928255081176756, 0.02506342315673828, 0.02459020805358887, 0.024581472396850587, 0.024849184036254884, 0.02504819107055664, 0.024985343933105468, 0.024550207138061525, 0.02472534370422363, 0.024532320022583008, 0.024633983612060546, 0.024688447952270508, 0.024819583892822267, 0.024678943634033203, 0.02454313659667969, 0.024721632003784178, 0.0245534725189209, 0.024715263366699217, 0.02456947135925293, 0.024969600677490236, 0.02447257614135742, 0.024324384689331055, 0.024324832916259767, 0.02431551933288574, 0.024274911880493164, 0.024746400833129883, 0.02433024024963379, 0.024368160247802733, 0.02455855941772461, 0.024839263916015625, 0.024574655532836914, 0.02451273536682129, 0.024698879241943358, 0.024657920837402345, 0.024833919525146485, 0.024677791595458985, 0.024867551803588867, 0.02939904022216797, 0.024748031616210937, 0.027200864791870116, 0.02474051284790039, 0.024694623947143553, 0.024623264312744142, 0.025047040939331053, 0.02468454360961914, 0.0246212158203125, 0.024600255966186525, 0.02461884880065918, 0.024594112396240233, 0.024447551727294924, 0.024270336151123048, 0.024305631637573242, 0.025102943420410157, 0.02482966423034668, 0.024498464584350586, 0.024802751541137695, 0.024619455337524413, 0.024723232269287108, 0.024491552352905274, 0.024469343185424805, 0.024384191513061523, 0.024933727264404296, 0.024639999389648438, 0.024420799255371092, 0.024801279067993166, 0.024549375534057616, 0.024396928787231445, 0.02473664093017578, 0.024559616088867187]",tokens/s,40.38147364602637,, 
float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 688.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 598.12 MiB is free. Process 158085 has 14.15 GiB memory in use. Of the allocated memory 14.04 GiB is allocated by PyTorch, and 1.75 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 456.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 280.12 MiB is free. Process 155262 has 14.46 GiB memory in use. Of the allocated memory 14.35 GiB is allocated by PyTorch, and 3.19 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 270.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 98.12 MiB is free. Process 162767 has 14.64 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 1.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 657, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 327, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, 
self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 56.12 MiB is free. Process 66518 has 14.68 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 711.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,887.861248,15254.552576,0.0,14852.030464,14834.516992,s,1,7.6857373046875,7.6857373046875,0.0,7.6857373046875,7.6857373046875,7.6857373046875,7.6857373046875,[7.6857373046875],,kWh,7.827192070817545e-06,8.559216565226133e-07,3.923058694010884e-06,1.2606172421351042e-05,,MB,1210.220544,15663.497216,0.0,15250.489344,15168.108544,s,10,4.941616729736327,0.4941616729736328,0.012472370755828983,0.5000536346435547,0.5020138641357422,0.5031468704223633,0.5040532754516601,"[0.4632622985839844, 0.4950245361328125, 0.5042798767089843, 0.5012466735839843, 0.5013772888183594, 0.49586441040039064, 0.501749755859375, 0.5017620849609375, 0.478189208984375, 0.498860595703125]",tokens/s,518.0490798881918,kWh,1.3696687177462359e-05,1.510491896917777e-06,9.087242118272826e-06,2.429442119265296e-05,tokens/kWh,10537398.605627973,MB,1215.5904,15705.440256,0.0,15292.432384,15247.908352,s,10,37.13355639648438,3.713355639648438,0.005517002843577152,3.7131271972656252,3.7191114990234375,3.7210601440429687,3.7226190600585936,"[3.7230087890625, 3.717416015625, 3.714741455078125, 3.718678466796875, 3.711512939453125, 3.71713671875, 3.704510498046875, 3.707626953125, 3.711103515625, 3.707821044921875]",tokens/s,16.965786774456248,kWh,0.00010888535032587595,1.2010252843417764e-05,7.235921192772827e-05,0.00019325481509702196,tokens/kWh,325994.46470904944,,s,630,37.13034784698488,0.05893706007457915,0.0002685695945156667,0.058896400451660155,0.05925175971984863,0.059444140052795406,0.059783412094116216,"[0.0595432014465332, 0.058837215423583986, 0.05874508666992188, 0.05883644866943359, 0.05859308624267578, 0.05873942565917969, 0.058912479400634765, 0.0588842887878418, 0.058787326812744144, 0.05887619018554688, 0.058886463165283204, 0.05883699035644531, 0.05873980712890625, 0.05890697479248047, 0.0589051513671875, 0.058934303283691404, 0.05901615905761719, 
0.05895695877075195, 0.059157344818115236, 0.059030529022216796, 0.05903222274780273, 0.05892950439453125, 0.05895542526245117, 0.05893564987182617, 0.05875302505493164, 0.0587407341003418, 0.059017215728759766, 0.05901107025146484, 0.05908684921264649, 0.058945377349853514, 0.058929088592529294, 0.058736862182617186, 0.05896806335449219, 0.05893939208984375, 0.05901081466674805, 0.05881472015380859, 0.05896918487548828, 0.058931583404541014, 0.05909123229980469, 0.05925094223022461, 0.059287071228027344, 0.059027519226074215, 0.05921737670898437, 0.059068737030029295, 0.059179649353027344, 0.05912364959716797, 0.05924256134033203, 0.05929507064819336, 0.059343521118164065, 0.05958860778808594, 0.05944483184814453, 0.05943852615356445, 0.059507678985595704, 0.059463680267333986, 0.059400127410888674, 0.0595169906616211, 0.05956198501586914, 0.05966233444213867, 0.059633663177490234, 0.05943091201782227, 0.059325920104980466, 0.059446910858154296, 0.05931203079223633, 0.05983808135986328, 0.05918339157104492, 0.05873849487304687, 0.059020862579345704, 0.05923516845703125, 0.05928140640258789, 0.05911548614501953, 0.05911091232299805, 0.05920003128051758, 0.05890662384033203, 0.05915379333496094, 0.05876700973510742, 0.05890147018432617, 0.05879571151733398, 0.05850668716430664, 0.05859151840209961, 0.05875772857666016, 0.05857215881347656, 0.058659454345703126, 0.05895129776000976, 0.058746463775634764, 0.05884771347045899, 0.05891091156005859, 0.05884656143188476, 0.05881935882568359, 0.0589246711730957, 0.058652481079101565, 0.058501697540283205, 0.05853744125366211, 0.05853392028808594, 0.058826881408691405, 0.05871859359741211, 0.05868544006347656, 0.05877350234985351, 0.05876326370239258, 0.058826751708984375, 0.058971744537353515, 0.05921628952026367, 0.05889603042602539, 0.059060577392578126, 0.05889228820800781, 0.058726398468017575, 0.059398017883300784, 0.05888832092285156, 0.05878579330444336, 0.0586956787109375, 0.058871776580810546, 0.059015201568603515, 0.05900201416015625, 0.059018081665039065, 0.059426239013671875, 0.05912633514404297, 0.05912556838989258, 0.05919353485107422, 0.05921382522583008, 0.05945267105102539, 0.05954352188110352, 0.059579071044921876, 0.059603038787841796, 0.05957632064819336, 0.05937561416625976, 0.05946879959106445, 0.0597657585144043, 0.059692447662353515, 0.05931683349609375, 0.05878121566772461, 0.05867977523803711, 0.05852511978149414, 0.05856217575073242, 0.05857913589477539, 0.05908351898193359, 0.0587344970703125, 0.058554462432861325, 0.058554367065429686, 0.0588284797668457, 0.058676799774169924, 0.05854899215698242, 0.05852569580078125, 0.05872339248657227, 0.058861534118652345, 0.0589524154663086, 0.05891628646850586, 0.05870832061767578, 0.05866339111328125, 0.05867520141601563, 0.0587960319519043, 0.05873395156860352, 0.05884092712402344, 0.058977054595947265, 0.05899673461914062, 0.058859519958496094, 0.05891891098022461, 0.05914828872680664, 0.0594411506652832, 0.059582462310791014, 0.059202686309814456, 0.05905001449584961, 0.05914089584350586, 0.05917407989501953, 0.059261825561523436, 0.05898012924194336, 0.05884297561645508, 0.05875948715209961, 0.058990657806396486, 0.05896192169189453, 0.058689537048339846, 0.058828800201416016, 0.05884108734130859, 0.05866291046142578, 0.05887529754638672, 0.05913564682006836, 0.05883795166015625, 0.05894723129272461, 0.05878316879272461, 0.05893417739868164, 0.05918310546875, 0.059209182739257814, 0.059086910247802736, 0.0592259521484375, 0.059251327514648434, 0.05947596740722656, 
0.05930368041992187, 0.0594598388671875, 0.05940208053588867, 0.059243968963623043, 0.05922889709472656, 0.05998080062866211, 0.059213249206542966, 0.058923072814941406, 0.05902182388305664, 0.058979328155517576, 0.05885440063476562, 0.05902732849121094, 0.05892512130737305, 0.05867116928100586, 0.05857475280761719, 0.05887395095825195, 0.058593215942382815, 0.05861110305786133, 0.058727073669433597, 0.05893462371826172, 0.05889673614501953, 0.05923871994018555, 0.059154430389404294, 0.05921084976196289, 0.05919631958007812, 0.05895894241333008, 0.05897900772094727, 0.059107456207275394, 0.05890467071533203, 0.05867279815673828, 0.058736991882324216, 0.058946559906005856, 0.058788864135742185, 0.058846591949462894, 0.0587470703125, 0.05893369674682617, 0.05899673461914062, 0.05904790496826172, 0.05927734375, 0.05908070373535156, 0.05916057586669922, 0.05910310363769531, 0.05906240081787109, 0.05919887924194336, 0.058939998626708984, 0.059096897125244144, 0.05897235107421875, 0.058933246612548826, 0.0589161262512207, 0.05923299026489258, 0.0592097282409668, 0.05917798233032227, 0.05914265441894531, 0.058948097229003904, 0.059082366943359374, 0.059193313598632814, 0.05899219131469727, 0.059005790710449216, 0.059066078186035154, 0.059240734100341794, 0.05910323333740235, 0.05916876983642578, 0.059138046264648435, 0.059082752227783204, 0.05925791931152344, 0.059036609649658206, 0.05920745468139649, 0.05903996658325195, 0.05979062271118164, 0.0588633918762207, 0.058710975646972655, 0.058660255432128904, 0.058757568359375, 0.05901123046875, 0.0586808967590332, 0.058802433013916015, 0.058708160400390626, 0.05868544006347656, 0.058635486602783206, 0.058469024658203125, 0.05865849685668945, 0.05866950225830078, 0.058611007690429685, 0.05860348892211914, 0.058761791229248045, 0.05924470520019531, 0.058755073547363285, 0.05855027389526367, 0.058638336181640625, 0.058894336700439455, 0.058909759521484376, 0.05869404983520508, 0.05876995086669922, 0.05865404891967774, 0.05882041549682617, 0.0587718391418457, 0.0588963851928711, 0.05869615936279297, 0.058742782592773435, 0.058969345092773434, 0.05871196746826172, 0.0588306884765625, 0.0589035530090332, 0.0589304313659668, 0.05886358261108399, 0.05886588668823242, 0.058929729461669925, 0.05888000106811524, 0.059017215728759766, 0.058834815979003904, 0.05885891342163086, 0.05879062271118164, 0.05894553756713867, 0.05885686492919922, 0.05890671920776367, 0.05879420852661133, 0.05895337677001953, 0.05914483261108398, 0.059172863006591796, 0.05927936172485351, 0.05923183822631836, 0.05920809555053711, 0.05911097717285156, 0.05916044616699219, 0.05896976089477539, 0.059255649566650394, 0.059211231231689455, 0.05929129409790039, 0.05926598358154297, 0.05948825454711914, 0.05942473602294922, 0.060399646759033206, 0.05913996887207031, 0.05906809616088867, 0.05901334381103516, 0.058908447265625, 0.05864444732666016, 0.05850783920288086, 0.05864191818237305, 0.0585344009399414, 0.05851340866088867, 0.05854003143310547, 0.05857689666748047, 0.05886361694335938, 0.05880627059936523, 0.05898233413696289, 0.05884320068359375, 0.05888809585571289, 0.059041278839111325, 0.05893929672241211, 0.058856128692626956, 0.05888409423828125, 0.05883884811401367, 0.05897439956665039, 0.059289215087890625, 0.05912358474731445, 0.05927692794799805, 0.05911846542358398, 0.05872819137573242, 0.05921612930297852, 0.05902316665649414, 0.05899283218383789, 0.05895372772216797, 0.058883487701416014, 0.05888844680786133, 0.059160511016845704, 0.05922038269042969, 0.05912931060791016, 
0.05892470550537109, 0.0589952621459961, 0.058829025268554686, 0.05900080108642578, 0.05893952178955078, 0.05900252914428711, 0.05894099044799805, 0.05893561553955078, 0.058802654266357425, 0.05885712051391601, 0.05885561752319336, 0.05890268707275391, 0.05904793548583984, 0.05896771240234375, 0.0590912971496582, 0.05907846450805664, 0.05952735900878906, 0.05918086242675781, 0.05944329452514648, 0.059082847595214844, 0.059145217895507814, 0.05914726257324219, 0.05927251052856446, 0.059093696594238285, 0.059150337219238285, 0.0591912956237793, 0.060001758575439455, 0.05921756744384766, 0.05894351959228516, 0.058609600067138674, 0.0586794548034668, 0.05873535919189453, 0.05860761642456055, 0.05871791839599609, 0.05858070373535156, 0.05840339279174805, 0.058521598815917966, 0.05847795104980469, 0.058523872375488284, 0.05851772689819336, 0.05861715316772461, 0.05858803176879883, 0.05872355270385742, 0.05879596710205078, 0.0587334098815918, 0.058638336181640625, 0.05860147094726562, 0.058722305297851565, 0.05870182418823242, 0.05869878387451172, 0.059235294342041014, 0.05864857482910156, 0.058609024047851566, 0.05875331115722656, 0.05884310531616211, 0.058917247772216796, 0.058762527465820315, 0.05889888000488281, 0.05894342422485352, 0.05874480056762695, 0.05876921463012695, 0.05871673583984375, 0.058777599334716796, 0.058843135833740234, 0.058793983459472655, 0.058826751708984375, 0.05880944061279297, 0.05889321517944336, 0.05873049545288086, 0.05884108734130859, 0.05921897506713867, 0.05886617660522461, 0.058604000091552734, 0.0586690559387207, 0.05907251358032226, 0.058872928619384764, 0.058747806549072266, 0.058816062927246095, 0.05886611175537109, 0.058799198150634766, 0.05895670318603516, 0.058966014862060545, 0.05878374481201172, 0.05883475112915039, 0.05901113510131836, 0.059031681060791014, 0.058861568450927736, 0.0587509765625, 0.05875823974609375, 0.059665889739990235, 0.05880614471435547, 0.05867814254760742, 0.058654720306396485, 0.05862518310546875, 0.058633056640625, 0.05843526458740234, 0.058550209045410154, 0.058337249755859376, 0.05846255874633789, 0.05844755172729492, 0.05854857635498047, 0.05838227081298828, 0.05841929626464844, 0.05859328079223633, 0.05860147094726562, 0.05874240112304688, 0.058808032989501956, 0.058822719573974606, 0.058535648345947267, 0.05866175842285156, 0.058621696472167965, 0.058685695648193356, 0.05876688003540039, 0.058884574890136716, 0.0588260498046875, 0.058808032989501956, 0.05865081787109375, 0.05881731033325195, 0.058720256805419924, 0.05882812881469727, 0.05879449462890625, 0.05887356948852539, 0.05878726577758789, 0.05889452743530273, 0.05906892776489258, 0.058980510711669924, 0.05875523376464844, 0.05890617752075195, 0.0588579216003418, 0.0590513916015625, 0.05913782501220703, 0.059087711334228514, 0.05893734359741211, 0.058940513610839844, 0.0588583984375, 0.05887385559082031, 0.05902105712890625, 0.05881472015380859, 0.05882857513427735, 0.059238624572753903, 0.059227745056152345, 0.05902947235107422, 0.05890707015991211, 0.05913935852050781, 0.05922480010986328, 0.059202655792236325, 0.059268001556396485, 0.05928054428100586, 0.059147102355957035, 0.05908684921264649, 0.05903699111938476, 0.05903225708007812, 0.059799552917480465, 0.058959392547607424, 0.058671550750732424, 0.05878787231445313, 0.058730239868164065, 0.05867116928100586, 0.05862966537475586, 0.05856313705444336, 0.05850940704345703, 0.05882060623168945, 0.05878169631958008, 0.05873049545288086, 0.058587135314941405, 0.058734367370605466, 0.05867449569702148, 
0.05886044692993164, 0.05894124984741211, 0.05907612609863281, 0.05919401550292969, 0.059066368103027345, 0.059157920837402345, 0.059503200531005856, 0.05900697708129883, 0.05871820831298828, 0.05885683059692383, 0.059035839080810545, 0.058880447387695316, 0.05907660675048828, 0.05896761703491211, 0.05886137771606445, 0.058899070739746096, 0.05879510498046875, 0.05904217529296875, 0.059095550537109375, 0.05896809768676758, 0.05902460861206055, 0.05890947341918945, 0.05880831909179687, 0.0586231689453125, 0.05902009582519531, 0.05897420883178711, 0.05874687957763672, 0.058916862487792966, 0.05904793548583984, 0.058729633331298825, 0.05868835067749024, 0.058971839904785155, 0.0590109748840332, 0.058859905242919924, 0.05889641571044922, 0.05883456039428711, 0.05935475158691406, 0.05885776138305664, 0.05884463882446289, 0.05902438354492188, 0.05881846237182617, 0.05885756683349609, 0.05906022262573242, 0.058826751708984375, 0.05883488082885742, 0.058907711029052734, 0.058827392578125, 0.0588721923828125, 0.05979299163818359, 0.05916918563842773, 0.058643775939941405, 0.05871481704711914, 0.058910720825195315, 0.058670398712158206, 0.05862060928344726, 0.05878169631958008, 0.05872777557373047, 0.05854387283325195, 0.05912390518188477, 0.058802913665771485, 0.05883391952514649, 0.058892894744873046, 0.05913232040405274, 0.059047359466552735, 0.05924716949462891, 0.05923337554931641, 0.05896799850463867, 0.05916476821899414, 0.05924748611450195, 0.05876326370239258, 0.05876326370239258, 0.058554367065429686, 0.05865676879882813, 0.058597377777099606, 0.05855171203613281, 0.05869811248779297, 0.05861526489257812, 0.05859110260009766, 0.05870857620239258, 0.058798366546630856, 0.058845184326171876, 0.058883262634277345, 0.058767711639404294, 0.05876784133911133, 0.05883420944213867, 0.05882953643798828, 0.05889843368530273, 0.05875839996337891, 0.058742942810058596, 0.0587413444519043, 0.058858943939208985, 0.05880889511108398, 0.05874687957763672, 0.0586668815612793, 0.058761344909667966, 0.05875711822509765, 0.05876908874511719, 0.05881683349609375, 0.0588939208984375, 0.05885980987548828, 0.05877753448486328, 0.05885152053833008, 0.05896502304077148, 0.05900540924072266, 0.05907273483276367, 0.05885935974121094, 0.05882720184326172, 0.058875904083251954, 0.059033214569091795, 0.058863998413085934, 0.05878550338745117]",tokens/s,16.96725284115965,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) 
ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 786, in __init__ self.model = RecurrentGemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 660, in __init__ [RecurrentGemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 660, in <listcomp> [RecurrentGemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 490, in __init__ self.mlp_block = RecurrentGemmaMlp(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 473, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 36.12 MiB is free. Process 142064 has 14.70 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 156.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 718, in __init__ self.dense_4h_to_h = 
nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 576.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 502.12 MiB is free. Process 37100 has 14.25 GiB memory in use. Of the allocated memory 14.13 GiB is allocated by PyTorch, and 9.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 
1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in <listcomp> [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 757, in __init__ self.block_sparse_moe = MixtralSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in __init__ self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in <listcomp> self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 671, in __init__ self.w2 = nn.Linear(self.ffn_dim, self.hidden_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 360.12 MiB is free. Process 181513 has 14.39 GiB memory in use. Of the allocated memory 14.26 GiB is allocated by PyTorch, and 9.58 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 352, in __init__ self.q_proj = nn.Linear(self.hidden_size, 
self.num_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 64.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 18875 has 14.71 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 85.33 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", 
line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 512, in __init__ self.mlp = MistralMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 152, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 170.12 MiB is free. Process 94285 has 14.57 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.074752,1129.250816,0.0,734.0032,709.336064,s,1,7.43693310546875,7.43693310546875,0.0,7.43693310546875,7.43693310546875,7.43693310546875,7.43693310546875,[7.43693310546875],,kWh,5.201868145816964e-06,5.662632469932491e-07,1.980001584002411e-06,7.748132976812624e-06,,MB,1107.88608,1276.051456,0.0,870.31808,809.960448,s,15,0.25941091346740724,0.01729406089782715,0.0005297255663439621,0.01726335906982422,0.01760234909057617,0.01818601589202881,0.018896005973815917,"[0.019073503494262695, 0.01717180824279785, 0.01726335906982422, 0.017280704498291017, 0.0169836483001709, 0.017047456741333008, 0.01691372871398926, 0.01683955192565918, 0.017281055450439453, 0.01729737663269043, 0.0178056640625, 0.017280031204223632, 0.016884416580200196, 0.016996864318847657, 
0.017291744232177733]",tokens/s,14802.769662512534,kWh,6.447960063649882e-07,7.107573282732315e-08,4.25537500105971e-07,1.1414092392982822e-06,tokens/kWh,224284149.0904561,MB,1117.888512,1311.70304,0.0,905.969664,809.963008,s,15,9.905081726074219,0.6603387817382812,0.011113331088490127,0.6602666015625,0.672424609375,0.6754632690429687,0.6802393334960937,"[0.6602666015625, 0.6636019897460937, 0.6598812255859375, 0.663210693359375, 0.6421433715820313, 0.6421513671875, 0.6425101318359375, 0.6600648803710938, 0.6729046630859375, 0.681433349609375, 0.6664047241210938, 0.6534988403320312, 0.6589554443359374, 0.6717045288085938, 0.6663499145507813]",tokens/s,95.40557323342162,kWh,1.8989165730301262e-05,2.0941903259328287e-06,8.994136702293818e-06,3.0077492758527917e-05,tokens/kWh,2094589.482766562,,s,945,9.89892097187043,0.010475048647481926,0.00034942214293159305,0.010455455780029297,0.010799897193908692,0.010900223731994629,0.011734079666137695,"[0.010524831771850585, 0.011044447898864745, 0.010804767608642579, 0.01053769588470459, 0.010745183944702148, 0.010670751571655273, 0.010538944244384766, 0.010553279876708984, 0.010522751808166505, 0.010623040199279786, 0.010565567970275878, 0.01033407974243164, 0.010340703964233398, 0.01020633602142334, 0.010228128433227538, 0.010248448371887207, 0.010231807708740234, 0.010229791641235352, 0.010223584175109863, 0.010192831993103028, 0.010149951934814452, 0.010299136161804199, 0.010492064476013184, 0.010332256317138673, 0.010460960388183593, 0.010344672203063964, 0.010338303565979003, 0.010264575958251953, 0.01027187156677246, 0.010599007606506347, 0.010914079666137695, 0.010866687774658204, 0.010795007705688477, 0.01064857578277588, 0.010570624351501464, 0.01064179229736328, 0.010612480163574219, 0.01053286361694336, 0.010628992080688477, 0.010574175834655761, 0.010753120422363281, 0.010563584327697753, 0.010381728172302247, 0.010374879837036133, 0.010424896240234375, 0.010231871604919434, 0.01022969627380371, 0.010226976394653321, 0.010394335746765137, 0.010491904258728027, 0.010414079666137695, 0.010389727592468262, 0.010287967681884766, 0.010316736221313476, 0.010220959663391113, 0.010297951698303222, 0.01044863986968994, 0.010514687538146973, 0.010532032012939454, 0.010435680389404296, 0.01058521556854248, 0.010692352294921876, 0.010808159828186035, 0.010562175750732422, 0.010714816093444825, 0.010645824432373047, 0.01053600025177002, 0.010519488334655762, 0.010887040138244628, 0.01085632038116455, 0.010617055892944337, 0.010618240356445312, 0.010597023963928223, 0.010751999855041505, 0.010502143859863282, 0.010500096321105956, 0.010448896408081054, 0.010335359573364258, 0.010203712463378907, 0.01036847972869873, 0.010305919647216798, 0.010455455780029297, 0.010528575897216797, 0.010365183830261231, 0.01032192039489746, 0.010554880142211913, 0.010589856147766114, 0.010404640197753907, 0.010297408103942871, 0.010327296257019043, 0.0102259521484375, 0.010539487838745117, 0.010960351943969727, 0.01083683204650879, 0.010840928077697754, 0.010965855598449707, 0.01082367992401123, 0.010553631782531738, 0.010542816162109375, 0.010512384414672851, 0.010647551536560058, 0.0106496000289917, 0.010687999725341797, 0.010473919868469237, 0.010450431823730469, 0.010472000122070313, 0.01069046401977539, 0.0107357120513916, 0.010489439964294434, 0.010465632438659668, 0.010470879554748536, 0.010446592330932617, 0.010420160293579101, 0.010240032196044921, 0.010281855583190919, 0.010358688354492187, 0.010215200424194337, 0.010230079650878906, 0.010329983711242675, 
0.010254464149475097, 0.010201087951660156, 0.010233856201171876, 0.01031372833251953, 0.01070473575592041, 0.011079327583312988, 0.010897919654846192, 0.010456864356994628, 0.010522720336914062, 0.01052070426940918, 0.010590208053588868, 0.010569184303283692, 0.010629664421081543, 0.010823264122009277, 0.010660160064697265, 0.010504287719726562, 0.010473695755004883, 0.010469152450561523, 0.010438976287841796, 0.010337984085083008, 0.010372447967529296, 0.010510751724243164, 0.01049129581451416, 0.010369888305664062, 0.010210304260253907, 0.010255071640014648, 0.010192543983459472, 0.010182304382324219, 0.010142687797546387, 0.01010649585723877, 0.010172320365905761, 0.01024227237701416, 0.010199040412902831, 0.010194527626037597, 0.010254752159118653, 0.010178655624389648, 0.01028707218170166, 0.010684096336364746, 0.010882559776306153, 0.010722304344177246, 0.010713055610656738, 0.010653727531433106, 0.010561408042907715, 0.010559840202331543, 0.010671903610229492, 0.010686464309692383, 0.010729472160339355, 0.010584063529968261, 0.010483584403991698, 0.010444928169250488, 0.010520575523376464, 0.010780256271362304, 0.010393183708190918, 0.01028211212158203, 0.010316767692565919, 0.010276960372924806, 0.010239871978759765, 0.010244864463806153, 0.010446463584899902, 0.010401984214782715, 0.010434752464294433, 0.011067744255065917, 0.010900799751281737, 0.010617312431335449, 0.010598272323608398, 0.010414079666137695, 0.010274208068847657, 0.010244704246520997, 0.010274720191955567, 0.01090783977508545, 0.010463359832763672, 0.010580415725708009, 0.010575072288513183, 0.010600607872009277, 0.01046787166595459, 0.010377311706542968, 0.0103155517578125, 0.010293472290039062, 0.0102194242477417, 0.010303584098815918, 0.010467328071594239, 0.010647520065307618, 0.010472831726074218, 0.010435232162475586, 0.010393600463867187, 0.0104017915725708, 0.010264479637145996, 0.010223615646362304, 0.010192864418029786, 0.010254048347473144, 0.010555808067321776, 0.010784511566162109, 0.010635392189025879, 0.010571999549865723, 0.010763392448425294, 0.01072822380065918, 0.010698080062866211, 0.010678943634033202, 0.01067523193359375, 0.010715488433837891, 0.010613375663757325, 0.0107642879486084, 0.010505408287048339, 0.01054751968383789, 0.01053542423248291, 0.01043455982208252, 0.010528767585754394, 0.010347840309143066, 0.010326720237731934, 0.010420224189758302, 0.010548576354980469, 0.010561375617980958, 0.010494688034057616, 0.01040345573425293, 0.010531231880187989, 0.010448479652404785, 0.010158559799194337, 0.010239999771118164, 0.01073971176147461, 0.010825568199157715, 0.010649760246276856, 0.010462271690368652, 0.010442815780639648, 0.01073036766052246, 0.010695679664611817, 0.011336095809936523, 0.010687071800231934, 0.010659839630126953, 0.010616064071655273, 0.010724096298217774, 0.010569567680358887, 0.010330240249633789, 0.010316991806030273, 0.010115008354187011, 0.01014742374420166, 0.010104384422302246, 0.01036137580871582, 0.010434464454650879, 0.010238431930541992, 0.010141695976257324, 0.010199007987976074, 0.010194623947143554, 0.010278528213500976, 0.0105315523147583, 0.010168319702148437, 0.01021350383758545, 0.010499839782714843, 0.010750080108642578, 0.010491904258728027, 0.010308735847473145, 0.01015078353881836, 0.010141823768615723, 0.010326047897338868, 0.010106752395629883, 0.010203104019165039, 0.010205183982849121, 0.010166272163391114, 0.010172384262084961, 0.010190688133239745, 0.010260416030883788, 0.010211584091186523, 0.010395648002624512, 0.010280960083007813, 
0.010286272048950195, 0.010115424156188964, 0.010086496353149415, 0.010113311767578124, 0.010067999839782715, 0.010129728317260743, 0.010073311805725097, 0.010119711875915528, 0.010057056427001953, 0.010066592216491699, 0.010179903984069824, 0.01022156810760498, 0.010140352249145508, 0.010149375915527344, 0.010062335968017578, 0.010110943794250489, 0.010053664207458497, 0.010131456375122071, 0.010143744468688964, 0.010071136474609376, 0.01020406436920166, 0.010208992004394532, 0.010132800102233887, 0.010114015579223633, 0.010153056144714356, 0.010087455749511719, 0.010112192153930664, 0.010077887535095215, 0.010138463973999024, 0.010086560249328613, 0.010093791961669922, 0.01004419231414795, 0.010070015907287597, 0.010059071540832519, 0.010023776054382325, 0.0101212158203125, 0.010085791587829589, 0.010199647903442383, 0.01008358383178711, 0.010132224082946777, 0.010192895889282226, 0.010099871635437012, 0.01010364818572998, 0.010116671562194824, 0.010193344116210937, 0.010172160148620605, 0.010232288360595703, 0.01023363208770752, 0.01013759994506836, 0.010061823844909668, 0.01012940788269043, 0.010045632362365723, 0.010098496437072753, 0.01005568027496338, 0.010137727737426758, 0.010087583541870117, 0.01010694408416748, 0.010085023880004883, 0.010033056259155274, 0.01009059238433838, 0.010097824096679687, 0.010170623779296875, 0.010101344108581543, 0.011016192436218262, 0.010645503997802735, 0.011602016448974609, 0.01024300765991211, 0.010152607917785645, 0.010113632202148438, 0.010464991569519043, 0.010153984069824219, 0.010180607795715332, 0.010149888038635254, 0.010158080101013184, 0.010202752113342286, 0.01026460838317871, 0.010336607933044433, 0.010225664138793946, 0.010185888290405273, 0.010113471984863282, 0.010113151550292968, 0.01013584041595459, 0.010145792007446289, 0.010082304000854492, 0.010145792007446289, 0.010085536003112792, 0.010154784202575684, 0.010350655555725098, 0.0101396484375, 0.010178720474243164, 0.010102527618408203, 0.010111071586608887, 0.010062975883483887, 0.010122112274169922, 0.01007414436340332, 0.010182623863220214, 0.010059359550476075, 0.01010483169555664, 0.010145792007446289, 0.01011302375793457, 0.010119168281555176, 0.010118783950805664, 0.010121600151062012, 0.010090496063232422, 0.010106880187988282, 0.010028127670288087, 0.010137760162353515, 0.01006492805480957, 0.010145376205444336, 0.010045696258544921, 0.010121088027954101, 0.010062047958374024, 0.010114080429077148, 0.010086688041687011, 0.010237631797790527, 0.010088640213012695, 0.01011193561553955, 0.010112671852111817, 0.010090496063232422, 0.010108832359313966, 0.010100640296936036, 0.010115263938903808, 0.010174464225769043, 0.010196800231933595, 0.014173919677734376, 0.010813568115234375, 0.010235456466674805, 0.01010153579711914, 0.010254336357116698, 0.010117119789123535, 0.01011302375793457, 0.010077919960021973, 0.010101023674011231, 0.010130463600158691, 0.010134495735168458, 0.010114720344543457, 0.010109215736389161, 0.01010044765472412, 0.010133855819702149, 0.010116191864013671, 0.01012009620666504, 0.010112480163574218, 0.010107423782348633, 0.010076160430908204, 0.010110112190246582, 0.010097503662109375, 0.010089792251586915, 0.0100765438079834, 0.010096960067749024, 0.010143967628479003, 0.010171296119689942, 0.010248255729675294, 0.010168448448181152, 0.010164928436279298, 0.010136608123779298, 0.010064607620239257, 0.010115103721618652, 0.01011734390258789, 0.010076383590698242, 0.01005452823638916, 0.010086400032043457, 0.010102687835693359, 0.01017369556427002, 
0.010133824348449706, 0.010314271926879883, 0.010141695976257324, 0.01011302375793457, 0.010123135566711426, 0.01010912036895752, 0.010148927688598633, 0.01013644790649414, 0.010188672065734863, 0.010110783576965331, 0.010139967918395996, 0.010077695846557617, 0.010117664337158203, 0.010153951644897461, 0.010141695976257324, 0.010138879776000977, 0.010130080223083496, 0.010384575843811035, 0.010146656036376954, 0.01031174373626709, 0.010168160438537598, 0.010154144287109375, 0.010094112396240234, 0.010371552467346192, 0.010589664459228515, 0.010629664421081543, 0.010711039543151855, 0.01098134422302246, 0.010649632453918458, 0.010754048347473144, 0.01072332763671875, 0.01067580795288086, 0.010750368118286132, 0.010671327590942383, 0.010705696105957032, 0.010974559783935547, 0.010809856414794922, 0.010791071891784668, 0.010690560340881347, 0.01093120002746582, 0.01084006404876709, 0.010871135711669921, 0.010869279861450195, 0.01081603240966797, 0.01072208023071289, 0.010687264442443847, 0.010685855865478516, 0.010637120246887207, 0.010606816291809082, 0.010611295700073242, 0.010577119827270508, 0.01043507194519043, 0.010393280029296875, 0.010554080009460448, 0.010831392288208008, 0.010811648368835448, 0.010841664314270019, 0.010687135696411132, 0.010641280174255372, 0.010473440170288086, 0.01173094367980957, 0.010421728134155274, 0.010420767784118653, 0.010506239891052246, 0.010620896339416503, 0.010629152297973633, 0.010471424102783204, 0.010521856307983399, 0.010470144271850586, 0.010674176216125488, 0.01075814437866211, 0.010626079559326172, 0.010657024383544923, 0.010755807876586913, 0.010580032348632813, 0.010526656150817871, 0.01061580753326416, 0.010705727577209473, 0.01042198371887207, 0.010441184043884277, 0.01049180793762207, 0.010648768424987793, 0.010657952308654786, 0.01076095962524414, 0.010848256111145019, 0.010907487869262695, 0.01077286434173584, 0.010581791877746582, 0.01063526439666748, 0.01073523235321045, 0.010561920166015626, 0.01054319953918457, 0.010606399536132812, 0.01075823974609375, 0.01073151969909668, 0.010825728416442871, 0.010672127723693848, 0.010721280097961425, 0.010612640380859375, 0.011736543655395507, 0.010707584381103516, 0.010795007705688477, 0.01075609588623047, 0.010532896041870117, 0.010577792167663575, 0.01058137607574463, 0.010705632209777832, 0.01064252758026123, 0.010522815704345703, 0.010433247566223144, 0.010495776176452637, 0.010909536361694335, 0.010805631637573242, 0.010820799827575684, 0.010730175971984864, 0.010681728363037109, 0.010714879989624023, 0.010604960441589355, 0.010606847763061523, 0.01059670352935791, 0.010636832237243652, 0.010678367614746094, 0.010394240379333496, 0.010662079811096192, 0.010668031692504883, 0.010630847930908204, 0.010744031906127929, 0.010736736297607422, 0.010791935920715333, 0.010707200050354004, 0.010648736000061035, 0.010588768005371094, 0.010800959587097168, 0.010748255729675292, 0.010786656379699708, 0.010704319953918457, 0.011899359703063965, 0.01069257640838623, 0.010632896423339843, 0.010624768257141114, 0.010676480293273926, 0.010631232261657715, 0.010627455711364746, 0.01076633644104004, 0.010704895973205567, 0.010751711845397948, 0.010864928245544433, 0.01070899200439453, 0.010693920135498048, 0.01059008026123047, 0.010574687957763671, 0.010579968452453613, 0.011530240058898926, 0.011794783592224122, 0.011101856231689453, 0.011558879852294921, 0.010742815971374511, 0.010646528244018554, 0.010691807746887208, 0.010523424148559571, 0.010675328254699707, 0.0106561279296875, 0.010590656280517578, 
0.010953151702880859, 0.010806912422180176, 0.01074176025390625, 0.010696703910827637, 0.010602335929870605, 0.010631327629089355, 0.010792960166931152, 0.010727423667907715, 0.010620800018310547, 0.01065334415435791, 0.010861023902893067, 0.010798303604125977, 0.01062377643585205, 0.010659775733947753, 0.010749792098999024, 0.010733792304992676, 0.010757632255554199, 0.01066006374359131, 0.010600959777832031, 0.011044639587402344, 0.012755200386047364, 0.01128867244720459, 0.010575231552124023, 0.01075868797302246, 0.010573823928833008, 0.01056761646270752, 0.010555392265319824, 0.01083193588256836, 0.010665023803710937, 0.010771391868591308, 0.010734720230102539, 0.010641280174255372, 0.010673151969909669, 0.010705120086669922, 0.010657343864440918, 0.01073583984375, 0.011132927894592285, 0.010823904037475586, 0.010889344215393067, 0.01096992015838623, 0.01076921558380127, 0.010764320373535157, 0.010853568077087402, 0.010759296417236329, 0.010770079612731933, 0.010720352172851562, 0.010718144416809083, 0.010672127723693848, 0.010829824447631836, 0.010696703910827637, 0.010622976303100586, 0.01077222442626953, 0.0107010555267334, 0.010629119873046875, 0.01061513614654541, 0.010712415695190429, 0.010643136024475098, 0.010446623802185058, 0.010197855949401856, 0.010129376411437989, 0.010164256095886231, 0.010182592391967773, 0.010154047966003417, 0.01011302375793457, 0.01022150421142578, 0.010141119956970215, 0.010162816047668458, 0.010215392112731933, 0.010086400032043457, 0.01017039966583252, 0.010590208053588868, 0.010972543716430664, 0.01087551975250244, 0.010643072128295898, 0.01056982421875, 0.01044099235534668, 0.01039731216430664, 0.010405920028686523, 0.01031935977935791, 0.010574687957763671, 0.010601696014404296, 0.010570528030395508, 0.0104017915725708, 0.010288415908813476, 0.010421088218688965, 0.010324000358581543, 0.01019878387451172, 0.0101048641204834, 0.010082304000854492, 0.010172160148620605, 0.010154399871826172, 0.010116959571838379, 0.010460543632507324, 0.010533056259155273, 0.01019545555114746, 0.010264255523681641, 0.010133760452270508, 0.01021951961517334, 0.010129695892333984, 0.010540767669677735, 0.01084620761871338, 0.010762240409851074, 0.010758272171020508, 0.01063481616973877, 0.010654239654541016, 0.010477343559265136, 0.01042841625213623, 0.010406144142150879, 0.010641375541687011, 0.010796832084655763, 0.010391551971435547, 0.010272543907165527, 0.010252511978149415, 0.010239232063293456, 0.01032374382019043, 0.01047651195526123, 0.010120512008666992, 0.010106559753417968, 0.010202112197875977, 0.010207232475280761, 0.010145440101623535, 0.01022339153289795, 0.010172991752624512, 0.010170368194580079, 0.010204959869384765, 0.010128671646118165, 0.010195903778076172, 0.010168319702148437, 0.010131391525268554, 0.010432576179504395, 0.010975232124328613, 0.011100128173828125, 0.010866720199584961, 0.010584063529968261, 0.010510335922241211, 0.010457375526428222, 0.010372832298278808, 0.010381312370300292, 0.010571167945861817, 0.010480223655700683, 0.01040505599975586, 0.010424192428588868, 0.010675135612487794, 0.01036291217803955, 0.01009660816192627, 0.010163264274597168, 0.010077119827270508, 0.010135552406311036, 0.010233792304992675, 0.010139360427856445, 0.010136896133422852, 0.010234848022460937, 0.010245696067810058, 0.010181056022644044, 0.010221152305603028, 0.010123680114746094, 0.010196864128112793, 0.01038144016265869, 0.010864416122436523, 0.010905823707580566, 0.010645503997802735, 0.010544639587402344, 0.010598655700683593, 
0.010510592460632325, 0.010475520133972169, 0.010491519927978516, 0.010576255798339844, 0.010743807792663575, 0.010665984153747558, 0.010717503547668457, 0.010760064125061036, 0.010566656112670898, 0.010443584442138672, 0.010356736183166505, 0.010301440238952637, 0.010225664138793946, 0.010192447662353516, 0.010150336265563964, 0.010269696235656739, 0.010109951972961426, 0.010159296035766601, 0.010224448204040528, 0.010354432106018066, 0.010313088417053223, 0.010244864463806153, 0.0102457275390625, 0.010340736389160157, 0.010802720069885254, 0.01071769618988037, 0.011051136016845703, 0.010731103897094727, 0.010645376205444337, 0.010532928466796874, 0.010481984138488769, 0.010473183631896973, 0.010508735656738282, 0.010599455833435058, 0.010515423774719238, 0.010751999855041505, 0.01067625617980957, 0.010505887985229492, 0.010448448181152344, 0.010431232452392579, 0.010364128112792968, 0.010410335540771485, 0.010445247650146485, 0.010391136169433594, 0.010615200042724609, 0.010727392196655274, 0.01039686393737793, 0.01032192039489746, 0.01022163200378418, 0.010274815559387206, 0.010340224266052246, 0.010618176460266113, 0.010920255661010743, 0.010893376350402832, 0.010772352218627929, 0.010641599655151367, 0.010688032150268555, 0.010664799690246581, 0.010559488296508789, 0.010528191566467286, 0.010451519966125488, 0.010606880187988282, 0.010845919609069825, 0.011370368003845215, 0.01092416000366211, 0.013450783729553223, 0.011151840209960937, 0.010439807891845704, 0.010486656188964844, 0.010284704208374023, 0.01028326416015625, 0.010338624000549317, 0.010235487937927246, 0.010211359977722169, 0.010193056106567382, 0.010424063682556153, 0.010368895530700684, 0.012419455528259278, 0.012228608131408691, 0.01057151985168457, 0.01073305606842041, 0.011157440185546876, 0.010847040176391601, 0.01073971176147461, 0.010694656372070312, 0.01061888027191162, 0.010748991966247559, 0.01073971176147461, 0.010601344108581543, 0.010803263664245605, 0.010754048347473144, 0.010724639892578125, 0.010717920303344727, 0.01066921615600586, 0.010721983909606934, 0.010490015983581543, 0.010389504432678222, 0.010297344207763673, 0.01030288028717041, 0.01024403190612793, 0.010357151985168457, 0.010244352340698242, 0.010182656288146973, 0.010291199684143066, 0.010338303565979003, 0.01028006362915039, 0.010279808044433594, 0.010235424041748048, 0.010203616142272949, 0.010436415672302246, 0.010874943733215332, 0.010575743675231933, 0.010669407844543457, 0.010572671890258789, 0.01062502384185791, 0.010559488296508789, 0.010422271728515625, 0.01053273582458496, 0.010489184379577637, 0.010504639625549317, 0.010652000427246094, 0.010757535934448241, 0.010534496307373046, 0.010414560317993165, 0.010295136451721192, 0.010309408187866211, 0.010205280303955079, 0.010306015968322754, 0.010328415870666505, 0.010364928245544434, 0.01040998363494873, 0.01042636775970459, 0.010424320220947265, 0.010362879753112793, 0.010315296173095703, 0.010355199813842773, 0.010469344139099121, 0.01095411205291748, 0.010895359992980956, 0.010807935714721679, 0.010749888420104981, 0.010729408264160157, 0.010608768463134766, 0.011374591827392578, 0.011780096054077148, 0.011974143981933593, 0.011467007637023926, 0.010658047676086425, 0.010682271957397462, 0.010715231895446778, 0.010476608276367188, 0.010235936164855956, 0.010195872306823731, 0.010436448097229004, 0.010171584129333496, 0.01013974380493164, 0.010244992256164551, 0.010225664138793946, 0.010465279579162597, 0.010340352058410645, 0.01023369598388672, 0.01022480010986328, 
0.010238975524902343, 0.010270912170410157, 0.010480480194091797, 0.010795904159545898, 0.010854496002197265, 0.010772480010986327, 0.010704895973205567, 0.010529952049255371, 0.010611552238464356, 0.010553343772888184, 0.01067244815826416, 0.010739392280578613]",tokens/s,95.46495044110252,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in <listcomp> self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 717, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 50991 has 14.71 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 4.20 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, 
**model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 383, in __init__ self.fc1 = nn.Linear(self.embed_dim, config.ffn_dim, bias=config.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 348.12 MiB is free. Process 120110 has 14.40 GiB memory in use. Of the allocated memory 14.28 GiB is allocated by PyTorch, and 3.01 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 383, in __init__ self.fc1 = nn.Linear(self.embed_dim, config.ffn_dim, bias=config.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 72.12 MiB is free. Process 117700 has 14.67 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 2.19 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,870.64576,4804.44416,0.0,4401.922048,4400.206336,s,1,7.91750048828125,7.91750048828125,0.0,7.91750048828125,7.91750048828125,7.91750048828125,7.91750048828125,[7.91750048828125],,kWh,5.536317324951293e-06,5.840939956697338e-07,9.044451679957044e-07,7.0248564886167315e-06,,MB,1230.700544,4903.010304,0.0,4490.002432,4455.927296,s,10,1.6621410522460938,0.1662141052246094,0.005410760143184518,0.16686649322509767,0.16960907135009765,0.17115725784301758,0.17239580703735352,"[0.15135784912109376, 0.1673426513671875, 0.1692489013671875, 0.16589984130859375, 0.16639033508300782, 0.169150146484375, 0.16529391479492186, 0.1654869384765625, 0.1727054443359375, 0.16926502990722656]",tokens/s,1540.1821623625783,kWh,4.687321170370995e-06,5.169084948909161e-07,3.1139663359681694e-06,8.31819600123008e-06,tokens/kWh,30775903.80920853,MB,1265.754112,4909.30176,0.0,4496.293888,4455.929856,s,10,13.568712402343749,1.3568712402343748,0.0047232895677489315,1.3553889160156252,1.3636133056640625,1.3641533935546875,1.3645854638671875,"[1.3546158447265626, 1.3646934814453124, 1.3609744873046874, 1.35433642578125, 1.3561619873046875, 1.3500045166015624, 1.351342041015625, 1.358918701171875, 1.3634932861328124, 1.354171630859375]",tokens/s,46.43034514396362,kWh,3.932146390921448e-05,4.335747987317738e-06,2.590041798663278e-05,6.955762988316501e-05,tokens/kWh,905723.7876825336,,s,630,13.56614709854125,0.021533566823081364,0.0005217741295872991,0.021427696228027343,0.021763932228088378,0.02212903537750244,0.023484619426727298,"[0.022519424438476564, 0.021417919158935546, 0.021224031448364256, 0.021213247299194337, 0.02120275115966797, 0.021082143783569336, 0.0211943359375, 0.021342784881591796, 0.021307296752929687, 0.02120204734802246, 0.021269376754760743, 0.021198944091796876, 0.02121286392211914, 0.02129747200012207, 0.02128281593322754, 0.021301151275634766, 0.021485536575317384, 0.021502016067504882, 0.021272640228271484, 0.021331872940063477, 0.021322975158691405, 0.02202003288269043, 0.02140598487854004, 0.021367456436157225, 0.021286624908447266, 0.02121958351135254, 0.02146512031555176, 0.021305023193359376, 0.02137932777404785, 0.02134841537475586, 0.02136476707458496, 0.021499616622924805, 0.02132748794555664, 0.021322496414184572, 0.021219200134277343, 0.02175539207458496, 0.022252031326293945, 0.021372928619384765, 0.021235071182250976, 0.021269119262695313, 0.02120444869995117, 0.021571903228759765, 0.02137875175476074, 0.021477920532226562, 0.021423904418945313, 0.021876512527465822, 0.02146553611755371, 0.021284799575805664, 0.021264448165893554, 0.021458431243896483, 0.021447103500366212, 0.02150726318359375, 0.021604799270629884, 
0.02169696044921875, 0.026537023544311523, 0.021468320846557618, 0.02158367919921875, 0.021381088256835937, 0.021400735855102538, 0.021527360916137696, 0.02131510353088379, 0.021424831390380858, 0.02146188735961914, 0.0231441593170166, 0.02173910331726074, 0.022307519912719728, 0.021346303939819337, 0.02142393684387207, 0.021263776779174806, 0.021525279998779297, 0.021595840454101563, 0.021420352935791014, 0.02151795196533203, 0.021561216354370118, 0.02170684814453125, 0.021477792739868166, 0.021397504806518555, 0.021407903671264647, 0.021464704513549804, 0.021948415756225585, 0.021573856353759767, 0.02142393684387207, 0.02175155258178711, 0.021521888732910156, 0.021758943557739257, 0.02147327995300293, 0.021407743453979493, 0.02150739288330078, 0.021350879669189454, 0.02143459129333496, 0.021377023696899415, 0.021536352157592774, 0.021837312698364256, 0.022475679397583007, 0.02666886329650879, 0.02134444808959961, 0.021428224563598632, 0.021428224563598632, 0.021348352432250976, 0.02125619125366211, 0.021735424041748046, 0.02151628875732422, 0.02165350341796875, 0.021997631072998045, 0.021440448760986327, 0.02143040084838867, 0.021376928329467772, 0.021565183639526368, 0.02151628875732422, 0.02156540870666504, 0.022083839416503905, 0.023164608001708983, 0.021415679931640626, 0.02130156707763672, 0.021202688217163087, 0.021445119857788086, 0.021265855789184572, 0.02122604751586914, 0.0212541446685791, 0.021380735397338868, 0.02144499206542969, 0.021286176681518554, 0.02154934310913086, 0.021397951126098633, 0.021243648529052736, 0.02184217643737793, 0.023128063201904296, 0.02168806457519531, 0.02142438316345215, 0.021585920333862304, 0.021606496810913086, 0.02173139190673828, 0.021991264343261718, 0.02186956787109375, 0.021441535949707033, 0.021366783142089844, 0.021960704803466798, 0.02136003112792969, 0.021314144134521484, 0.021590015411376954, 0.022271999359130858, 0.021551103591918946, 0.021780479431152345, 0.021594112396240234, 0.02144175910949707, 0.021537216186523437, 0.0215467529296875, 0.021373472213745116, 0.021343391418457033, 0.021334943771362306, 0.021577312469482423, 0.021428224563598632, 0.02134022331237793, 0.021561695098876954, 0.021634880065917968, 0.021489887237548827, 0.021540672302246093, 0.02161814308166504, 0.02161680030822754, 0.021502143859863283, 0.021440128326416015, 0.02140028762817383, 0.02127872085571289, 0.02147929573059082, 0.021784608840942382, 0.021651039123535155, 0.02162291145324707, 0.021522815704345704, 0.02168832015991211, 0.021491455078125, 0.021477344512939454, 0.021439775466918946, 0.021444896697998046, 0.021436960220336913, 0.021459039688110353, 0.021649503707885744, 0.021622655868530273, 0.022186111450195313, 0.021536575317382813, 0.021464767456054686, 0.021502464294433594, 0.021657503128051758, 0.02165692710876465, 0.021844160079956054, 0.021541439056396484, 0.021677120208740235, 0.02154591941833496, 0.021419071197509767, 0.021648319244384765, 0.022743040084838868, 0.021530624389648437, 0.02163711929321289, 0.02144972801208496, 0.02152947235107422, 0.021518463134765624, 0.021310880661010743, 0.021461599349975585, 0.021408863067626953, 0.021590944290161132, 0.021519392013549805, 0.021398496627807618, 0.021456895828247072, 0.021339616775512694, 0.0217728328704834, 0.0216944637298584, 0.021719039916992186, 0.021450719833374023, 0.02139967918395996, 0.021333248138427734, 0.0214432315826416, 0.02145894432067871, 0.021399551391601563, 0.02127984046936035, 0.021200799942016603, 0.021414911270141602, 0.021319679260253906, 0.021595903396606445, 
0.021289215087890626, 0.021553152084350585, 0.021334016799926758, 0.021262144088745116, 0.021463264465332033, 0.021258304595947266, 0.021370784759521484, 0.021619808197021483, 0.02139132881164551, 0.021351360321044923, 0.02139686393737793, 0.021604991912841796, 0.02141798400878906, 0.021446880340576173, 0.021251039505004884, 0.021416799545288086, 0.021349504470825197, 0.021420896530151366, 0.021339935302734377, 0.02138688087463379, 0.02143017578125, 0.02138591957092285, 0.02145280075073242, 0.02147737693786621, 0.02126803207397461, 0.021530879974365234, 0.02138335990905762, 0.021345375061035156, 0.022626880645751954, 0.02178598403930664, 0.02164816093444824, 0.021489856719970703, 0.021401599884033205, 0.021792959213256836, 0.021747520446777344, 0.022974431991577147, 0.021593599319458007, 0.021305503845214843, 0.02126268768310547, 0.021338016510009765, 0.02135980796813965, 0.021472160339355468, 0.021411840438842773, 0.021392736434936523, 0.021392032623291014, 0.02149580764770508, 0.021626880645751953, 0.021917119979858398, 0.02146976089477539, 0.021484928131103517, 0.02132851219177246, 0.021263519287109376, 0.021271392822265624, 0.02163302421569824, 0.021220672607421876, 0.02136751937866211, 0.021352384567260744, 0.021268512725830076, 0.02126438331604004, 0.021194751739501954, 0.026291839599609373, 0.021301631927490235, 0.021135360717773437, 0.02129100799560547, 0.021202239990234375, 0.02121183967590332, 0.021340160369873046, 0.021315071105957033, 0.021248512268066407, 0.021247871398925783, 0.021381248474121095, 0.021236831665039063, 0.021219968795776367, 0.021749408721923828, 0.021410432815551758, 0.021435487747192384, 0.02143929672241211, 0.02151545524597168, 0.021602272033691406, 0.021557504653930665, 0.021514944076538086, 0.022370464324951173, 0.021628768920898437, 0.02158297538757324, 0.02156224060058594, 0.021341503143310545, 0.02168057632446289, 0.021522687911987304, 0.021393184661865235, 0.021383520126342773, 0.021182336807250977, 0.021286527633666993, 0.02122096061706543, 0.0217096004486084, 0.021380704879760744, 0.02140563201904297, 0.02164303970336914, 0.021308095932006835, 0.022679584503173828, 0.02150383949279785, 0.021086368560791015, 0.021231744766235353, 0.021194143295288084, 0.021187040328979494, 0.021444608688354492, 0.02129747200012207, 0.021505727767944335, 0.021596160888671875, 0.021368896484375, 0.021282751083374022, 0.021660991668701172, 0.021454656600952148, 0.021762943267822264, 0.021397504806518555, 0.02146713638305664, 0.021276256561279298, 0.021219743728637695, 0.02107596778869629, 0.02122662353515625, 0.021409727096557616, 0.021205184936523437, 0.021133792877197265, 0.02157526397705078, 0.021884607315063476, 0.0212490234375, 0.02123980712890625, 0.021352287292480468, 0.02131328010559082, 0.02118288040161133, 0.021166080474853514, 0.02123366355895996, 0.021728607177734376, 0.02127235221862793, 0.02127961540222168, 0.021094400405883788, 0.021478752136230468, 0.021992095947265623, 0.02120697593688965, 0.021278783798217772, 0.021173471450805663, 0.02122012710571289, 0.021037055969238282, 0.02351103973388672, 0.021496063232421876, 0.021309183120727538, 0.021399808883666993, 0.021280511856079102, 0.021209087371826172, 0.02114684867858887, 0.02116102409362793, 0.021264095306396485, 0.021466976165771486, 0.021469343185424806, 0.02144767951965332, 0.021611520767211914, 0.021460224151611328, 0.021516992568969728, 0.021424192428588867, 0.021521888732910156, 0.021998111724853515, 0.02143417549133301, 0.022812864303588868, 0.022026592254638672, 0.021516767501831055, 
0.021697696685791017, 0.02142233657836914, 0.021539424896240233, 0.02134940719604492, 0.02147011184692383, 0.021407808303833008, 0.02169241523742676, 0.021337888717651368, 0.02130556869506836, 0.021434080123901366, 0.02128041648864746, 0.02138585662841797, 0.021473056793212892, 0.02139084815979004, 0.021310176849365235, 0.02100387191772461, 0.02112518310546875, 0.021185152053833006, 0.02120854377746582, 0.021255840301513673, 0.021398111343383788, 0.02143187141418457, 0.021232063293457032, 0.021211135864257814, 0.021317632675170898, 0.0212541446685791, 0.021391359329223633, 0.021454559326171876, 0.021397727966308594, 0.02130240058898926, 0.021205951690673828, 0.021301248550415038, 0.02137014389038086, 0.021347200393676758, 0.02148534393310547, 0.021571359634399413, 0.022298912048339843, 0.021452735900878907, 0.021464704513549804, 0.021195199966430663, 0.0214835205078125, 0.02125200080871582, 0.02126448059082031, 0.0213209285736084, 0.02127952003479004, 0.021571584701538086, 0.021340160369873046, 0.02139263916015625, 0.021344575881958008, 0.021336383819580078, 0.02181747245788574, 0.021385408401489257, 0.02115155220031738, 0.02194540786743164, 0.021306304931640624, 0.021632511138916014, 0.02152252769470215, 0.02217616081237793, 0.021482528686523436, 0.021339103698730467, 0.02274716758728027, 0.021536352157592774, 0.021176799774169922, 0.021501855850219728, 0.025206783294677734, 0.022425600051879883, 0.021428224563598632, 0.021413888931274414, 0.021461183547973633, 0.021363584518432618, 0.021598207473754884, 0.021382080078125, 0.021409536361694338, 0.02170911979675293, 0.0234003849029541, 0.02163430404663086, 0.02139833641052246, 0.021575519561767578, 0.02146108818054199, 0.02131679916381836, 0.02141881561279297, 0.021437664031982422, 0.021291231155395506, 0.022123104095458986, 0.021262304306030273, 0.022519807815551757, 0.021489664077758788, 0.021370880126953123, 0.021317855834960937, 0.021387039184570314, 0.021485567092895508, 0.02127462387084961, 0.021213184356689452, 0.021393407821655275, 0.02129110336303711, 0.021439680099487303, 0.021329856872558593, 0.021265216827392578, 0.021425535202026367, 0.02118716812133789, 0.021514240264892577, 0.02124595260620117, 0.02162451171875, 0.02123391914367676, 0.02125212860107422, 0.02134223937988281, 0.021424127578735352, 0.02132899284362793, 0.02128950309753418, 0.02137094306945801, 0.021170495986938476, 0.02161408042907715, 0.021380992889404298, 0.02133875274658203, 0.021428224563598632, 0.02123353576660156, 0.02149920082092285, 0.021330751419067383, 0.02150809669494629, 0.021571935653686522, 0.021386911392211914, 0.02207744026184082, 0.02143788719177246, 0.024582143783569335, 0.021659231185913085, 0.021356704711914063, 0.02142777633666992, 0.02146611213684082, 0.02157043266296387, 0.02164816093444824, 0.02266524887084961, 0.021451871871948244, 0.021285120010375976, 0.0214432315826416, 0.02125209617614746, 0.02133024024963379, 0.021427616119384766, 0.02124355125427246, 0.021262975692749025, 0.02196659278869629, 0.024788543701171874, 0.021737152099609375, 0.021568511962890623, 0.02162073516845703, 0.02152025604248047, 0.02168025588989258, 0.021510143280029297, 0.021526079177856445, 0.02144915199279785, 0.021319679260253906, 0.021400768280029295, 0.021547584533691405, 0.02209324836730957, 0.021400543212890626, 0.021546848297119142, 0.021362688064575194, 0.02166374397277832, 0.021796863555908205, 0.02147737693786621, 0.02341993522644043, 0.021627872467041016, 0.021444608688354492, 0.02142207908630371, 0.021534719467163087, 0.02160812759399414, 
0.021403648376464843, 0.0212872314453125, 0.021740575790405274, 0.021604608535766602, 0.02157846450805664, 0.02127872085571289, 0.02142646408081055, 0.02139107131958008, 0.021366783142089844, 0.021425376892089842, 0.02138806343078613, 0.02124799919128418, 0.02190336036682129, 0.021442432403564454, 0.021473407745361328, 0.021387264251708983, 0.021219167709350586, 0.021495519638061525, 0.021186975479125975, 0.021550432205200195, 0.02132601547241211, 0.023109695434570313, 0.02166988754272461, 0.021506048202514647, 0.0215285758972168, 0.021413055419921875, 0.021412128448486327, 0.021432863235473634, 0.021391359329223633, 0.021428224563598632, 0.02141798400878906, 0.021571680068969725, 0.021413055419921875, 0.021649728775024413, 0.021698976516723634, 0.02152448081970215, 0.021493759155273438, 0.021534719467163087, 0.02145859146118164, 0.021606752395629883, 0.021499103546142578, 0.021402399063110353, 0.022133888244628905, 0.02141231918334961, 0.021436063766479493, 0.02150271987915039, 0.021302719116210938, 0.021676607131958008, 0.021419551849365233, 0.02184239959716797, 0.021544960021972655, 0.02135196876525879, 0.021606367111206056, 0.021465599060058595, 0.021310495376586913, 0.021377471923828124, 0.021197343826293947, 0.021426496505737306, 0.02127225685119629, 0.02135196876525879, 0.021405887603759766, 0.021291296005249025, 0.0212541446685791, 0.021577695846557617, 0.02137718391418457, 0.02149567985534668, 0.021358591079711914, 0.02124393653869629, 0.021409408569335937, 0.021171680450439455, 0.021373823165893556, 0.022086719512939453, 0.021398303985595703, 0.02139094352722168, 0.021336639404296875, 0.021377023696899415, 0.02145894432067871, 0.021472511291503907, 0.021332735061645507, 0.021344255447387696, 0.02141798400878906, 0.021706464767456055, 0.021317920684814452, 0.02151571273803711]",tokens/s,46.43912493531363,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.734208,11724.06272,0.0,11328.815104,11314.254848,s,1,7.628697265625,7.628697265625,0.0,7.628697265625,7.628697265625,7.628697265625,7.628697265625,[7.628697265625],,kWh,1.2234974120830581e-05,1.3241086368013376e-06,4.526114731999431e-06,1.808519748963135e-05,,MB,1110.388736,12177.047552,0.0,11771.314176,11713.906688,s,10,4.138964324951171,0.4138964324951172,0.012396790772144301,0.41192939758300784,0.41933658142089847,0.4330530258178711,0.44402618133544924,"[0.3938546142578125, 0.4104230041503906, 0.41178872680664064, 0.412070068359375, 0.41628848266601565, 0.44676947021484376, 0.4114878845214844, 0.41365005493164064, 0.4091081237792969, 
0.4135238952636719]",tokens/s,618.5122168285905,kWh,1.1924313853666414e-05,1.315038087562178e-06,7.905161879679933e-06,2.1144513820908526e-05,tokens/kWh,12107159.434749316,MB,1115.181056,12281.905152,0.0,11876.171776,11829.476864,s,10,33.361275634765626,3.3361275634765626,0.0045100151832228605,3.3370075683593754,3.341362548828125,3.341722900390625,3.342011181640625,"[3.32912841796875, 3.328656982421875, 3.333444091796875, 3.3338837890625, 3.336612060546875, 3.341282470703125, 3.342083251953125, 3.34015478515625, 3.337403076171875, 3.338626708984375]",tokens/s,18.884169984899497,kWh,9.75254553630005e-05,1.075733135902759e-05,6.48060074003208e-05,0.0001730887941223489,tokens/kWh,363975.0355847303,,s,630,33.357723354339605,0.052948767229110474,0.0002874159301043155,0.05293332862854004,0.05319623527526856,0.0533000020980835,0.05435796005249024,"[0.05449523162841797, 0.05295654296875, 0.052574207305908206, 0.052775550842285156, 0.05268889617919922, 0.05261721420288086, 0.052590591430664066, 0.052760574340820314, 0.05286092758178711, 0.05254553604125976, 0.05263872146606445, 0.05270220947265625, 0.052514816284179686, 0.053272575378417966, 0.05257830429077148, 0.0526110725402832, 0.05279129409790039, 0.05256796646118164, 0.05268489456176758, 0.05286300659179687, 0.052727935791015625, 0.05266006469726563, 0.052674560546875, 0.05264179229736328, 0.05286707305908203, 0.05263894271850586, 0.05263030242919922, 0.05264384078979492, 0.05286431884765625, 0.05262188720703125, 0.05265216064453125, 0.05264147186279297, 0.05304761505126953, 0.05284659194946289, 0.05277692794799805, 0.05289782333374023, 0.053129505157470704, 0.053010143280029294, 0.05290595245361328, 0.052932640075683594, 0.05292031860351563, 0.052961280822753906, 0.052879009246826175, 0.0529466552734375, 0.05275302505493164, 0.05283955383300781, 0.05302687835693359, 0.05290655899047852, 0.052933120727539064, 0.05282758331298828, 0.05286739349365235, 0.052843807220458984, 0.05286563110351562, 0.05288569641113281, 0.05292787170410156, 0.053242431640625, 0.0530145263671875, 0.052994014739990235, 0.05305347061157226, 0.052934783935546875, 0.052776161193847655, 0.052791969299316406, 0.05273964691162109, 0.05425999832153321, 0.053138656616210936, 0.05272809600830078, 0.05259724807739258, 0.052596736907958984, 0.05264384078979492, 0.052506622314453126, 0.052512767791748044, 0.05249039840698242, 0.05251055908203125, 0.052279296875, 0.052364768981933596, 0.05240067291259766, 0.05249331283569336, 0.052519935607910156, 0.052959232330322265, 0.05269708633422852, 0.052795520782470705, 0.052938816070556644, 0.05279520034790039, 0.05285059356689453, 0.052672607421875, 0.05256425476074219, 0.05253817749023437, 0.05256489562988281, 0.05248758316040039, 0.052642208099365234, 0.052571552276611325, 0.052666782379150394, 0.05269952011108398, 0.05268431854248047, 0.05276924896240234, 0.05285887908935547, 0.05284796905517578, 0.05291689682006836, 0.05286444854736328, 0.05307980728149414, 0.053176513671875, 0.05314179229736328, 0.05315545654296875, 0.05297430419921875, 0.053016574859619144, 0.052796798706054686, 0.05311065673828125, 0.05283712005615234, 0.052897342681884764, 0.05290233612060547, 0.05296332931518555, 0.052908031463623044, 0.05278310394287109, 0.052877311706542966, 0.05290598297119141, 0.05301273727416992, 0.053051136016845704, 0.05294102478027344, 0.05294163131713867, 0.05317116928100586, 0.05315129470825195, 0.053047969818115236, 0.05302463912963867, 0.053126785278320314, 0.05285823822021484, 0.05303968048095703, 0.0543590087890625, 
0.05301164627075195, 0.052644577026367184, 0.05308015823364258, 0.0527011833190918, 0.052623329162597654, 0.052770206451416016, 0.052587135314941406, 0.053018112182617184, 0.05252761459350586, 0.052744094848632815, 0.052873119354248044, 0.05249862289428711, 0.052628833770751955, 0.05253936004638672, 0.052525760650634766, 0.052580352783203124, 0.05267388916015625, 0.052896415710449216, 0.05284067153930664, 0.05284636688232422, 0.05272780990600586, 0.05268889617919922, 0.052726879119873046, 0.05274252700805664, 0.0527611198425293, 0.052729854583740236, 0.05286905670166016, 0.05272377777099609, 0.05291136169433594, 0.052681056976318356, 0.05271318435668945, 0.05289849472045898, 0.05283430480957031, 0.05288889694213867, 0.05300009536743164, 0.05336483383178711, 0.05304595184326172, 0.05299100875854492, 0.053062175750732424, 0.05320684814453125, 0.05295577621459961, 0.053022048950195313, 0.05306435012817383, 0.053063617706298825, 0.05315795135498047, 0.052985855102539066, 0.052850719451904296, 0.053002208709716794, 0.05281923294067383, 0.05288832092285156, 0.05302054214477539, 0.053004383087158206, 0.05290313720703125, 0.05328108978271484, 0.05324233627319336, 0.05313702392578125, 0.05308659362792969, 0.05312470245361328, 0.053176734924316404, 0.052942848205566405, 0.05286092758178711, 0.05295513534545898, 0.05454188919067383, 0.053394176483154296, 0.05294873428344726, 0.052836353302001954, 0.05268204879760742, 0.05265478515625, 0.052647232055664066, 0.05248684692382812, 0.052467041015625, 0.052533920288085935, 0.05246156692504883, 0.05264998245239258, 0.052601119995117185, 0.052663230895996095, 0.05264652633666992, 0.052805728912353515, 0.052674625396728514, 0.052724769592285156, 0.05294588851928711, 0.052881118774414065, 0.0528175048828125, 0.05274425506591797, 0.05264857482910156, 0.052678913116455076, 0.052829856872558596, 0.0527913932800293, 0.05304524612426758, 0.05282185745239258, 0.05275459289550781, 0.052676929473876956, 0.05279097747802734, 0.05279769515991211, 0.053217025756835935, 0.05315379333496094, 0.052918270111083986, 0.052928703308105465, 0.05301331329345703, 0.05308528137207031, 0.05304883193969727, 0.05315615844726562, 0.05302076721191406, 0.05304524612426758, 0.053034366607666014, 0.052807872772216796, 0.053158336639404294, 0.052969406127929684, 0.05276883316040039, 0.05291417694091797, 0.05308415985107422, 0.052934337615966796, 0.05323955154418945, 0.052935230255126954, 0.05302614212036133, 0.05301942443847656, 0.05293388748168945, 0.05304899215698242, 0.05309539031982422, 0.05323980712890625, 0.053096256256103515, 0.053026912689208984, 0.05302076721191406, 0.052951038360595705, 0.05299507141113281, 0.054322879791259764, 0.053108993530273436, 0.0527988166809082, 0.05276755142211914, 0.052744190216064454, 0.052787200927734375, 0.052746238708496096, 0.05267011260986328, 0.05267027282714844, 0.052797985076904294, 0.05277286529541016, 0.05267660903930664, 0.05262556838989258, 0.05291196823120117, 0.053101566314697264, 0.0528397102355957, 0.05277193450927734, 0.05304998397827149, 0.052967422485351565, 0.05288748931884766, 0.052827903747558594, 0.05266873550415039, 0.05274214553833008, 0.05271254348754883, 0.052771007537841794, 0.052775646209716795, 0.052760513305664065, 0.05277907180786133, 0.052756191253662106, 0.052717376708984375, 0.052932735443115234, 0.05274857711791992, 0.05292448043823242, 0.05289984130859375, 0.053100193023681644, 0.05303443145751953, 0.05325417709350586, 0.05317695999145508, 0.05317145538330078, 0.05303807830810547, 0.053071998596191404, 
0.05301615905761719, 0.05297612762451172, 0.05296310424804687, 0.052950336456298826, 0.052935264587402345, 0.05285472106933594, 0.05301283264160156, 0.05297107315063477, 0.053096736907958984, 0.05312851333618164, 0.053432544708251956, 0.053127616882324216, 0.053065185546875, 0.05307756805419922, 0.05319164657592773, 0.053147647857666014, 0.053272575378417966, 0.05306067276000977, 0.05295756912231445, 0.05308883285522461, 0.05302272033691406, 0.053008384704589843, 0.05435539245605469, 0.05310726547241211, 0.052893695831298826, 0.05260003280639648, 0.05272182464599609, 0.052859519958496096, 0.05261280059814453, 0.052625728607177735, 0.05271347045898438, 0.052836353302001954, 0.052636768341064455, 0.0527184944152832, 0.05282428741455078, 0.052754207611083986, 0.05265167999267578, 0.05271078491210938, 0.05269631958007812, 0.052975582122802733, 0.05314534378051758, 0.05326816177368164, 0.053037376403808595, 0.05290003204345703, 0.052989761352539064, 0.052978721618652344, 0.05293769454956055, 0.05279743957519531, 0.05280883026123047, 0.052851585388183596, 0.05282815933227539, 0.05282979202270508, 0.05305795288085938, 0.05369036865234375, 0.05301862335205078, 0.05299168014526367, 0.053184833526611325, 0.05325174331665039, 0.053217632293701175, 0.05322751998901367, 0.05312694549560547, 0.053225406646728514, 0.053163936614990234, 0.053221279144287106, 0.05307644653320313, 0.05311862564086914, 0.05307625579833984, 0.05315798568725586, 0.053006591796875, 0.053118686676025394, 0.05322099304199219, 0.05302924728393555, 0.05313945770263672, 0.053149696350097655, 0.05334220886230469, 0.0531328010559082, 0.05302924728393555, 0.05315724945068359, 0.05311884689331055, 0.05330614471435547, 0.053188545227050785, 0.05329321670532226, 0.05298128128051758, 0.053026302337646485, 0.05317731094360351, 0.054775169372558594, 0.05361151885986328, 0.053065727233886716, 0.053114879608154295, 0.05275804901123047, 0.05285116958618164, 0.05283430480957031, 0.05305344009399414, 0.05305344009399414, 0.05288345718383789, 0.0526541748046875, 0.052845951080322265, 0.052634143829345705, 0.052735774993896485, 0.05278332901000977, 0.05283225631713867, 0.05274009704589844, 0.05286502456665039, 0.053272575378417966, 0.053026817321777345, 0.05311078262329102, 0.05280495834350586, 0.052814495086669924, 0.05266470336914063, 0.05283187103271485, 0.052870529174804684, 0.052907745361328126, 0.05280649566650391, 0.05270044708251953, 0.05274294281005859, 0.05286656188964844, 0.0529224967956543, 0.05287155151367187, 0.05289984130859375, 0.05300223922729492, 0.053187679290771485, 0.05316009521484375, 0.0532487678527832, 0.05308415985107422, 0.05328051376342773, 0.05356364822387695, 0.05313945770263672, 0.053122814178466794, 0.053139713287353514, 0.053020286560058597, 0.05316960144042969, 0.05304825592041015, 0.053133312225341796, 0.05303500747680664, 0.053050559997558595, 0.053001022338867186, 0.053184513092041016, 0.05311888122558594, 0.05335868835449219, 0.0530145263671875, 0.05319007873535156, 0.05333871841430664, 0.05332783889770508, 0.05326233673095703, 0.05321113586425781, 0.05290972900390625, 0.05302921676635742, 0.05315910339355469, 0.054681598663330076, 0.05352470397949219, 0.052924190521240234, 0.053067424774169925, 0.05276704025268555, 0.05282819366455078, 0.05284659194946289, 0.05280767822265625, 0.05286659240722656, 0.05286284637451172, 0.05291251373291016, 0.05287913513183594, 0.05276102447509766, 0.05289318466186523, 0.05283670425415039, 0.05271567916870117, 0.05286297607421875, 0.05287740707397461, 0.0531959342956543, 
0.05315068817138672, 0.05305545425415039, 0.052853790283203125, 0.052879199981689454, 0.05281273651123047, 0.05286902236938477, 0.05294873428344726, 0.05327289581298828, 0.05294083023071289, 0.05274995040893555, 0.052785537719726563, 0.05280972671508789, 0.05282928085327149, 0.05285766220092773, 0.05300028610229492, 0.05297151947021484, 0.05310259246826172, 0.05317631912231445, 0.05320073699951172, 0.05334956741333008, 0.05310927963256836, 0.052971969604492186, 0.05305487823486328, 0.052988800048828125, 0.05301353454589844, 0.05306671905517578, 0.05293353652954102, 0.05291455841064453, 0.052894142150878905, 0.05310787200927734, 0.05296774291992187, 0.0530560302734375, 0.053022270202636716, 0.05307436752319336, 0.05304844665527344, 0.05304601669311523, 0.05312217712402344, 0.053063934326171874, 0.0532938232421875, 0.0531346549987793, 0.05330505752563477, 0.05294947052001953, 0.05294742584228516, 0.052987934112548825, 0.05445257568359375, 0.05312307357788086, 0.05288473510742187, 0.05292047882080078, 0.052684993743896485, 0.05282656097412109, 0.05285254287719727, 0.052949153900146484, 0.0526376953125, 0.0526827507019043, 0.052685920715332034, 0.05293967819213867, 0.05280924987792969, 0.053345855712890626, 0.05276671981811523, 0.0529552001953125, 0.05270937728881836, 0.05282902526855469, 0.05302864074707031, 0.05301065444946289, 0.05296236801147461, 0.052706241607666016, 0.052698368072509764, 0.05262124633789062, 0.052783935546875, 0.052802623748779295, 0.05273491287231445, 0.05269094467163086, 0.05266636657714844, 0.05268479919433594, 0.05273369598388672, 0.052754688262939456, 0.05284044647216797, 0.052879070281982424, 0.05294291305541992, 0.053075489044189454, 0.053193119049072264, 0.0531827507019043, 0.05305567932128906, 0.05306553649902344, 0.05302272033691406, 0.05306924819946289, 0.05293952178955078, 0.05286614227294922, 0.05294768142700195, 0.05297356796264648, 0.0529409294128418, 0.05305740737915039, 0.05298591995239258, 0.0529788818359375, 0.052947711944580075, 0.05308121490478516, 0.053109630584716794, 0.05308755111694336, 0.05322208023071289, 0.05361891174316406, 0.05335631942749024, 0.05327193450927734, 0.05332380676269531, 0.05313187026977539, 0.053038654327392576, 0.05292281723022461, 0.05297151947021484, 0.05467548751831055, 0.05331600189208984, 0.05300611114501953, 0.05288959884643555, 0.05271084976196289, 0.052722240447998045, 0.052741310119628904, 0.05268563079833984, 0.05269913482666016, 0.05282182312011719, 0.052670654296875, 0.05275353622436523, 0.05262015914916992, 0.052779006958007815, 0.05275033569335937, 0.052746238708496096, 0.05291212844848633, 0.05336012649536133, 0.053036544799804686, 0.05310073471069336, 0.05303788757324219, 0.05288959884643555, 0.05280767822265625, 0.052668479919433596, 0.05279913711547852, 0.05272751998901367, 0.052806209564208985, 0.05264998245239258, 0.052757503509521485, 0.05282918548583984, 0.05278105545043945, 0.05269094467163086, 0.05297711944580078, 0.052953216552734376, 0.052942657470703126, 0.053198944091796874, 0.053139041900634766, 0.05313833618164063, 0.05313558578491211, 0.0531022720336914, 0.053356639862060545, 0.05296294403076172, 0.0528858871459961, 0.053008384704589843, 0.053131263732910154, 0.053043201446533204, 0.05292851257324219, 0.05303910446166992, 0.05326985549926758, 0.05310531234741211, 0.053116928100585936, 0.053071937561035155, 0.05308204650878906, 0.053231616973876954, 0.05312220764160156, 0.05317446517944336, 0.053119647979736326, 0.05332729721069336, 0.053101119995117185, 0.0531827507019043, 0.0529917106628418, 
0.05295513534545898, 0.053043201446533204]",tokens/s,18.886180969482787,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 657, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 324, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in 
__init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 238.12 MiB is free. Process 86351 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,839.00416,3354.329088,0.0,2959.081472,2957.493248,s,1,7.4474365234375,7.4474365234375,0.0,7.4474365234375,7.4474365234375,7.4474365234375,7.4474365234375,[7.4474365234375],,kWh,9.354730991644071e-06,1.02380170073714e-06,2.9752801579963073e-06,1.3353812850377518e-05,,MB,1148.882944,3423.535104,0.0,3017.801728,2552.885248,s,10,0.609890682220459,0.060989068222045896,0.003264355478831742,0.06088916778564453,0.06263210525512695,0.0660835391998291,0.06884468635559082,"[0.06953497314453125, 0.06046691131591797, 0.061311424255371096, 0.05996384048461914, 0.06186511993408203, 0.06173257446289063, 0.057616863250732425, 0.05804412841796875, 0.05776764678955078, 0.06158720016479492]",tokens/s,4197.473538503134,kWh,2.2473518942627797e-06,2.4783270569686913e-07,1.4945033323538555e-06,3.989687932313504e-06,tokens/kWh,64165419.537350394,MB,1158.61504,3423.535104,0.0,3017.801728,2552.887808,s,10,14.256443725585939,1.4256443725585934,0.0115770949596713,1.4310405883789064,1.4351067993164062,1.4353154846191407,1.4354824328613283,"[1.4318619384765625, 1.394968017578125, 1.4350604248046874, 1.43022119140625, 1.4318599853515626, 1.417967529296875, 1.42188232421875, 1.4320689697265625, 1.435524169921875, 1.4250291748046875]",tokens/s,44.190543737730586,kWh,4.106144405865308e-05,4.528718605831331e-06,2.2645821535445745e-05,6.823598419993017e-05,tokens/kWh,923266.5248208506,,s,630,14.253904968261713,0.022625245981367805,0.0005464244241510381,0.022606528282165528,0.02291669692993164,0.023132241344451902,0.024322863998413095,"[0.023183231353759767, 0.02291663932800293, 0.02278883171081543, 0.022808576583862306, 0.02265497589111328, 0.022640640258789063, 0.02253363227844238, 0.02261427116394043, 0.02275312042236328, 0.022503776550292967, 0.02271808052062988, 0.02297702407836914, 0.02273683166503906, 0.024712480545043946, 0.023758975982666016, 0.022856159210205076, 0.023077280044555663, 0.022898080825805665, 0.022765344619750976, 0.022575647354125976, 0.022521600723266602, 0.022378751754760742, 0.022550527572631835, 
0.022593408584594726, 0.02283942413330078, 0.022543487548828126, 0.022626623153686524, 0.022569536209106445, 0.022713375091552735, 0.022590431213378906, 0.02249932861328125, 0.023838720321655273, 0.02262182426452637, 0.022714752197265625, 0.022550527572631835, 0.022581247329711913, 0.02278201675415039, 0.02258732795715332, 0.022398399353027343, 0.02254729652404785, 0.022716127395629882, 0.022478847503662108, 0.022501375198364256, 0.02249318313598633, 0.02269923210144043, 0.022661376953125, 0.02268623924255371, 0.022560768127441407, 0.022579200744628908, 0.02261008071899414, 0.02271241569519043, 0.022654720306396484, 0.022686847686767576, 0.022614879608154295, 0.022592992782592775, 0.02287468719482422, 0.022529247283935547, 0.022560800552368164, 0.022756095886230468, 0.022668800354003905, 0.022426080703735352, 0.022487071990966796, 0.022548479080200197, 0.023119903564453124, 0.022724576950073242, 0.022409215927124023, 0.02220649528503418, 0.02205251121520996, 0.0221146240234375, 0.022192127227783204, 0.022208511352539064, 0.02207744026184082, 0.022214496612548828, 0.022020544052124023, 0.0223189754486084, 0.022269088745117186, 0.022164159774780274, 0.022280191421508787, 0.022148351669311523, 0.022150079727172853, 0.022097728729248048, 0.02221670341491699, 0.022181888580322266, 0.022177791595458983, 0.022071296691894532, 0.022013952255249023, 0.02191302490234375, 0.022032960891723633, 0.022022144317626953, 0.022079488754272462, 0.022066783905029298, 0.022063488006591796, 0.02219603157043457, 0.022224863052368163, 0.022044927597045898, 0.022024192810058595, 0.02200707244873047, 0.022225631713867187, 0.02215711975097656, 0.022026016235351564, 0.02234432029724121, 0.022028064727783202, 0.022296607971191405, 0.021916959762573244, 0.02265567970275879, 0.02254435157775879, 0.022071327209472656, 0.022071296691894532, 0.02188483238220215, 0.022011999130249024, 0.02260915184020996, 0.02194918441772461, 0.02205708885192871, 0.022245248794555663, 0.022113727569580077, 0.022206943511962892, 0.02203446388244629, 0.021958719253540038, 0.021907360076904296, 0.021928031921386718, 0.021923391342163086, 0.02188470458984375, 0.02195248031616211, 0.021940031051635743, 0.021855104446411134, 0.021863935470581054, 0.022661951065063475, 0.022147071838378905, 0.022763519287109374, 0.02294988822937012, 0.022845439910888672, 0.022405120849609376, 0.022355968475341798, 0.022202560424804688, 0.022124191284179688, 0.021919904708862306, 0.022120447158813478, 0.022197887420654296, 0.022339712142944335, 0.022354175567626953, 0.022471935272216796, 0.02253081512451172, 0.02295132827758789, 0.02255523109436035, 0.022994943618774414, 0.022799999237060546, 0.022623903274536134, 0.023065471649169923, 0.022785888671875, 0.022597280502319336, 0.02387798309326172, 0.0240863037109375, 0.02277596855163574, 0.0229039363861084, 0.022702592849731446, 0.022706623077392577, 0.022665216445922853, 0.022751232147216797, 0.023289503097534178, 0.022743392944335937, 0.022934560775756837, 0.02271536064147949, 0.022854719161987305, 0.022920127868652343, 0.02285478401184082, 0.023030656814575196, 0.02276927947998047, 0.022881887435913087, 0.022795040130615233, 0.022810047149658202, 0.022859807968139648, 0.023675424575805664, 0.02272217559814453, 0.022903167724609375, 0.022607200622558592, 0.022772319793701173, 0.022681663513183594, 0.022754848480224608, 0.023497184753417968, 0.022666976928710936, 0.02283535957336426, 0.02256230354309082, 0.022827648162841798, 0.022626304626464845, 0.022640640258789063, 0.022769664764404295, 0.023225664138793945, 
0.023077568054199218, 0.023259136199951173, 0.023173280715942383, 0.022544384002685547, 0.02251158332824707, 0.022652959823608397, 0.022775808334350587, 0.02290892791748047, 0.02257449531555176, 0.02256752014160156, 0.02261561584472656, 0.022364288330078124, 0.02256108856201172, 0.022660736083984376, 0.022800575256347655, 0.022456512451171876, 0.022605823516845702, 0.02245631980895996, 0.022345024108886717, 0.022712095260620117, 0.022649759292602538, 0.022435039520263673, 0.022591327667236327, 0.022688703536987306, 0.022552448272705077, 0.022610048294067382, 0.022529119491577147, 0.02229916763305664, 0.02257475280761719, 0.023018207550048828, 0.022540288925170897, 0.022452224731445314, 0.02272220802307129, 0.02288243293762207, 0.0227740478515625, 0.022446016311645507, 0.02257935905456543, 0.022486656188964844, 0.022621728897094726, 0.022645023345947264, 0.022602144241333007, 0.022900415420532227, 0.023608896255493166, 0.02275766372680664, 0.02283692741394043, 0.022950687408447266, 0.022642688751220705, 0.02249068832397461, 0.022573503494262695, 0.022697984695434572, 0.022753183364868163, 0.022863967895507813, 0.02411427116394043, 0.022774688720703123, 0.022840927124023438, 0.02272502326965332, 0.0228351993560791, 0.022693151473999022, 0.022702592849731446, 0.022713823318481444, 0.022968063354492186, 0.022815744400024415, 0.022587392807006838, 0.02269523239135742, 0.022446815490722655, 0.023310752868652345, 0.02254630470275879, 0.02267558479309082, 0.02251366424560547, 0.022603776931762694, 0.022486112594604493, 0.02218281555175781, 0.022349472045898437, 0.02285398483276367, 0.022614015579223632, 0.02247270393371582, 0.022338592529296875, 0.02245910453796387, 0.02238489532470703, 0.03100876808166504, 0.022345727920532226, 0.022437280654907226, 0.023004831314086913, 0.022756288528442383, 0.02308233642578125, 0.022631135940551758, 0.02263033676147461, 0.0225581111907959, 0.022931488037109374, 0.022914751052856445, 0.022610815048217773, 0.022796287536621093, 0.022519615173339842, 0.022605119705200197, 0.022644832611083986, 0.022493984222412108, 0.022519168853759767, 0.022454912185668946, 0.022534143447875975, 0.02243756866455078, 0.022460704803466798, 0.02245020866394043, 0.022250944137573243, 0.022656927108764647, 0.02285593605041504, 0.022728736877441407, 0.02263897514343262, 0.022519359588623045, 0.022444480895996093, 0.02263654327392578, 0.02262118339538574, 0.02248099136352539, 0.02244700813293457, 0.02270191955566406, 0.022631647109985352, 0.022609888076782228, 0.022504383087158204, 0.02238876724243164, 0.022629919052124022, 0.02243836784362793, 0.02244105529785156, 0.022649023056030275, 0.022704864501953127, 0.02275225639343262, 0.02248806381225586, 0.022542335510253905, 0.022619808197021484, 0.022589120864868164, 0.025994720458984374, 0.02296268844604492, 0.022968320846557616, 0.02265907287597656, 0.022701248168945313, 0.022462879180908203, 0.02231884765625, 0.022506143569946287, 0.02284726333618164, 0.022794336318969727, 0.022550655364990235, 0.022609920501708985, 0.022521087646484375, 0.022597984313964845, 0.02260121536254883, 0.022498207092285158, 0.02268342399597168, 0.02270185661315918, 0.022759359359741212, 0.022497791290283203, 0.02265907287597656, 0.02262015914916992, 0.02271177673339844, 0.022557216644287108, 0.022558719635009765, 0.022845632553100587, 0.022673215866088867, 0.02263859176635742, 0.022543807983398438, 0.022186559677124025, 0.021960704803466798, 0.022147071838378905, 0.02186979293823242, 0.02207823944091797, 0.021831680297851562, 0.02206915283203125, 
0.02237654495239258, 0.022383615493774413, 0.022701055526733398, 0.022589439392089843, 0.024408063888549804, 0.023171072006225587, 0.022514879226684572, 0.0221909122467041, 0.023010368347167968, 0.02240630340576172, 0.0226296329498291, 0.022368799209594725, 0.02232524871826172, 0.022621952056884765, 0.02206924819946289, 0.021881088256835938, 0.022013952255249023, 0.021975135803222655, 0.022089887619018554, 0.022036224365234374, 0.022023551940917967, 0.0218590087890625, 0.02200476837158203, 0.02197212791442871, 0.022021888732910156, 0.02192915153503418, 0.02197587203979492, 0.022379135131835936, 0.022173919677734376, 0.022226463317871092, 0.021964479446411132, 0.022118431091308594, 0.02220230484008789, 0.022063135147094726, 0.022002464294433595, 0.02198886489868164, 0.021981695175170898, 0.02195155143737793, 0.021860639572143556, 0.022002336502075195, 0.02205881690979004, 0.022085248947143556, 0.02190540885925293, 0.022053056716918946, 0.022276479721069335, 0.022228992462158204, 0.02286367988586426, 0.02215648078918457, 0.02227507209777832, 0.022317119598388672, 0.02263852882385254, 0.02293337631225586, 0.0223287353515625, 0.02227027130126953, 0.022438304901123047, 0.023226560592651366, 0.022771520614624022, 0.02260326385498047, 0.02274287986755371, 0.022722623825073243, 0.02278665542602539, 0.022824960708618162, 0.022734848022460938, 0.02272051239013672, 0.022755327224731444, 0.022731008529663085, 0.0229039363861084, 0.022833791732788086, 0.022837087631225585, 0.02263852882385254, 0.02268921661376953, 0.022838048934936524, 0.022664480209350586, 0.02273967933654785, 0.02291312026977539, 0.022699935913085938, 0.02267523193359375, 0.02268601608276367, 0.022827072143554686, 0.022847232818603517, 0.022917215347290038, 0.022722560882568358, 0.022648735046386717, 0.02264409637451172, 0.022553312301635743, 0.022644224166870116, 0.022808704376220703, 0.022563199996948242, 0.02332467269897461, 0.025610080718994142, 0.023443424224853515, 0.022732927322387696, 0.023035839080810548, 0.022710208892822267, 0.022505247116088867, 0.022814752578735352, 0.023142335891723632, 0.023289279937744142, 0.02302239990234375, 0.02292531204223633, 0.02270412826538086, 0.02264473533630371, 0.02268511962890625, 0.022563295364379884, 0.02272060775756836, 0.02319900894165039, 0.022688512802124024, 0.02257302474975586, 0.022744672775268555, 0.02250726318359375, 0.0226265926361084, 0.02274064064025879, 0.022556800842285157, 0.022534751892089845, 0.023566335678100587, 0.022595104217529298, 0.022617887496948243, 0.022663839340209962, 0.022497055053710937, 0.022749183654785156, 0.022453760147094725, 0.022385408401489258, 0.022571008682250978, 0.022766719818115233, 0.02268582344055176, 0.022639360427856445, 0.022816768646240236, 0.022585119247436523, 0.022714591979980468, 0.022579200744628908, 0.022603776931762694, 0.022605823516845702, 0.02292736053466797, 0.02288025665283203, 0.022551872253417968, 0.02256675148010254, 0.02298944091796875, 0.02272483253479004, 0.02275062370300293, 0.022651519775390625, 0.0227491512298584, 0.022577152252197266, 0.022429695129394533, 0.02254172706604004, 0.02267366409301758, 0.02273535919189453, 0.022665056228637695, 0.0225133113861084, 0.022769216537475587, 0.022749919891357422, 0.02280006408691406, 0.02266748809814453, 0.02268079948425293, 0.02297500801086426, 0.02262022399902344, 0.022608160018920898, 0.022691232681274414, 0.022590047836303712, 0.022584415435791014, 0.02245903968811035, 0.022585599899291993, 0.02260905647277832, 0.022614879608154295, 0.022439647674560546, 
0.022442272186279297, 0.022374399185180666, 0.022431392669677735, 0.02278598403930664, 0.02276393508911133, 0.02267686462402344, 0.02257574462890625, 0.02258451271057129, 0.022764352798461913, 0.02270137596130371, 0.02261020851135254, 0.022700096130371095, 0.02249558448791504, 0.02410905647277832, 0.022759424209594727, 0.02275702476501465, 0.02283113670349121, 0.02253446388244629, 0.02285753631591797, 0.022589792251586915, 0.022695648193359376, 0.022603904724121094, 0.022747135162353514, 0.02246451187133789, 0.022598943710327148, 0.02260585594177246, 0.022542976379394532, 0.022816831588745118, 0.022783296585083008, 0.02279078483581543, 0.0227205753326416, 0.022786048889160155, 0.022697984695434572, 0.02265907287597656, 0.02264473533630371, 0.022535999298095702, 0.022697439193725587, 0.02258208084106445, 0.023175071716308594, 0.026408960342407226, 0.02552217674255371, 0.02266726493835449, 0.02263033676147461, 0.02259564781188965, 0.022649856567382814, 0.022508544921875, 0.022542335510253905, 0.022421247482299806, 0.022503679275512695, 0.022815807342529297, 0.02278825569152832, 0.02292815971374512, 0.023346080780029296, 0.02264838409423828, 0.022837568283081054, 0.0225664005279541, 0.02256752014160156, 0.022618112564086915, 0.022576288223266603, 0.02247123146057129, 0.022712064743041993, 0.022574623107910155, 0.02245475196838379, 0.02256876754760742, 0.02247248077392578, 0.022536319732666017, 0.023003360748291016, 0.022938207626342775, 0.02254217529296875, 0.022457952499389647, 0.022590015411376955, 0.02239897537231445, 0.022582815170288085, 0.022581119537353516, 0.022594144821166992, 0.02254377555847168, 0.022599647521972657, 0.0223603515625, 0.022475103378295898, 0.022512704849243163, 0.022604736328125, 0.022504928588867188, 0.02260544013977051, 0.022481727600097656, 0.022407039642333985, 0.02289072036743164, 0.02266316795349121, 0.022679040908813477, 0.022471168518066405, 0.022425504684448243, 0.02259891128540039, 0.02252047920227051, 0.022554208755493164, 0.022524511337280274, 0.02270412826538086, 0.02275868797302246, 0.022628671646118165, 0.02253606414794922, 0.022464128494262697, 0.022829055786132812, 0.02246486473083496, 0.02262201690673828, 0.02249772834777832, 0.022668928146362306, 0.02251046371459961, 0.022877439498901368, 0.022794815063476564, 0.022838272094726563, 0.022537023544311523, 0.02256480026245117, 0.022585599899291993, 0.022648256301879884, 0.022693632125854492, 0.022816991806030272, 0.022650720596313477]",tokens/s,44.19841449783633,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,839.241728,8760.786944,0.0,8365.539328,8230.228992,s,1,7.50484423828125,7.50484423828125,0.0,7.50484423828125,7.50484423828125,7.50484423828125,7.50484423828125,[7.50484423828125],,kWh,1.1066832612497515e-05,1.2130818759605153e-06,5.226393070004165e-06,1.7506307558462196e-05,,MB,1169.158144,8951.627776,0.0,8545.8944,8499.295232,s,10,2.6764699096679685,0.26764699096679684,0.010224015400804483,0.27015660095214844,0.2749579895019531,0.2763633331298828,0.2774876080322266,"[0.2387668762207031, 0.2687913208007813, 0.2687487487792969, 0.264208251953125, 0.26977490234375, 0.2708685607910156, 0.27053829956054687, 0.27235858154296877, 0.27464569091796875, 0.2777686767578125]",tokens/s,956.4837589814648,kWh,7.34776420145863e-06,8.100666222931695e-07,4.890156689900046e-06,1.3047987513651845e-05,tokens/kWh,19619883.8887723,MB,1174.2208,8953.724928,0.0,8547.991552,8499.297792,s,10,18.966721679687502,1.8966721679687502,0.004236313017153842,1.8971524658203125,1.8996663452148437,1.9014992492675782,1.9029655725097656,"[1.89731689453125, 1.8964061279296875, 1.8975816650390624, 1.895955078125, 1.8856767578125, 1.896988037109375, 1.89871826171875, 1.8954876708984374, 1.9033321533203125, 1.899259033203125]",tokens/s,33.216072373471974,kWh,5.602011279520714e-05,6.179156081395586e-06,3.704018240989933e-05,9.923945128650207e-05,tokens/kWh,634828.1775371814,,s,630,18.963937675476082,0.030101488373771548,0.0003737273369058032,0.030036399841308594,0.03032941131591797,0.030539182472229004,0.03224072696685791,"[0.03222934341430664, 0.03057254409790039, 0.030236448287963867, 0.03005414390563965, 0.029918912887573243, 0.02982304000854492, 0.02989302444458008, 0.029762943267822264, 0.029848512649536134, 0.02996944046020508, 0.02985241508483887, 0.029823200225830078, 0.0300316162109375, 0.030032127380371094, 0.030001279830932617, 0.030010976791381837, 0.030052223205566407, 0.029941375732421876, 0.029847583770751952, 0.029767711639404296, 0.029906656265258787, 0.030077951431274414, 0.030069856643676757, 0.029860767364501953, 0.029891775131225585, 0.029958976745605468, 0.029800447463989257, 0.030143583297729492, 0.029955135345458985, 0.02990652847290039, 0.030183231353759766, 0.030195775985717772, 0.03033251190185547, 0.030488927841186522, 0.030249120712280274, 0.030761247634887696, 0.030257152557373046, 0.029973600387573244, 0.030194591522216797, 0.030045215606689452, 0.030088159561157228, 0.030010400772094728, 0.030073823928833007, 0.03017728042602539, 0.030138111114501952, 0.029980224609375, 0.030036575317382814, 0.030038112640380858, 0.03025017547607422, 0.03026924705505371, 0.030079679489135744, 0.030007680892944335, 0.030168256759643554, 0.030157535552978516, 0.030029855728149413, 0.03024105644226074, 0.030117599487304688, 0.030121280670166017, 0.03013088035583496, 0.030013439178466796, 0.030076223373413084, 0.030819007873535156, 0.030054399490356445, 0.032245376586914065, 0.030806367874145507, 0.030271488189697264, 0.029884416580200194, 0.02975062370300293, 0.029945663452148438, 0.030065311431884765, 0.02992918395996094, 0.029901472091674805, 0.02979596710205078, 0.02995327949523926, 0.029847904205322264, 0.029911167144775392, 0.029845279693603517, 0.0299051513671875, 0.030036224365234374, 0.02981011199951172, 0.029905664443969728, 0.03004966354370117, 0.030089088439941406, 0.03018582344055176, 0.0300032958984375, 0.029774080276489256, 0.02984351921081543, 0.030021120071411132, 0.030105344772338866, 0.02984217643737793, 0.029924863815307616, 
0.029918943405151367, 0.029879072189331054, 0.030027135848999024, 0.030081151962280273, 0.03037164878845215, 0.0305097599029541, 0.03033888053894043, 0.03023993682861328, 0.03011862373352051, 0.0300731201171875, 0.030074880599975585, 0.03031449508666992, 0.03052694320678711, 0.030027711868286133, 0.030146656036376954, 0.029943552017211914, 0.029921279907226563, 0.03026940727233887, 0.030245664596557618, 0.029914495468139648, 0.030026464462280272, 0.029986719131469726, 0.030087167739868165, 0.030101152420043947, 0.030090816497802736, 0.02998137664794922, 0.03020400047302246, 0.02997987174987793, 0.03002556800842285, 0.029948320388793945, 0.03027203178405762, 0.030449663162231445, 0.030055776596069336, 0.030122655868530274, 0.030150592803955076, 0.032194526672363284, 0.030664703369140626, 0.030345216751098632, 0.02997622489929199, 0.029835615158081055, 0.029705631256103517, 0.029993024826049805, 0.029749248504638674, 0.03002217674255371, 0.030031328201293946, 0.029822784423828123, 0.02985443115234375, 0.02998886489868164, 0.029845504760742186, 0.030131231307983397, 0.03000419235229492, 0.029748607635498046, 0.029851295471191405, 0.029796672821044923, 0.029825696945190428, 0.029880319595336914, 0.030119935989379884, 0.029913087844848633, 0.029871488571166994, 0.029944448471069335, 0.029805696487426758, 0.0298353271484375, 0.030026559829711915, 0.03024684715270996, 0.03012339210510254, 0.030099264144897463, 0.03016691207885742, 0.030694400787353516, 0.030279680252075194, 0.030284896850585937, 0.030086048126220705, 0.030341119766235353, 0.030081024169921877, 0.03016294479370117, 0.03003392028808594, 0.029996192932128907, 0.029944671630859374, 0.030280832290649415, 0.03020684814453125, 0.03008230400085449, 0.03001590347290039, 0.030984384536743164, 0.029928607940673826, 0.03000831985473633, 0.03010767936706543, 0.03020716857910156, 0.03013916778564453, 0.03002572822570801, 0.03001753616333008, 0.03041231918334961, 0.03026540756225586, 0.030105920791625978, 0.03006854438781738, 0.03018351936340332, 0.030224576950073242, 0.030084800720214844, 0.030255104064941408, 0.030326528549194334, 0.03230550384521484, 0.03094432067871094, 0.030393280029296876, 0.02995609664916992, 0.030096576690673827, 0.029818975448608398, 0.02974550437927246, 0.029871648788452148, 0.03005084800720215, 0.030023231506347656, 0.029780736923217775, 0.029804191589355468, 0.030009695053100586, 0.030072832107543947, 0.029980127334594726, 0.02989302444458008, 0.030006687164306642, 0.02986240005493164, 0.029888128280639647, 0.029819488525390625, 0.02991923141479492, 0.030133344650268554, 0.02994473648071289, 0.02994175910949707, 0.029841407775878907, 0.02983526420593262, 0.02985958480834961, 0.029858047485351563, 0.030095359802246095, 0.029910144805908204, 0.03023551940917969, 0.030087167739868165, 0.030093311309814453, 0.030523391723632814, 0.030308351516723633, 0.030062591552734375, 0.030066688537597655, 0.03007427215576172, 0.029997663497924806, 0.03028531265258789, 0.029975040435791016, 0.03001510429382324, 0.030140159606933593, 0.03006502342224121, 0.030185728073120116, 0.02996019172668457, 0.030320512771606446, 0.030172351837158204, 0.03004300880432129, 0.030001216888427735, 0.029997055053710937, 0.030315967559814454, 0.030028352737426756, 0.02998681640625, 0.030385440826416015, 0.02999123191833496, 0.030050207138061523, 0.030114303588867186, 0.030000896453857423, 0.030015743255615235, 0.03041279983520508, 0.030035871505737305, 0.03011782455444336, 0.032194561004638675, 0.03055961608886719, 0.03025974464416504, 
0.0302061767578125, 0.029746400833129884, 0.029642784118652343, 0.029637344360351564, 0.029662879943847656, 0.029663616180419922, 0.029749248504638674, 0.029702335357666015, 0.029714239120483397, 0.02971980857849121, 0.029775808334350586, 0.029761344909667968, 0.029825216293334962, 0.029788991928100587, 0.02980575942993164, 0.029799232482910155, 0.029742687225341798, 0.029790271759033204, 0.029730527877807618, 0.029702783584594727, 0.02976335906982422, 0.029720800399780273, 0.02975334358215332, 0.02978201675415039, 0.029722623825073242, 0.0297325439453125, 0.02982943916320801, 0.030025760650634767, 0.02995622444152832, 0.03012915229797363, 0.030204063415527345, 0.030160640716552733, 0.03006355285644531, 0.030044160842895507, 0.02999091148376465, 0.029911039352416992, 0.029886463165283202, 0.029906944274902345, 0.029959936141967774, 0.02993382453918457, 0.02999728012084961, 0.0298855037689209, 0.02983750343322754, 0.02981942367553711, 0.02979430389404297, 0.029836479187011718, 0.029874975204467774, 0.029922943115234375, 0.029850015640258788, 0.029879840850830078, 0.029921728134155272, 0.029949983596801757, 0.029882368087768556, 0.0299233283996582, 0.029998336791992188, 0.030115711212158204, 0.030094207763671874, 0.0300437126159668, 0.03003343963623047, 0.03013500785827637, 0.03227036666870117, 0.03070774459838867, 0.030063167572021484, 0.0298221435546875, 0.029780799865722657, 0.029741056442260744, 0.02984342384338379, 0.029814815521240233, 0.02975129508972168, 0.02976563262939453, 0.029744768142700197, 0.029804927825927734, 0.029714431762695313, 0.02973695945739746, 0.029788223266601563, 0.029734367370605468, 0.02970204734802246, 0.029680192947387694, 0.02979635238647461, 0.029798080444335937, 0.03004419136047363, 0.030015775680541992, 0.03017523193359375, 0.030109695434570313, 0.030031871795654298, 0.03002524757385254, 0.030126367568969727, 0.03027987289428711, 0.02996441650390625, 0.03011737632751465, 0.030230911254882812, 0.030235967636108398, 0.030366399765014648, 0.030535680770874023, 0.03032035255432129, 0.03023209571838379, 0.030370559692382813, 0.030275583267211914, 0.030246912002563478, 0.030215904235839842, 0.030181663513183594, 0.030312320709228516, 0.030072959899902343, 0.030189376831054687, 0.030119871139526366, 0.030040319442749024, 0.030205856323242186, 0.030229856491088867, 0.03002579116821289, 0.030175935745239257, 0.030336736679077148, 0.03011008071899414, 0.030156351089477538, 0.030322368621826173, 0.030352031707763672, 0.030119935989379884, 0.029997312545776367, 0.02999679946899414, 0.030227935791015625, 0.03001807975769043, 0.030109695434570313, 0.030212032318115235, 0.030224447250366212, 0.03246089553833008, 0.030785535812377928, 0.030296064376831053, 0.030003200531005858, 0.029838399887084963, 0.02984441566467285, 0.03003385543823242, 0.029978687286376954, 0.02977177619934082, 0.030101503372192383, 0.030007295608520508, 0.029728511810302734, 0.02976924705505371, 0.029909727096557617, 0.029968351364135743, 0.0299434871673584, 0.029846944808959962, 0.029975488662719728, 0.029908992767333983, 0.02994175910949707, 0.029962112426757812, 0.03015388870239258, 0.02987513542175293, 0.030248735427856447, 0.02988057518005371, 0.0299683837890625, 0.02990675163269043, 0.030146751403808594, 0.02997657585144043, 0.03017919921875, 0.030275039672851563, 0.03054204750061035, 0.030611648559570312, 0.0303372802734375, 0.03058016014099121, 0.030196287155151366, 0.030342720031738282, 0.030116287231445313, 0.03018121528625488, 0.030054559707641603, 0.03018956756591797, 
0.030346687316894532, 0.030284351348876953, 0.029919071197509764, 0.02993078422546387, 0.0300184326171875, 0.030264511108398437, 0.030042848587036132, 0.030166624069213867, 0.030046720504760743, 0.02996220779418945, 0.029890592575073243, 0.030304256439208983, 0.03020150375366211, 0.02998512077331543, 0.03005232048034668, 0.03001683235168457, 0.030007999420166017, 0.030062623977661133, 0.030327871322631837, 0.03016729545593262, 0.030329376220703124, 0.03024470329284668, 0.033325214385986325, 0.031747360229492184, 0.030630399703979492, 0.030443231582641603, 0.03010201644897461, 0.029898752212524415, 0.029747200012207032, 0.029849599838256836, 0.029787263870239257, 0.029743999481201173, 0.029798015594482422, 0.029914527893066405, 0.029762527465820313, 0.030080608367919922, 0.03008348846435547, 0.029916511535644532, 0.03004425621032715, 0.0298417911529541, 0.03005459213256836, 0.029999103546142578, 0.029865503311157227, 0.030129919052124022, 0.02990358352661133, 0.029900543212890623, 0.029943712234497072, 0.029884096145629882, 0.029882816314697264, 0.030289119720458984, 0.03001651191711426, 0.029937664031982423, 0.030094783782958986, 0.03002217674255371, 0.030293439865112303, 0.030196607589721678, 0.030329727172851564, 0.030202720642089845, 0.030250751495361328, 0.030138208389282228, 0.030110111236572267, 0.030038015365600586, 0.029962175369262694, 0.029919296264648437, 0.02990438461303711, 0.02988310432434082, 0.029844671249389648, 0.02991574478149414, 0.029870080947875976, 0.029865983963012696, 0.029877471923828124, 0.02999171257019043, 0.02994700813293457, 0.029911584854125976, 0.029886816024780275, 0.029911039352416992, 0.02993152046203613, 0.0299597110748291, 0.030132768630981445, 0.030000991821289062, 0.029981983184814452, 0.030069568634033202, 0.030105600357055663, 0.030066688537597655, 0.030100576400756834, 0.03231948852539063, 0.030898143768310547, 0.030517248153686522, 0.0302073917388916, 0.029964895248413087, 0.029888416290283205, 0.030077024459838866, 0.02993715286254883, 0.030192031860351562, 0.02982512092590332, 0.029755392074584962, 0.030082176208496094, 0.03023551940917969, 0.030211103439331054, 0.030012351989746094, 0.029923360824584962, 0.030080223083496095, 0.03039516830444336, 0.03018956756591797, 0.030070783615112305, 0.030017023086547853, 0.02989926338195801, 0.030019584655761718, 0.02995609664916992, 0.030053855895996094, 0.030200319290161134, 0.030001119613647462, 0.03005638313293457, 0.030107776641845704, 0.030286048889160155, 0.030365472793579103, 0.03043231964111328, 0.030298336029052735, 0.030472671508789063, 0.030595327377319338, 0.030195711135864257, 0.030294015884399415, 0.031135744094848632, 0.030269439697265626, 0.030029312133789062, 0.030003679275512694, 0.029959615707397462, 0.030313056945800783, 0.02992905616760254, 0.030305856704711913, 0.030077280044555663, 0.030072959899902343, 0.03001363182067871, 0.029970624923706054, 0.030193119049072265, 0.030144960403442382, 0.029935264587402345, 0.02997907257080078, 0.03078348731994629, 0.03033087921142578, 0.030126079559326172, 0.03022233581542969, 0.03003392028808594, 0.030023008346557616, 0.030261568069458008, 0.030318143844604493, 0.030155168533325196, 0.030433664321899413, 0.03231110382080078, 0.03096182441711426, 0.03017932891845703, 0.0299532470703125, 0.030118688583374024, 0.03018137550354004, 0.02980454444885254, 0.029800447463989257, 0.02979151916503906, 0.030052959442138674, 0.029849536895751955, 0.030277183532714844, 0.029979263305664063, 0.029915136337280275, 0.030113792419433592, 0.0298570556640625, 
0.030149343490600587, 0.030085119247436523, 0.029816831588745117, 0.030126079559326172, 0.030058496475219725, 0.029834943771362303, 0.029780288696289063, 0.029863744735717773, 0.029990463256835936, 0.029991552352905272, 0.029853599548339844, 0.029851743698120117, 0.030260223388671875, 0.03029875183105469, 0.030181760787963866, 0.030283775329589844, 0.03032268714904785, 0.030320640563964843, 0.030232576370239257, 0.030253055572509766, 0.030107648849487304, 0.030191295623779296, 0.03000966453552246, 0.03007187271118164, 0.03032294464111328, 0.03018207931518555, 0.029961919784545897, 0.030002496719360353, 0.030135295867919923, 0.030277631759643556, 0.030091264724731445, 0.030021312713623047, 0.029951583862304686, 0.03130646324157715, 0.030169343948364256, 0.029867776870727537, 0.030291007995605468, 0.030268352508544923, 0.030053728103637694, 0.03010371208190918, 0.030046783447265624, 0.03003308868408203, 0.030317312240600587, 0.030013439178466796, 0.030104639053344727, 0.030155712127685547, 0.030236064910888674]",tokens/s,33.22094866482862,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.96768,718.209024,0.0,322.961408,314.743808,s,1,7.3543603515625,7.3543603515625,0.0,7.3543603515625,7.3543603515625,7.3543603515625,7.3543603515625,[7.3543603515625],,kWh,4.528132787478019e-06,4.921199367924644e-07,1.0097230300071258e-06,6.02997575427761e-06,,MB,1109.393408,810.483712,0.0,404.750336,391.119872,s,28,0.23511241626739496,0.00839687200954982,5.0066419832193083e-05,0.008390496253967285,0.008446630477905273,0.008482257795333861,0.008542315378189086,"[0.008561599731445312, 0.008347359657287598, 0.008490176200866699, 0.008378623962402344, 0.00836246395111084, 0.008417216300964356, 0.008345791816711426, 0.008420000076293946, 0.008392512321472168, 0.008333632469177246, 0.00831372833251953, 0.008388480186462403, 0.008340831756591796, 0.00840828800201416, 0.008381471633911133, 0.008366623878479003, 0.008437088012695312, 0.008401503562927246, 0.008437664031982421, 0.008411968231201172, 0.008467552185058593, 0.008403200149536133, 0.008402400016784668, 0.008382783889770509, 0.008385055541992188, 0.008348480224609375, 0.008399552345275878, 0.008386367797851563]",tokens/s,30487.543421984927,kWh,2.677828497402986e-07,2.9515348800167903e-08,1.7613773129606e-07,4.734359298365265e-07,tokens/kWh,540727865.9403706,MB,1119.39584,823.066624,0.0,417.333248,391.122432,s,28,10.094631469726561,0.3605225524902344,0.02247068146145327,0.3560248718261719,0.3585250762939453,0.35991512908935547,0.4454740164184571,"[0.4770150146484375, 0.35675845336914064, 0.35692742919921877, 0.35595477294921873, 0.3581532897949219, 0.35647695922851563, 0.35771258544921875, 0.359392578125, 0.3544256591796875, 0.35476324462890624, 0.3531097717285156, 0.35399282836914064, 0.354876708984375, 0.3577764892578125, 0.3552135009765625, 0.35681475830078124, 0.3555978088378906, 0.3551192626953125, 0.3559044494628906, 
0.35580642700195314, 0.3554816589355469, 0.356094970703125, 0.35668875122070315, 0.3554750671386719, 0.3562126159667969, 0.3550398864746094, 0.36019650268554687, 0.3576500244140625]",tokens/s,174.74634961069876,kWh,1.0572432539457617e-05,1.1659751416953781e-06,4.684432000777751e-06,1.6422839681930743e-05,tokens/kWh,3836120.9888272765,,s,1764,10.082759708881394,0.005715850175102823,0.0028396695228811914,0.0056310720443725586,0.005697811079025269,0.0057735600233078,0.006137980718612669,"[0.005451935768127442, 0.005694911956787109, 0.005716127872467041, 0.005833183765411377, 0.005695487976074219, 0.005738399982452393, 0.0056648640632629395, 0.005642240047454834, 0.005666783809661865, 0.005847072124481201, 0.005715583801269531, 0.005724544048309326, 0.005670911788940429, 0.005752831935882568, 0.00588972806930542, 0.0057432641983032225, 0.005648032188415527, 0.12485836791992187, 0.005994336128234863, 0.005746848106384277, 0.005677055835723877, 0.005640096187591552, 0.005617760181427002, 0.005640416145324707, 0.005646111965179443, 0.00564134407043457, 0.005684095859527588, 0.005607423782348633, 0.0056258559226989744, 0.005586080074310303, 0.005639008045196533, 0.005646336078643799, 0.005631999969482422, 0.0056975998878479005, 0.005652416229248047, 0.005857279777526855, 0.005732160091400146, 0.0056724481582641605, 0.005646080017089844, 0.0056063361167907715, 0.005668863773345947, 0.005616991996765137, 0.005664735794067383, 0.005669472217559815, 0.005745759963989258, 0.005702239990234375, 0.005610208034515381, 0.005725120067596436, 0.005620192050933838, 0.00561900806427002, 0.005652607917785645, 0.005610655784606933, 0.00562553596496582, 0.005668320178985596, 0.005626399993896484, 0.005594336032867432, 0.0056096000671386715, 0.005651103973388672, 0.005603040218353271, 0.005607711791992188, 0.005594751834869385, 0.005601151943206787, 0.005672959804534912, 0.0053487358093261715, 0.00566048002243042, 0.005829823970794678, 0.00569920015335083, 0.005656479835510254, 0.00567084789276123, 0.00575107192993164, 0.005640192031860352, 0.005719808101654053, 0.005639776229858399, 0.005657023906707763, 0.005617887973785401, 0.005703680038452149, 0.005678336143493653, 0.005645055770874024, 0.005648672103881836, 0.005684959888458252, 0.0056295361518859865, 0.005683360099792481, 0.005615647792816162, 0.005832255840301514, 0.0056162881851196285, 0.0056258559226989744, 0.0056293120384216305, 0.005704319953918457, 0.0056258559226989744, 0.005584479808807373, 0.005620128154754639, 0.005591040134429932, 0.005576704025268555, 0.005662399768829346, 0.005558591842651367, 0.0056442880630493165, 0.005581151962280273, 0.006147456169128418, 0.0056118078231811525, 0.005596896171569824, 0.005625440120697022, 0.005673664093017578, 0.005819519996643066, 0.005682047843933106, 0.005592671871185303, 0.005717663764953613, 0.005618239879608154, 0.005618207931518555, 0.005633696079254151, 0.005638112068176269, 0.005595168113708496, 0.00559884786605835, 0.005642240047454834, 0.005654911994934082, 0.005627200126647949, 0.005609951972961426, 0.005610879898071289, 0.005659488201141358, 0.005621664047241211, 0.005635551929473877, 0.005616256237030029, 0.00561900806427002, 0.005775743961334228, 0.00571014404296875, 0.0056852478981018065, 0.005670911788940429, 0.005462143898010254, 0.005672351837158203, 0.0056938881874084475, 0.005642943859100342, 0.005635807991027832, 0.005670911788940429, 0.005609471797943116, 0.005649983882904053, 0.005628352165222168, 0.005670239925384521, 0.005642015933990479, 0.005604512214660644, 0.005652192115783691, 
0.005644224166870117, 0.005637663841247559, 0.005656991958618164, 0.0056070399284362795, 0.005659135818481445, 0.005647359848022461, 0.005661695957183838, 0.005672639846801758, 0.005637728214263916, 0.005648575782775879, 0.00561411190032959, 0.0057712640762329105, 0.0056442880630493165, 0.005677055835723877, 0.005649856090545654, 0.005614143848419189, 0.005646207809448242, 0.005642303943634033, 0.005602719783782959, 0.005648672103881836, 0.005681280136108398, 0.0056302080154418946, 0.005625247955322266, 0.0056431999206542965, 0.005610559940338135, 0.005598048210144043, 0.005646016120910645, 0.005592864036560058, 0.005897503852844238, 0.005659647941589355, 0.00566921615600586, 0.005752480030059815, 0.005650207996368408, 0.006117599964141846, 0.00565772819519043, 0.005741439819335938, 0.005654079914093018, 0.005665215969085693, 0.0056483840942382815, 0.005677055835723877, 0.005657599925994873, 0.005628960132598877, 0.005646304130554199, 0.005646336078643799, 0.005611072063446045, 0.005717472076416015, 0.005589983940124512, 0.005621503829956054, 0.005628159999847412, 0.005701632022857666, 0.005383647918701172, 0.005673503875732422, 0.005656576156616211, 0.005604351997375488, 0.005631328105926514, 0.005637440204620361, 0.005618239879608154, 0.006198400020599365, 0.005634751796722412, 0.005666656017303467, 0.0056475200653076174, 0.005649375915527344, 0.005646336078643799, 0.0056852478981018065, 0.0056483840942382815, 0.00566476821899414, 0.0056234879493713376, 0.00561359977722168, 0.005646431922912597, 0.0057112002372741695, 0.005601696014404297, 0.005632448196411133, 0.00561356782913208, 0.005638144016265869, 0.005622943878173828, 0.005605663776397705, 0.005655104160308838, 0.005598495960235596, 0.005700319766998291, 0.005609471797943116, 0.005598464012145996, 0.005632768154144287, 0.005642111778259277, 0.0056096000671386715, 0.0056135039329528805, 0.005603392124176026, 0.005705728054046631, 0.005608863830566406, 0.0056735677719116214, 0.005622911930084229, 0.0056265277862548825, 0.005627327919006347, 0.005628064155578614, 0.005742720127105713, 0.005609983921051025, 0.005632319927215576, 0.00563375997543335, 0.005650400161743164, 0.00563318395614624, 0.005595168113708496, 0.005622015953063965, 0.005632639884948731, 0.005698592185974121, 0.00565340805053711, 0.00562992000579834, 0.005658656120300293, 0.005629280090332031, 0.005730976104736328, 0.005600992202758789, 0.005593376159667969, 0.005635104179382324, 0.005600224018096924, 0.00563750410079956, 0.0053547840118408204, 0.005625919818878174, 0.005614336013793945, 0.005592127799987793, 0.005851583957672119, 0.0055996479988098145, 0.005638144016265869, 0.005631999969482422, 0.005609663963317871, 0.005643871784210205, 0.005616991996765137, 0.005630847930908203, 0.005634047985076904, 0.0056566400527954105, 0.005642399787902832, 0.005639071941375732, 0.005694047927856445, 0.005653120040893554, 0.005654176235198974, 0.005654528141021729, 0.0064386558532714844, 0.0060900158882141115, 0.005679232120513916, 0.0057721281051635745, 0.005627456188201904, 0.005675456047058105, 0.005650207996368408, 0.0056566400527954105, 0.005642399787902832, 0.0056236801147460935, 0.005717887878417969, 0.005633696079254151, 0.005647071838378906, 0.005633344173431396, 0.005812128067016601, 0.005694111824035644, 0.005626976013183594, 0.005659552097320557, 0.005713888168334961, 0.005675039768218994, 0.005768223762512207, 0.0057704000473022465, 0.00564192008972168, 0.005763199806213379, 0.005664063930511475, 0.005603744029998779, 0.005640480041503906, 0.00559660816192627, 
0.005674848079681397, 0.005652224063873291, 0.005618336200714111, 0.005638463973999024, 0.0056258559226989744, 0.005609471797943116, 0.005650432109832764, 0.005611519813537597, 0.0056863040924072265, 0.005667808055877685, 0.005612576007843017, 0.00563647985458374, 0.00562992000579834, 0.0056408319473266605, 0.0060208959579467775, 0.005398528099060058, 0.005647903919219971, 0.005673439979553223, 0.005654528141021729, 0.0056436161994934085, 0.005665440082550049, 0.005638144016265869, 0.005645408153533936, 0.005747615814208984, 0.005658624172210694, 0.005678976058959961, 0.005658207893371582, 0.005701759815216065, 0.005628159999847412, 0.0056505918502807615, 0.00561356782913208, 0.005611839771270752, 0.005738175868988037, 0.00559830379486084, 0.00565340805053711, 0.005636159896850586, 0.005669919967651367, 0.005665760040283203, 0.005622911930084229, 0.0056650562286376955, 0.005895711898803711, 0.00567193603515625, 0.005675007820129394, 0.005646560192108154, 0.005658400058746338, 0.005636096000671387, 0.005645823955535889, 0.005622208118438721, 0.005627552032470703, 0.005656991958618164, 0.005623807907104492, 0.005623807907104492, 0.0056258559226989744, 0.005620736122131348, 0.0056824002265930176, 0.005569568157196045, 0.005622879981994629, 0.005609119892120361, 0.005601280212402344, 0.005591040134429932, 0.005674975872039795, 0.00563375997543335, 0.00601529598236084, 0.005791744232177734, 0.005816319942474365, 0.005626976013183594, 0.005622655868530274, 0.0056945281028747555, 0.005658944129943847, 0.005583712100982666, 0.00562275218963623, 0.005596000194549561, 0.005590943813323974, 0.0056481599807739254, 0.005566976070404053, 0.005661664009094239, 0.005583072185516357, 0.005581727981567383, 0.005351871967315674, 0.0055927357673645015, 0.005861663818359375, 0.005599232196807862, 0.005593088150024414, 0.0055808000564575196, 0.005574656009674072, 0.005601280212402344, 0.0055668802261352535, 0.005585984230041504, 0.005609536170959472, 0.005597536087036133, 0.005701759815216065, 0.005819456100463867, 0.006220736026763916, 0.006209536075592041, 0.006112383842468262, 0.005630847930908203, 0.005658976078033447, 0.005942431926727295, 0.006500703811645508, 0.005635263919830322, 0.00566806411743164, 0.00565993595123291, 0.005824704170227051, 0.0056293439865112305, 0.005583744049072265, 0.005662432193756103, 0.005600736141204834, 0.005636991977691651, 0.00562716817855835, 0.005610144138336182, 0.00562716817855835, 0.005579487800598144, 0.005614943981170654, 0.005626656055450439, 0.005584767818450928, 0.00566476821899414, 0.005595136165618897, 0.005623807907104492, 0.005607423782348633, 0.005611519813537597, 0.0056274237632751465, 0.005595615863800049, 0.005617504119873047, 0.005621920108795166, 0.00561356782913208, 0.005627903938293457, 0.005601471900939942, 0.005625664234161377, 0.005611519813537597, 0.005642240047454834, 0.005631999969482422, 0.005606527805328369, 0.005675456047058105, 0.005648575782775879, 0.005651936054229736, 0.0056368961334228515, 0.00561356782913208, 0.005621632099151611, 0.005621664047241211, 0.005624032020568848, 0.005595136165618897, 0.0054579200744628905, 0.0056179518699646, 0.005590752124786377, 0.005627903938293457, 0.005619775772094726, 0.005631936073303223, 0.005582848072052002, 0.005594783782958984, 0.0055937919616699215, 0.0055968317985534664, 0.005603328227996827, 0.005595136165618897, 0.005574016094207763, 0.005615615844726562, 0.005590784072875977, 0.005890944004058838, 0.006743231773376465, 0.0072341117858886715, 0.007020607948303222, 0.005933248043060303, 
0.005666816234588623, 0.005607264041900635, 0.005623968124389649, 0.005601280212402344, 0.005635488033294678, 0.005896800041198731, 0.005623839855194092, 0.00566048002243042, 0.005616896152496338, 0.005601439952850342, 0.00566326379776001, 0.005600575923919678, 0.0056143999099731445, 0.005635935783386231, 0.005613791942596436, 0.005615359783172608, 0.005619743824005127, 0.0056154241561889645, 0.005615327835083008, 0.005630688190460205, 0.005600895881652832, 0.005638688087463379, 0.005617504119873047, 0.005627903938293457, 0.005611519813537597, 0.005607168197631836, 0.005603583812713623, 0.00566048002243042, 0.005601471900939942, 0.005636096000671387, 0.005638144016265869, 0.005668863773345947, 0.005622111797332764, 0.005592639923095703, 0.005636191844940185, 0.005596288204193115, 0.005617695808410645, 0.0057923197746276855, 0.005624095916748047, 0.005634047985076904, 0.005629024028778076, 0.005600160121917725, 0.005645311832427978, 0.0053821439743042, 0.005654528141021729, 0.005611199855804444, 0.005638463973999024, 0.0056863360404968265, 0.005675968170166016, 0.00562716817855835, 0.005627967834472656, 0.005765024185180664, 0.0056241598129272465, 0.005613984107971192, 0.0056217598915100095, 0.005694464206695556, 0.00564089584350586, 0.005593728065490722, 0.005627711772918701, 0.00559500789642334, 0.00557260799407959, 0.005636096000671387, 0.005580031871795654, 0.005630080223083496, 0.005605120182037354, 0.005630847930908203, 0.005646336078643799, 0.005609471797943116, 0.005633215904235839, 0.005614208221435547, 0.0056302080154418946, 0.00560697603225708, 0.0056910080909729, 0.005626207828521729, 0.005607840061187744, 0.0056356477737426755, 0.005591487884521485, 0.005600768089294434, 0.0056161279678344726, 0.005627520084381103, 0.005617536067962646, 0.005609983921051025, 0.005580671787261963, 0.005673056125640869, 0.005580192089080811, 0.005642784118652344, 0.005609151840209961, 0.005581215858459472, 0.005631999969482422, 0.005601280212402344, 0.0056200637817382815, 0.005588384151458741, 0.005654784202575683, 0.005650432109832764, 0.005602943897247315, 0.005619808197021485, 0.005597472190856934, 0.005605184078216553, 0.005611711978912353, 0.005601280212402344, 0.005654528141021729, 0.005615039825439453, 0.005571135997772217, 0.005619711875915527, 0.005582047939300537, 0.005603231906890869, 0.005328896045684814, 0.005587007999420166, 0.0057134079933166505, 0.005808576107025147, 0.005640543937683105, 0.005611167907714844, 0.005603328227996827, 0.005582240104675293, 0.005572224140167236, 0.005599967956542969, 0.005561600208282471, 0.005635072231292724, 0.005791168212890625, 0.005562943935394287, 0.005598239898681641, 0.005573599815368652, 0.005601280212402344, 0.005603295803070068, 0.005590496063232422, 0.00571449613571167, 0.005748479843139648, 0.005908448219299317, 0.005828479766845703, 0.005654943943023682, 0.005672959804534912, 0.005621344089508057, 0.005613984107971192, 0.005588607788085937, 0.005615200042724609, 0.005628704071044922, 0.005603328227996827, 0.005594783782958984, 0.005584415912628174, 0.005616543769836425, 0.0056109437942504885, 0.005659103870391846, 0.005634047985076904, 0.005586944103240967, 0.005629951953887939, 0.005659711837768555, 0.005616576194763184, 0.005606431961059571, 0.005612512111663818, 0.005652480125427246, 0.005611519813537597, 0.005621632099151611, 0.005603456020355225, 0.005615615844726562, 0.0056217598915100095, 0.005594560146331787, 0.0056059517860412595, 0.005599135875701904, 0.005616928100585937, 0.005638688087463379, 0.0055668802261352535, 
0.005640160083770752, 0.005582752227783203, 0.0056217598915100095, 0.005620960235595703, 0.005571360111236572, 0.0055808000564575196, 0.005596320152282715, 0.005585855960845947, 0.005375936031341553, 0.005572800159454346, 0.005634751796722412, 0.005596255779266358, 0.005602208137512207, 0.00556441593170166, 0.005563936233520508, 0.005577184200286865, 0.0055623679161071774, 0.005613247871398926, 0.005595680236816406, 0.005551904201507568, 0.005631872177124023, 0.005574783802032471, 0.005588992118835449, 0.0055582718849182125, 0.005554175853729248, 0.005576704025268555, 0.005625152111053467, 0.005554463863372803, 0.005603744029998779, 0.005564703941345215, 0.005600992202758789, 0.005583136081695556, 0.005590464115142822, 0.0055790400505065915, 0.005560287952423096, 0.005601471900939942, 0.005729983806610108, 0.005621856212615967, 0.005621823787689209, 0.005582848072052002, 0.005611519813537597, 0.005597184181213379, 0.005582431793212891, 0.005625823974609375, 0.0056936640739440915, 0.005648608207702637, 0.005609471797943116, 0.005612544059753418, 0.005598207950592041, 0.005588992118835449, 0.005619711875915527, 0.005594399929046631, 0.0055938239097595216, 0.005576704025268555, 0.0055474557876586915, 0.005591616153717041, 0.0055848960876464845, 0.005658048152923584, 0.005597536087036133, 0.005578976154327392, 0.005595136165618897, 0.005615520000457764, 0.005625951766967774, 0.0055848960876464845, 0.005586016178131104, 0.005907360076904297, 0.00556441593170166, 0.005599391937255859, 0.005594560146331787, 0.005580543994903564, 0.005618656158447265, 0.005347455978393555, 0.005714111804962159, 0.005601280212402344, 0.005615615844726562, 0.0055922560691833495, 0.005575488090515137, 0.005606656074523926, 0.005620480060577393, 0.005619904041290283, 0.005605184078216553, 0.0055920639038085935, 0.005618688106536865, 0.005592991828918457, 0.005619391918182373, 0.005583263874053955, 0.005570559978485107, 0.005586368083953857, 0.005589087963104248, 0.005625984191894531, 0.0056629438400268555, 0.005606527805328369, 0.005643551826477051, 0.005572319984436035, 0.005596511840820313, 0.00563267183303833, 0.005607423782348633, 0.005597087860107422, 0.005609568119049072, 0.005629439830780029, 0.0055895037651062015, 0.005590688228607177, 0.00564463996887207, 0.0055808000564575196, 0.005623839855194092, 0.005581952095031738, 0.005581920146942139, 0.00562713623046875, 0.005626368045806885, 0.005607552051544189, 0.005607295989990235, 0.0055760002136230466, 0.005628608226776123, 0.005578303813934326, 0.005639840126037598, 0.005589600086212158, 0.005580671787261963, 0.005603519916534424, 0.005578048229217529, 0.005658432006835938, 0.005609471797943116, 0.005600607872009277, 0.0056459841728210445, 0.005607423782348633, 0.005623807907104492, 0.005599232196807862, 0.005689343929290771, 0.005613247871398926, 0.0055790719985961915, 0.005605375766754151, 0.0057935361862182615, 0.005843200206756592, 0.005621407985687256, 0.005589344024658203, 0.005359615802764893, 0.005588992118835449, 0.005627903938293457, 0.005600351810455323, 0.005607935905456543, 0.00558735990524292, 0.0055764479637145994, 0.00561568021774292, 0.005962080001831055, 0.005650271892547607, 0.00561356782913208, 0.005608799934387207, 0.005634719848632812, 0.005581056118011475, 0.005631455898284912, 0.0055957441329956056, 0.005596864223480225, 0.005619487762451172, 0.005582143783569336, 0.005630879878997803, 0.005605375766754151, 0.005615231990814209, 0.005630239963531494, 0.005602431774139404, 0.005630943775177002, 0.005629951953887939, 0.005631040096282959, 
0.005640960216522216, 0.005596704006195068, 0.00559990406036377, 0.005619584083557129, 0.005633408069610595, 0.0055857281684875484, 0.005574016094207763, 0.005589568138122559, 0.005603328227996827, 0.005576704025268555, 0.005641983985900879, 0.005595232009887695, 0.005625376224517822, 0.005601920127868652, 0.005615744113922119, 0.005615488052368164, 0.005597184181213379, 0.005728256225585937, 0.005671040058135986, 0.005633408069610595, 0.005808127880096436, 0.0055976958274841304, 0.005701632022857666, 0.005605216026306152, 0.005673120021820069, 0.005617152214050293, 0.005607935905456543, 0.0056258559226989744, 0.005607295989990235, 0.005611648082733154, 0.005658624172210694, 0.0056852478981018065, 0.005679103851318359, 0.005660672187805176, 0.005639552116394043, 0.005730432033538818, 0.0053366079330444335, 0.005619616031646729, 0.005648287773132324, 0.005702527999877929, 0.00559827184677124, 0.005630559921264648, 0.005577087879180908, 0.0056044158935546875, 0.005632768154144287, 0.005777279853820801, 0.005920063972473144, 0.005594079971313477, 0.005649759769439698, 0.005605663776397705, 0.005640575885772705, 0.0056135358810424805, 0.005608575820922852, 0.005626016139984131, 0.006284031867980957, 0.006352543830871582, 0.006281023979187012, 0.005992512226104737, 0.005953440189361572, 0.005671487808227539, 0.005652480125427246, 0.005664480209350586, 0.005693727970123291, 0.005685408115386963, 0.005635583877563476, 0.005593311786651611, 0.005654655933380127, 0.005595136165618897, 0.005643551826477051, 0.005616352081298828, 0.005612800121307373, 0.005616384029388428, 0.0055848960876464845, 0.00564192008972168, 0.005636415958404541, 0.0055848960876464845, 0.005705728054046631, 0.005818143844604492, 0.005635488033294678, 0.005607391834259033, 0.005608287811279297, 0.005614880084991455, 0.0055979199409484865, 0.005593088150024414, 0.005605375766754151, 0.005615615844726562, 0.00562332820892334, 0.005609856128692627, 0.005625984191894531, 0.005609439849853516, 0.005595136165618897, 0.005617504119873047, 0.005593247890472412, 0.005637440204620361, 0.0056031041145324706, 0.005601439952850342, 0.005611519813537597, 0.005579520225524903, 0.005607423782348633, 0.0055354881286621095, 0.0056302080154418946, 0.005605375766754151, 0.005625279903411865, 0.005620287895202637, 0.005609471797943116, 0.005631999969482422, 0.005592607975006104, 0.0056427202224731444, 0.0056258559226989744, 0.005642240047454834, 0.005650239944458008, 0.005594719886779785, 0.005634655952453613, 0.005621471881866455, 0.005595424175262451, 0.005646207809448242, 0.0056152639389038085, 0.005620192050933838, 0.005603328227996827, 0.005611519813537597, 0.005638144016265869, 0.005593088150024414, 0.005633664131164551, 0.0056221442222595215, 0.005589248180389405, 0.005627647876739502, 0.005605728149414063, 0.005602975845336914, 0.005596704006195068, 0.005595104217529297, 0.0056427521705627445, 0.005609471797943116, 0.005619711875915527, 0.005640192031860352, 0.0056154241561889645, 0.005628032207489014, 0.00560748815536499, 0.005602880001068115, 0.005626304149627686, 0.006440959930419922, 0.005617568016052246, 0.005811647891998291, 0.0056241598129272465, 0.005601600170135498, 0.005647647857666016, 0.005601215839385987, 0.00560649585723877, 0.005590720176696777, 0.005574656009674072, 0.005638144016265869, 0.005619711875915527, 0.005631999969482422, 0.005607423782348633, 0.005605375766754151, 0.005633855819702148, 0.005606688022613525, 0.005602208137512207, 0.005615488052368164, 0.005611648082733154, 0.00563750410079956, 0.005589632034301758, 
0.005623807907104492, 0.005357696056365967, 0.005615392208099365, 0.005610144138336182, 0.005662720203399658, 0.005636127948760986, 0.00562172794342041, 0.0056258878707885745, 0.005606880187988281, 0.005597311973571778, 0.005630335807800293, 0.005605375766754151, 0.005617631912231445, 0.005586976051330566, 0.005574656009674072, 0.005621503829956054, 0.005589248180389405, 0.005646624088287353, 0.00561078405380249, 0.005677504062652588, 0.005631872177124023, 0.005640160083770752, 0.005658783912658691, 0.005633952140808106, 0.005636191844940185, 0.005627903938293457, 0.005605375766754151, 0.005607423782348633, 0.005647679805755615, 0.005659167766571045, 0.005640192031860352, 0.0056303682327270505, 0.005684991836547851, 0.005611008167266846, 0.0056254081726074216, 0.005742847919464111, 0.00564521598815918, 0.00566044807434082, 0.005623167991638184, 0.00564195203781128, 0.005622687816619873, 0.005656576156616211, 0.005615615844726562, 0.005617919921875, 0.005653439998626709, 0.005621920108795166, 0.005859839916229248, 0.006127776145935059, 0.005861343860626221, 0.005633056163787842, 0.005645408153533936, 0.005681056022644043, 0.005660672187805176, 0.005675072193145752, 0.005652160167694092, 0.005671040058135986, 0.005619679927825928, 0.0057077760696411135, 0.005691391944885254, 0.00563420820236206, 0.005642240047454834, 0.005809760093688965, 0.0060661759376525876, 0.005627744197845459, 0.005402912139892578, 0.005650432109832764, 0.0056310720443725586, 0.005659552097320557, 0.005699584007263184, 0.005703392028808593, 0.005613855838775635, 0.005617631912231445, 0.005639616012573242, 0.005603456020355225, 0.005628384113311767, 0.005604351997375488, 0.005596159934997558, 0.005651552200317383, 0.005622464179992676, 0.005635903835296631, 0.00564028787612915, 0.0057736320495605465, 0.005648575782775879, 0.005601088047027588, 0.005692575931549072, 0.005643104076385498, 0.005672959804534912, 0.005646336078643799, 0.0056640000343322755, 0.0059028158187866215, 0.0056239042282104496, 0.005629119873046875, 0.005641215801239013, 0.005639935970306396, 0.0056343040466308595, 0.005616640090942383, 0.0056492481231689455, 0.0056440639495849605, 0.005651904106140137, 0.00561407995223999, 0.005625311851501465, 0.005635039806365967, 0.005624896049499512, 0.005618624210357666, 0.005627264022827148, 0.005617472171783447, 0.005647168159484863, 0.00563750410079956, 0.005636735916137696, 0.005650432109832764, 0.005627456188201904, 0.005652927875518799, 0.005603328227996827, 0.005670015811920166, 0.005622655868530274, 0.005640192031860352, 0.005624864101409912, 0.005604320049285889, 0.0056258559226989744, 0.005612544059753418, 0.005628032207489014, 0.005606272220611572, 0.005631999969482422, 0.005609471797943116, 0.005629568099975586, 0.00564467191696167, 0.0056442880630493165, 0.005392384052276611, 0.005628223896026611, 0.00562278413772583, 0.005644224166870117, 0.005609344005584717, 0.00564518404006958, 0.005613152027130127, 0.005597599983215332, 0.0056258559226989744, 0.0056137280464172365, 0.005595200061798096, 0.005631775856018067, 0.005588992118835449, 0.005666944026947021, 0.00559500789642334, 0.005629727840423584, 0.005615359783172608, 0.005591519832611084, 0.005651679992675782, 0.0056061758995056155, 0.005640192031860352, 0.005605375766754151, 0.005584991931915283, 0.005654431819915771, 0.005826560020446778, 0.005627520084381103, 0.0056180481910705566, 0.005630015850067139, 0.005613024234771728, 0.00559113597869873, 0.0056236801147460935, 0.005609536170959472, 0.005638591766357422, 0.005629568099975586, 
0.005754784107208252, 0.0056835517883300785, 0.005652607917785645, 0.005662720203399658, 0.005637695789337158, 0.005605823993682861, 0.005654528141021729, 0.005618847846984863, 0.005651296138763428, 0.0056483840942382815, 0.005646016120910645, 0.005632415771484375, 0.005599135875701904, 0.005643743991851807, 0.005634592056274414, 0.005645919799804688, 0.005633600234985351, 0.005624095916748047, 0.005687520027160644, 0.005640543937683105, 0.005658432006835938, 0.00564243221282959, 0.005607423782348633, 0.005652480125427246, 0.005600927829742431, 0.005658976078033447, 0.005617663860321045, 0.005599232196807862, 0.005646336078643799, 0.005373248100280762, 0.005667808055877685, 0.005599391937255859, 0.00562937593460083, 0.005628320217132568, 0.0056211199760437015, 0.005627711772918701, 0.005606207847595215, 0.005609471797943116, 0.005586559772491455, 0.005560704231262207, 0.005619711875915527, 0.005566463947296142, 0.005599232196807862, 0.005583936214447021, 0.0055799040794372555, 0.005639071941375732, 0.005593311786651611, 0.005688000202178955, 0.005631999969482422, 0.005623807907104492, 0.005658239841461182, 0.005621600151062012, 0.005698336124420166, 0.005666336059570312, 0.005676928043365478, 0.005670591831207276, 0.005666528224945068, 0.005673920154571533, 0.005637663841247559, 0.005816800117492676, 0.00568236780166626, 0.005868447780609131, 0.00567849588394165, 0.00563865613937378, 0.005652480125427246, 0.005619872093200684, 0.00564796781539917, 0.005635903835296631, 0.0056406397819519045, 0.0057051520347595214, 0.005878335952758789, 0.0057019200325012205, 0.005631103992462159, 0.005673439979553223, 0.005615744113922119, 0.005610847949981689, 0.005648799896240234, 0.005646592140197754, 0.005660672187805176, 0.005642240047454834, 0.005629439830780029, 0.00565670394897461, 0.005616000175476074, 0.005628159999847412, 0.005633791923522949, 0.005629759788513183, 0.005635488033294678, 0.005613408088684082, 0.005641151905059814, 0.005627903938293457, 0.005650432109832764, 0.005628096103668213, 0.005391615867614746, 0.005603936195373535, 0.0056068158149719235, 0.005608191967010498, 0.005625631809234619, 0.005613247871398926, 0.0056241598129272465, 0.005600736141204834, 0.00563478422164917, 0.005750239849090576, 0.005603871822357178, 0.005610879898071289, 0.005611616134643555, 0.005654111862182617, 0.005636223793029785, 0.00563647985458374, 0.005613887786865234, 0.0056096000671386715, 0.005654016017913818, 0.005614240169525147, 0.005646175861358642, 0.0056258559226989744, 0.005621344089508057, 0.005663392066955567, 0.005615231990814209, 0.005680992126464844, 0.005644576072692871, 0.005668032169342041, 0.005655295848846435, 0.005627967834472656, 0.00566476821899414, 0.005615615844726562, 0.005635200023651123, 0.005624767780303955, 0.005627264022827148, 0.005717984199523926, 0.0056341438293457035, 0.005664544105529785, 0.0056464638710021975, 0.005617695808410645, 0.005681727886199951, 0.005624959945678711, 0.005687424182891846, 0.005649151802062988, 0.005652671813964844, 0.005670752048492432, 0.005657919883728027, 0.005671008110046387, 0.005624383926391602, 0.005765120029449463, 0.005711391925811767, 0.005634528160095215, 0.005660287857055664, 0.005626239776611328, 0.005668863773345947, 0.005634047985076904, 0.005666336059570312, 0.005622591972351074, 0.005610879898071289, 0.005744448184967041, 0.005611487865447998, 0.0056570878028869625, 0.005646336078643799, 0.005398591995239258, 0.005645023822784424, 0.005597280025482178, 0.005656544208526611, 0.005597216129302978, 0.005633887767791748, 
0.005626016139984131, 0.005608640193939209, 0.0056360640525817875, 0.00561033582687378, 0.005647552013397217, 0.00565715217590332, 0.0058178558349609374, 0.005609856128692627, 0.0056277761459350584, 0.005628416061401367, 0.005615615844726562, 0.005629951953887939, 0.005611519813537597, 0.005615615844726562, 0.005634047985076904, 0.005609471797943116, 0.00562713623046875, 0.005624576091766358, 0.005608511924743652, 0.005651391983032226, 0.00563097620010376, 0.005670976161956787, 0.005669727802276612, 0.005640255928039551, 0.005724192142486572, 0.005666207790374756, 0.005723936080932617, 0.005642240047454834, 0.005661375999450683, 0.005699711799621582, 0.005635488033294678, 0.005679711818695068, 0.005642240047454834, 0.005646240234375, 0.0056054720878601074, 0.005605120182037354, 0.005646592140197754, 0.0056146240234375, 0.005630943775177002, 0.005633503913879395, 0.005596896171569824, 0.005672863960266113, 0.0056267518997192385, 0.005643455982208252, 0.005632863998413086, 0.005673247814178467, 0.005649663925170898, 0.005616096019744873, 0.0056622719764709475, 0.005607359886169434, 0.005636608123779297, 0.005610976219177246, 0.005618207931518555, 0.005633535861968994, 0.0056202239990234375, 0.005636096000671387, 0.005631999969482422, 0.005351679801940918, 0.0056003198623657225, 0.005632991790771484, 0.005727519989013672, 0.0056388797760009764, 0.005646336078643799, 0.005638144016265869, 0.0056258559226989744, 0.005672095775604248, 0.00567145586013794, 0.005695648193359375, 0.005652544021606446, 0.0056341438293457035, 0.0056433920860290525, 0.005633056163787842, 0.005653535842895508, 0.005619904041290283, 0.005628255844116211, 0.005628128051757813, 0.0056098241806030276, 0.005678815841674805, 0.005650432109832764, 0.005643487930297852, 0.005616415977478027, 0.005648064136505127, 0.005635935783386231, 0.005601759910583496, 0.0056418561935424804, 0.005936927795410156, 0.005642848014831543, 0.0056250238418579105, 0.005624127864837646, 0.005634560108184815, 0.005629951953887939, 0.005646336078643799, 0.005619711875915527, 0.00561359977722168, 0.005664735794067383, 0.005609471797943116, 0.0056497278213500975, 0.005613791942596436, 0.0056232957839965824, 0.005667520046234131, 0.005625984191894531, 0.005632160186767578, 0.0056638078689575195, 0.005634943962097168, 0.005625823974609375, 0.005637631893157959, 0.0056735677719116214, 0.005656576156616211, 0.005687295913696289, 0.005697535991668701, 0.00564415979385376, 0.005662112236022949, 0.005637951850891113, 0.005651360034942627, 0.005748608112335205, 0.005675136089324951, 0.0056516480445861815, 0.005632448196411133, 0.005626239776611328, 0.005699584007263184, 0.005359712123870849, 0.005623167991638184, 0.0056284480094909665, 0.0056112961769104, 0.005625088214874268, 0.005630655765533447, 0.005624095916748047, 0.005631999969482422, 0.005639872074127197, 0.0056112961769104, 0.005655072212219238, 0.005627071857452393, 0.005654911994934082, 0.005632448196411133, 0.005637951850891113, 0.005666240215301514, 0.005645055770874024, 0.0056442880630493165, 0.005633696079254151, 0.00559116792678833, 0.005650527954101562, 0.0056275839805603025, 0.005645088195800781, 0.005648032188415527, 0.005647456169128418, 0.005626783847808838, 0.005641791820526123, 0.005642687797546387, 0.005654528141021729, 0.005666719913482666, 0.005681248188018799, 0.005616703987121582, 0.005692351818084717, 0.005592864036560058, 0.0059836478233337405, 0.006132415771484375, 0.005727935791015625, 0.005654047966003418, 0.005653120040893554, 0.005643775939941406, 0.005660575866699219, 
0.005641088008880615, 0.005658624172210694, 0.005666816234588623, 0.005648447990417481, 0.0056070079803466795, 0.0056733121871948245, 0.005619711875915527, 0.00564134407043457, 0.005649280071258545, 0.005631648063659668, 0.005693439960479736, 0.0056200637817382815, 0.005695712089538574, 0.005643743991851807, 0.00566918420791626, 0.00564195203781128, 0.005624032020568848, 0.005900288105010986, 0.005596640110015869, 0.005650176048278809, 0.005622432231903076, 0.0055912318229675295, 0.005355616092681884, 0.005615615844726562, 0.005631999969482422, 0.005607135772705078, 0.005611680030822754, 0.005619584083557129, 0.005613632202148437, 0.005640384197235108, 0.005605375766754151, 0.005611519813537597, 0.005601280212402344, 0.005688576221466064, 0.005614336013793945, 0.005607423782348633, 0.0056442880630493165, 0.005631999969482422, 0.0056239042282104496, 0.005652383804321289, 0.0056295042037963865, 0.0056713600158691405, 0.005662975788116455, 0.005635776042938232, 0.00560748815536499, 0.005619711875915527, 0.005683199882507324, 0.005601280212402344, 0.005617663860321045, 0.005618720054626465, 0.0055797438621521, 0.005756896018981933, 0.005601344108581543, 0.005631968021392823, 0.005629951953887939, 0.00561897611618042, 0.005646399974822998, 0.005585440158843994, 0.005641632080078125, 0.0056388797760009764, 0.005816256046295166, 0.0056341118812561035, 0.005597184181213379, 0.005629856109619141, 0.005622975826263428, 0.005854112148284912, 0.005631135940551758, 0.00562604808807373, 0.005663392066955567, 0.005687295913696289, 0.0056217598915100095, 0.0056217598915100095, 0.005758848190307617, 0.005726336002349853, 0.00560038423538208, 0.005608320236206055, 0.005636288166046143, 0.00562332820892334, 0.005646624088287353, 0.005607423782348633, 0.005659904003143311, 0.005585663795471192, 0.005621024131774902, 0.005611487865447998, 0.005690112113952637, 0.005453855991363526, 0.005606304168701172, 0.0056375679969787595, 0.005620160102844238, 0.005621183872222901, 0.005635776042938232, 0.005622367858886718, 0.005662528038024903, 0.005937280178070069, 0.00563043212890625, 0.0056130561828613285, 0.005636000156402588, 0.005609151840209961, 0.005593472003936768, 0.005627456188201904, 0.005616608142852783, 0.005656383991241455, 0.005609632015228272, 0.0055931200981140135, 0.005646336078643799, 0.005617663860321045, 0.005658624172210694, 0.0056557440757751464, 0.005640768051147461, 0.005670591831207276, 0.00563046407699585, 0.005632031917572022, 0.005650527954101562, 0.005650368213653565, 0.005703680038452149, 0.005650303840637207, 0.0056730880737304685, 0.005679103851318359, 0.005654528141021729, 0.005650368213653565, 0.0056310720443725586, 0.005663712024688721, 0.005848159790039062, 0.005644544124603272, 0.005648096084594726, 0.005657536029815674, 0.00579744005203247, 0.005595327854156494, 0.005713568210601806, 0.005626463890075683, 0.005631999969482422, 0.005636096000671387, 0.005617472171783447, 0.005642079830169678, 0.00560368013381958, 0.005652736186981202, 0.005630879878997803, 0.005639008045196533, 0.005669087886810303, 0.00566044807434082, 0.005652480125427246, 0.005661856174468994, 0.005663584232330323, 0.005618879795074463, 0.005614175796508789, 0.005652031898498535, 0.00562662410736084, 0.005636000156402588, 0.005375999927520752, 0.0056315197944641114, 0.0056713919639587405, 0.005623807907104492, 0.0056770238876342775, 0.005619743824005127, 0.0056341438293457035, 0.005662623882293701, 0.005659808158874511, 0.005697887897491455, 0.005615871906280517, 0.005660927772521972, 0.005638144016265869, 
0.0056217598915100095, 0.00565177583694458, 0.005618368148803711, 0.005644320011138916, 0.0056295042037963865, 0.005616032123565674, 0.005615359783172608, 0.005607583999633789, 0.0056341438293457035, 0.005613344192504883, 0.0056304001808166505, 0.005625472068786621, 0.005598432064056397, 0.005622719764709473, 0.005629216194152832, 0.005669600009918213, 0.005627520084381103, 0.005623968124389649, 0.005648672103881836, 0.005606592178344727, 0.005630720138549805, 0.005629951953887939, 0.005588736057281494, 0.005650047779083252, 0.0055747518539428715, 0.00564031982421875, 0.005608191967010498, 0.005797120094299316, 0.005656991958618164, 0.005595136165618897, 0.005638144016265869, 0.005672192096710205, 0.005630144119262695, 0.005636191844940185, 0.005612095832824707, 0.005643455982208252, 0.005626656055450439, 0.005631872177124023, 0.005635615825653076, 0.005594816207885742, 0.005630815982818603, 0.005617728233337402, 0.005615551948547363, 0.005640192031860352, 0.005582848072052002, 0.005607423782348633, 0.005590623855590821, 0.005623839855194092, 0.0055972480773925784, 0.005695807933807373, 0.005393184185028076, 0.005619711875915527, 0.005691391944885254, 0.005646336078643799, 0.005634047985076904, 0.005646336078643799, 0.0055948801040649416, 0.005628032207489014, 0.005593215942382813, 0.005654528141021729, 0.005608672142028808, 0.005633056163787842, 0.005641759872436524, 0.005624032020568848, 0.005621503829956054, 0.00578326416015625, 0.006793248176574707, 0.005968480110168457, 0.006121535778045654, 0.006065279960632324, 0.006225823879241943, 0.005679935932159424, 0.005709824085235596, 0.005649792194366455, 0.005668672084808349, 0.005650335788726806, 0.005661824226379395, 0.005689023971557617, 0.00562992000579834, 0.005666111946105957, 0.005671743869781494, 0.005660223960876465, 0.00564899206161499, 0.005675903797149658, 0.005711935997009278, 0.00565340805053711, 0.005711775779724121, 0.005658783912658691, 0.00566864013671875, 0.00566921615600586, 0.005686079978942871, 0.0056863360404968265, 0.00565180778503418, 0.005661280155181885, 0.005638144016265869, 0.0056275839805603025, 0.005677375793457031, 0.005662720203399658, 0.005703616142272949, 0.005717376232147217, 0.005651135921478271, 0.005695487976074219, 0.005649759769439698, 0.005667232036590576, 0.00566707181930542, 0.005662720203399658, 0.0056217598915100095, 0.005629951953887939, 0.005713920116424561, 0.006149824142456055, 0.005664351940155029, 0.005663455963134766, 0.005689343929290771, 0.005354015827178955, 0.005670911788940429, 0.005636096000671387, 0.005616703987121582, 0.005655007839202881, 0.005667295932769776, 0.0056741762161254886, 0.005665599822998047, 0.0056399679183959965, 0.005656511783599854, 0.00562614393234253, 0.005640192031860352, 0.005623519897460938, 0.005597472190856934, 0.00564793586730957, 0.005618112087249756, 0.005654528141021729, 0.00565772819519043, 0.005661568164825439, 0.005623807907104492, 0.005607423782348633, 0.005684607982635498, 0.005642879962921143, 0.005670911788940429, 0.0059169921875, 0.005620960235595703, 0.0061567678451538085, 0.0056260800361633305, 0.005664544105529785, 0.005631999969482422, 0.005646336078643799, 0.005646336078643799, 0.005640192031860352, 0.005671040058135986, 0.005652160167694092, 0.005652671813964844, 0.005668416023254395, 0.005740992069244385, 0.005679103851318359, 0.005678431987762451, 0.0056900157928466795, 0.005641600131988525, 0.005773151874542236, 0.005727007865905762, 0.005675007820129394, 0.005646336078643799, 0.005645567893981933, 0.0056921601295471195, 
0.0056442880630493165, 0.00566431999206543, 0.005652575969696045, 0.00563750410079956, 0.005667808055877685, 0.005656576156616211, 0.005700640201568604, 0.005724319934844971, 0.005716032028198243, 0.005663487911224365, 0.00564134407043457, 0.005685887813568115, 0.005666975975036621, 0.0056976318359375, 0.005817728042602539]",tokens/s,174.95210150115787,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,939.741184,12518.883328,0.0,12123.635712,12121.851904,s,1,7.0910576171875,7.0910576171875,0.0,7.0910576171875,7.0910576171875,7.0910576171875,7.0910576171875,[7.0910576171875],,kWh,5.995084929149167e-06,6.536890547941898e-07,3.3272248840046648e-06,9.975998867948022e-06,,MB,1327.08352,12544.049152,0.0,12138.315776,10311.21664,s,10,3.5256805114746097,0.352568051147461,0.009494322369546622,0.35446546936035156,0.3594026153564453,0.36019141998291015,0.36082246368408205,"[0.3253225402832031, 0.35563198852539063, 0.35113504028320314, 0.3545672607421875, 0.3541093444824219, 0.35436367797851565, 0.3530037536621094, 0.360980224609375, 0.3592273254394531, 0.35733935546875]",tokens/s,726.1009588555386,kWh,9.95890694555568e-06,1.0977229504491317e-06,6.659088660599758e-06,1.771571855660457e-05,tokens/kWh,14450444.060851319,MB,1356.386304,12550.340608,0.0,12144.607232,10311.2192,s,10,27.021487060546875,2.702148706054687,0.0018136617898095334,2.7023939208984373,2.7035677001953125,2.704416174316406,2.705094953613281,"[2.70197265625, 2.697788818359375, 2.702718505859375, 2.703379150390625, 2.7020693359375, 2.7013095703125, 2.7052646484375, 2.701328369140625, 2.70283154296875, 2.702824462890625]",tokens/s,23.314779034490705,kWh,7.923303308861126e-05,8.74009368760228e-06,5.241868082380049e-05,0.000140391807600014,tokens/kWh,448744.13312984305,,s,630,27.01925434875488,0.04288770531548394,0.00024527835886048937,0.042839584350585935,0.04304192733764648,0.04312481708526611,0.04451238708496094,"[0.04444156646728516, 0.04303683090209961, 0.042803009033203124, 0.0428120002746582, 0.042782943725585935, 0.042649375915527345, 0.04270486450195313, 0.04277657699584961, 0.04268854522705078, 0.042724735260009764, 0.04273401641845703, 0.04300614547729492, 0.042923038482666015, 0.043199455261230466, 0.043055103302001956, 0.04285235214233398, 0.04270406341552734, 0.04286547088623047, 0.04267827224731445, 0.04275609588623047, 0.04283737564086914, 0.043025150299072265, 0.04293519973754883, 0.043248607635498044, 0.042877086639404295, 0.04295459365844727, 0.042799102783203126, 0.04283801651000976, 0.04288716888427734, 0.04286019134521484, 0.04292233657836914, 0.04276188659667969, 0.042799423217773434, 0.04267216110229492, 0.04277657699584961, 0.04289945602416992, 0.0430571517944336, 0.04291900634765625, 0.04287376022338867, 0.04286848068237305, 0.04288332748413086, 0.042778560638427734, 0.04279062271118164, 0.04290569686889648, 0.043151649475097656, 0.043040737152099606, 0.042969120025634765, 0.042941600799560546, 
0.042879806518554685, 0.04281139373779297, 0.04283801651000976, 0.04283801651000976, 0.04276547241210937, 0.042807296752929686, 0.04292620849609375, 0.0428100814819336, 0.042906654357910155, 0.04269120025634766, 0.04271299362182617, 0.04276416015625, 0.042816062927246094, 0.042839103698730466, 0.04287910461425781, 0.044458560943603516, 0.04315571212768555, 0.04294451141357422, 0.042840065002441405, 0.0427147216796875, 0.042723232269287106, 0.04269311904907227, 0.04267737579345703, 0.04273651123046875, 0.0428807373046875, 0.04280319976806641, 0.0428172492980957, 0.04302396774291992, 0.042775104522705075, 0.04268646240234375, 0.04265532684326172, 0.04262790298461914, 0.042674175262451174, 0.04271923065185547, 0.04273971176147461, 0.0426343994140625, 0.042799583435058595, 0.04283536148071289, 0.04284902572631836, 0.04273916625976563, 0.04277078247070312, 0.04264729690551758, 0.042801822662353516, 0.04277596664428711, 0.042773086547851565, 0.04273971176147461, 0.042716320037841794, 0.042713951110839844, 0.04262412643432617, 0.04272012710571289, 0.04281536102294922, 0.042848384857177735, 0.04282745742797851, 0.04278300857543945, 0.042657825469970705, 0.04272127914428711, 0.042788864135742184, 0.04270428848266602, 0.042758750915527347, 0.0429854736328125, 0.04294607925415039, 0.0429859504699707, 0.04288211059570313, 0.042883071899414066, 0.042732479095458985, 0.0428438720703125, 0.04280963134765625, 0.0428741455078125, 0.04283670425415039, 0.04280115127563477, 0.04287807846069336, 0.04272422409057617, 0.04279667282104492, 0.04280358505249023, 0.04284393692016602, 0.04274739074707031, 0.04295139312744141, 0.0428581428527832, 0.04452614212036133, 0.043184127807617184, 0.04284764862060547, 0.04288572692871094, 0.042815486907958986, 0.04268761444091797, 0.04274879837036133, 0.04273561477661133, 0.04272860717773438, 0.04280972671508789, 0.04289728164672851, 0.04288735961914063, 0.04276675033569336, 0.042950016021728515, 0.042749824523925783, 0.042758750915527347, 0.042724544525146485, 0.042816062927246094, 0.04284662246704102, 0.04281753540039063, 0.04297318267822266, 0.04300822448730469, 0.04305887985229492, 0.04292822265625, 0.04297843170166016, 0.04286064147949219, 0.0428633918762207, 0.04301824188232422, 0.042893310546875, 0.042842113494873046, 0.042782463073730466, 0.04284646224975586, 0.04274358367919922, 0.04273955154418945, 0.04284454345703125, 0.04282777786254883, 0.042832927703857424, 0.042971134185791016, 0.04305750274658203, 0.04282953643798828, 0.04278953552246094, 0.04274639892578125, 0.04281894302368164, 0.04295654296875, 0.043026496887207034, 0.04306179046630859, 0.04304006576538086, 0.04287097549438477, 0.04292659378051758, 0.042847999572753905, 0.04279510498046875, 0.0428873291015625, 0.04297727966308594, 0.042907520294189455, 0.04285248184204102, 0.04279257583618164, 0.0427196159362793, 0.042760032653808594, 0.042770401000976566, 0.04306697463989258, 0.043055713653564455, 0.04279814529418945, 0.04293523025512695, 0.04465673446655274, 0.04332950210571289, 0.04286671829223633, 0.04273971176147461, 0.04281081771850586, 0.042971710205078124, 0.04273551940917969, 0.04286268615722656, 0.04305920028686523, 0.042772159576416016, 0.042729312896728516, 0.04274428939819336, 0.042708992004394535, 0.04292144012451172, 0.042791454315185544, 0.042788864135742184, 0.04286259078979492, 0.04283801651000976, 0.04289945602416992, 0.042777854919433596, 0.04288179016113281, 0.043096065521240234, 0.04302547073364258, 0.042973407745361326, 0.04301078414916992, 0.04306470489501953, 0.04291798400878906, 
0.04286019134521484, 0.04277478408813477, 0.042756736755371096, 0.042821632385253904, 0.04272515106201172, 0.04285257720947266, 0.042962944030761716, 0.042897407531738284, 0.04273107147216797, 0.042888992309570315, 0.042803230285644533, 0.04273011016845703, 0.04285440063476562, 0.042864543914794925, 0.042840160369873044, 0.04290316772460938, 0.04288550567626953, 0.043097633361816406, 0.042955135345458986, 0.042866783142089845, 0.0429035530090332, 0.04293222427368164, 0.04296908950805664, 0.043074817657470704, 0.04302710342407227, 0.042790302276611326, 0.04279536056518555, 0.04272572708129883, 0.04271462249755859, 0.04294451141357422, 0.04282624053955078, 0.04287676620483399, 0.042848289489746096, 0.04305263900756836, 0.04298761749267578, 0.04274732971191406, 0.044504478454589845, 0.043321758270263674, 0.042931713104248044, 0.04273196792602539, 0.04266009521484375, 0.04270284652709961, 0.04280428695678711, 0.04277139282226562, 0.042823680877685545, 0.042790912628173826, 0.04274380874633789, 0.04270489501953125, 0.042764190673828126, 0.04271452713012695, 0.04273011016845703, 0.04280489730834961, 0.042795425415039064, 0.042759521484375, 0.04268508911132812, 0.0426618881225586, 0.04265484619140625, 0.042801471710205076, 0.04325843048095703, 0.043374591827392575, 0.04311040115356445, 0.043054622650146486, 0.04292860794067383, 0.04284415817260742, 0.04280464172363281, 0.04297379302978516, 0.042840065002441405, 0.042872833251953124, 0.042790431976318356, 0.042834335327148435, 0.04277459335327148, 0.04281958389282227, 0.042794719696044925, 0.04278492736816406, 0.042741153717041014, 0.04291043090820312, 0.04297727966308594, 0.04278025436401367, 0.042764705657958986, 0.043034175872802734, 0.04291219329833985, 0.04300320053100586, 0.042998462677001956, 0.043036670684814454, 0.04295884704589844, 0.04306739044189453, 0.042939712524414066, 0.04281817626953125, 0.0427613754272461, 0.042824607849121094, 0.0428295669555664, 0.043014400482177736, 0.042780670166015625, 0.04286259078979492, 0.04277862548828125, 0.04272915267944336, 0.042925567626953126, 0.04286751937866211, 0.04281283187866211, 0.04451561737060547, 0.0431346549987793, 0.04294854354858398, 0.042864864349365234, 0.0428851203918457, 0.042896831512451175, 0.042818111419677736, 0.04284415817260742, 0.04273766326904297, 0.04276406478881836, 0.04275737762451172, 0.04272803115844727, 0.04288345718383789, 0.04293427276611328, 0.04278441619873047, 0.04280355072021484, 0.04278476715087891, 0.04280649566650391, 0.04267702484130859, 0.042782718658447266, 0.04291340637207031, 0.04305881500244141, 0.04311427307128906, 0.043058143615722654, 0.042891265869140625, 0.042870849609375, 0.04280928039550781, 0.042780670166015625, 0.04260825729370117, 0.042874496459960935, 0.04288710403442383, 0.04283065414428711, 0.04275609588623047, 0.04286185455322265, 0.042758880615234376, 0.04274585723876953, 0.042700801849365234, 0.04281958389282227, 0.04278179168701172, 0.0427529296875, 0.04279286575317383, 0.04288931274414062, 0.04279500961303711, 0.0429117431640625, 0.0429117431640625, 0.04288716888427734, 0.043162784576416015, 0.043117408752441404, 0.04292607879638672, 0.042921760559082034, 0.042805152893066405, 0.04279328155517578, 0.042815486907958986, 0.04276428985595703, 0.042840065002441405, 0.04279635238647461, 0.04284070587158203, 0.042876991271972656, 0.042864639282226565, 0.04275923156738281, 0.042775390625, 0.04281967926025391, 0.042812767028808596, 0.044538944244384766, 0.04325062561035156, 0.04303247833251953, 0.04281100845336914, 0.04291836929321289, 
0.042889217376708984, 0.042934207916259765, 0.04292147064208984, 0.042818111419677736, 0.0429854736328125, 0.04278681564331055, 0.04283526229858398, 0.04289401626586914, 0.042848255157470705, 0.04267612838745117, 0.042840415954589844, 0.04278656005859375, 0.0427760009765625, 0.04288774490356445, 0.0429051513671875, 0.04281593704223633, 0.04326399993896484, 0.04316364669799805, 0.043107902526855466, 0.04293875122070313, 0.042934337615966794, 0.042921375274658204, 0.04276079940795898, 0.04287897491455078, 0.04289535903930664, 0.04289535903930664, 0.04274995040893555, 0.04280934524536133, 0.042786785125732425, 0.04284204864501953, 0.042817726135253906, 0.042979328155517575, 0.043005790710449215, 0.042886238098144534, 0.04282262420654297, 0.04278476715087891, 0.042853694915771484, 0.04306172943115234, 0.042938591003417965, 0.04311014556884766, 0.04309148788452148, 0.04302511978149414, 0.042997760772705076, 0.043007137298583985, 0.042903968811035156, 0.043100574493408206, 0.0428928337097168, 0.04295731353759766, 0.0429752311706543, 0.04293017578125, 0.04294819259643555, 0.04300252914428711, 0.04290848159790039, 0.04269891357421875, 0.04276508712768555, 0.042805057525634765, 0.04282726287841797, 0.04284281539916992, 0.044598560333251956, 0.04320537567138672, 0.04294652938842773, 0.042893310546875, 0.042774528503417966, 0.04274585723876953, 0.04275814437866211, 0.04275948715209961, 0.04278956985473633, 0.04282572937011719, 0.042842113494873046, 0.042735294342041014, 0.04279328155517578, 0.042858497619628906, 0.04277766418457031, 0.04283603286743164, 0.04278499221801758, 0.04268304061889648, 0.042771903991699216, 0.042775104522705075, 0.04282767868041992, 0.04283564758300781, 0.04295065689086914, 0.04320707321166992, 0.04313087844848633, 0.04301824188232422, 0.04293427276611328, 0.042872833251953124, 0.0428353271484375, 0.04278540802001953, 0.04274358367919922, 0.042901729583740236, 0.04282572937011719, 0.042853824615478514, 0.0428650894165039, 0.042821758270263674, 0.04271615982055664, 0.042801822662353516, 0.04277196884155274, 0.04275491333007812, 0.04278476715087891, 0.04281865692138672, 0.04281564712524414, 0.04299033737182617, 0.04286975860595703, 0.04286800003051758, 0.04291926574707031, 0.042920318603515625, 0.04300799942016602, 0.04294041442871094, 0.04288889694213867, 0.0428895378112793, 0.04277862548828125, 0.04277657699584961, 0.04274585723876953, 0.04268396759033203, 0.042866657257080075, 0.04283622360229492, 0.04289257431030274, 0.04286313629150391, 0.04279951858520508, 0.04279478454589844, 0.042733440399169924, 0.04454291152954101, 0.04317184066772461, 0.042891265869140625, 0.042984798431396486, 0.0427977294921875, 0.042796993255615236, 0.04275791931152344, 0.04274332809448242, 0.04279372787475586, 0.04283715057373047, 0.04287910461425781, 0.042963680267333985, 0.042798751831054686, 0.042772830963134764, 0.042774528503417966, 0.042730720520019534, 0.042736030578613284, 0.0426926383972168, 0.042655681610107424, 0.04276828765869141, 0.04301260757446289, 0.043063297271728515, 0.04310943984985351, 0.0429917106628418, 0.04308598327636719, 0.04282969665527344, 0.04291052627563476, 0.042969024658203125, 0.042993438720703124, 0.04285184097290039, 0.04280928039550781, 0.04294128036499024, 0.042816543579101564, 0.04276732635498047, 0.04271664047241211, 0.04279267120361328, 0.042762046813964845, 0.042894336700439455, 0.04282921600341797, 0.042715744018554686, 0.04268780899047851, 0.04278496170043945, 0.04280319976806641, 0.04286022567749023, 0.043175872802734376, 0.04305763244628906, 
0.04308969497680664, 0.04295068740844726, 0.042947006225585935, 0.0428873291015625, 0.04284793472290039, 0.04280966567993164, 0.0428985595703125, 0.04303142547607422, 0.04288211059570313, 0.042855358123779295, 0.042902622222900394, 0.043117183685302735, 0.04273404693603516, 0.04279590225219727, 0.042838657379150394, 0.04283599853515625, 0.04287055969238281, 0.044707870483398436, 0.043235294342041014, 0.0429035530090332, 0.042829822540283204, 0.042788864135742184, 0.04274380874633789, 0.04267769622802734, 0.042693183898925784, 0.04277811050415039, 0.042732032775878906, 0.042774528503417966, 0.042772384643554685, 0.04272457504272461, 0.042756542205810544, 0.04267257690429688, 0.04265100860595703, 0.04278745651245117, 0.04279500961303711, 0.04265574264526367, 0.042788864135742184, 0.042858497619628906, 0.042898624420166016, 0.04300003051757813, 0.043012702941894534, 0.042872833251953124, 0.042942047119140625, 0.04347331237792969, 0.04296908950805664, 0.04287311935424805, 0.04278243255615234, 0.042831871032714845, 0.04279500961303711, 0.04280883026123047, 0.04277503967285156, 0.042883071899414066, 0.04278681564331055, 0.0429054069519043, 0.04280899047851563, 0.04272329711914063, 0.042695232391357425, 0.042764225006103516, 0.042925567626953126, 0.04290412902832031, 0.04292607879638672, 0.04303257751464844, 0.04301004791259765, 0.042893310546875, 0.04291788864135742, 0.043020286560058595, 0.04299980926513672, 0.04298294448852539, 0.04303071975708008, 0.04327657699584961, 0.04297068786621094, 0.04291219329833985, 0.04280313491821289, 0.04277814483642578, 0.04281331253051758, 0.04287276840209961, 0.04296777725219727, 0.04302796936035156, 0.042813953399658204, 0.04278659057617187]",tokens/s,23.31670563029553,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 843, in __init__ self.transformer = FalconModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 650, in __init__ self.h = nn.ModuleList([FalconDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 650, in self.h = nn.ModuleList([FalconDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 420, in __init__ self.mlp = FalconMLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 405, in __init__ self.dense_4h_to_h = FalconLinear(4 * hidden_size, hidden_size, bias=config.bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1024.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 1006.12 MiB is free. Process 199328 has 13.76 GiB memory in use. Of the allocated memory 13.64 GiB is allocated by PyTorch, and 1.50 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 990, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 565, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 139, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 166.12 MiB is free. Process 136962 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,783.659008,1106.182144,0.0,710.934528,657.419264,s,1,7.05105859375,7.05105859375,0.0,7.05105859375,7.05105859375,7.05105859375,7.05105859375,[7.05105859375],,kWh,2.202895187519971e-06,2.3594645826461773e-07,0.0,2.438841645784589e-06,,MB,1154.048,1150.222336,0.0,744.48896,582.372352,s,21,0.34496889877319337,0.01642709041777111,0.0007742651983539773,0.016256927490234375,0.016360639572143554,0.016419744491577147,0.01918410911560059,"[0.019875200271606445, 0.01612614440917969, 0.0162511043548584, 0.01623356819152832, 0.016419744491577147, 0.01624127960205078, 0.016256927490234375, 0.01615715217590332, 0.01620195198059082, 0.01619811248779297, 0.016309696197509764, 0.01617840003967285, 0.016360639572143554, 0.01628441619873047, 0.016258655548095705, 0.016328128814697265, 0.016202848434448244, 0.016271615982055666, 0.016319295883178712, 0.01632364845275879, 0.01617036819458008]",tokens/s,15584.013570842391,kWh,6.129782504288806e-07,6.759974087109422e-08,4.079006531176538e-07,1.0884786444176286e-06,tokens/kWh,235190650.0994958,MB,1165.881344,1171.193856,0.0,765.46048,582.374912,s,21,9.861971099853514,0.46961767142159594,0.0015522680961810973,0.46928976440429687,0.4714923095703125,0.4716328125,0.47389763183593747,"[0.47017724609375, 0.46980416870117186, 0.47446383666992187, 0.4695044860839844, 0.4688964538574219, 0.469240234375, 0.4675565490722656, 0.4714923095703125, 0.46747354125976565, 0.4716328125, 0.47092059326171876, 0.47007916259765625, 0.46998968505859373, 0.46915087890625, 0.4682867736816406, 0.46879379272460936, 0.46901904296875, 0.46928976440429687, 0.4695080261230469, 0.46755807495117185, 
0.4691336669921875]",tokens/s,134.15168089669734,kWh,1.3432311777348723e-05,1.481352660628371e-06,6.6641552690726686e-06,2.1577819707049762e-05,tokens/kWh,2919664.7694399385,,s,1323,9.856276663780205,0.007449944568239011,0.0001204959173266853,0.007427680015563965,0.007527481746673584,0.0076103262424469,0.007899578285217285,"[0.007407072067260742, 0.007580192089080811, 0.007585792064666748, 0.007608160018920899, 0.007400864124298095, 0.007393919944763184, 0.007436351776123047, 0.0074158720970153805, 0.007421567916870118, 0.007475840091705322, 0.007452415943145752, 0.007387135982513428, 0.007468255996704101, 0.007508768081665039, 0.007419648170471191, 0.007436575889587402, 0.007427296161651611, 0.0074882559776306154, 0.007559167861938477, 0.007354368209838867, 0.007419424057006836, 0.007387616157531738, 0.007409664154052734, 0.007437983989715576, 0.007419360160827637, 0.007389535903930664, 0.007455264091491699, 0.007455840110778809, 0.007428864002227783, 0.00744217586517334, 0.00741212797164917, 0.0073975038528442385, 0.007477119922637939, 0.00740556812286377, 0.007437695980072021, 0.007452864170074463, 0.007414112091064453, 0.0073966398239135745, 0.0076471037864685055, 0.007437248229980469, 0.00738099193572998, 0.00740556812286377, 0.007440447807312012, 0.0074584641456604, 0.0074263358116149905, 0.007433504104614258, 0.007424960136413574, 0.00747708797454834, 0.007441823959350586, 0.007471648216247559, 0.007431551933288574, 0.007453311920166015, 0.00745027208328247, 0.007756256103515625, 0.00747705602645874, 0.007513567924499511, 0.007440832138061524, 0.0074683837890625, 0.007492288112640381, 0.007512191772460938, 0.007458816051483155, 0.007505375862121582, 0.007615007877349853, 0.007315360069274903, 0.007415040016174317, 0.007428864002227783, 0.007435872077941895, 0.0074817600250244145, 0.007426047801971435, 0.007429887771606445, 0.00740169620513916, 0.007442463874816894, 0.007539807796478271, 0.007417984008789062, 0.007439136028289795, 0.007409023761749268, 0.007393407821655274, 0.007364416122436523, 0.007374911785125732, 0.0074117441177368164, 0.007432000160217285, 0.0074202880859375, 0.007436831951141357, 0.007401343822479248, 0.007411712169647216, 0.007417856216430664, 0.007487520217895508, 0.007441760063171387, 0.0074369277954101564, 0.007448351860046386, 0.007407584190368652, 0.007387392044067383, 0.007401472091674805, 0.007441855907440185, 0.007426623821258545, 0.007428095817565918, 0.007431263923645019, 0.007513311862945557, 0.007406655788421631, 0.007678592205047607, 0.007600128173828125, 0.007513887882232666, 0.007694560050964356, 0.007522175788879395, 0.007507264137268066, 0.00761033582687378, 0.007455584049224854, 0.007415359973907471, 0.007388959884643555, 0.007451295852661133, 0.007460864067077637, 0.007479296207427978, 0.007480800151824951, 0.007470623970031738, 0.007416831970214844, 0.007391232013702393, 0.007452672004699707, 0.007497727870941162, 0.007448095798492432, 0.007465439796447754, 0.007469056129455566, 0.007407616138458252, 0.007442431926727295, 0.007532735824584961, 0.007550784111022949, 0.007448768138885498, 0.007382847785949707, 0.00748035192489624, 0.00744543981552124, 0.007409599781036377, 0.007370751857757568, 0.007407616138458252, 0.0074301438331604, 0.007458943843841553, 0.0074852161407470705, 0.007488768100738525, 0.007443295955657959, 0.007393280029296875, 0.007415840148925781, 0.007446335792541504, 0.007454912185668946, 0.007443903923034668, 0.007442848205566406, 0.007511648178100586, 0.0074287037849426266, 0.007440063953399658, 0.007423423767089844, 
0.007391327857971191, 0.007450975894927978, 0.0074225602149963375, 0.007393055915832519, 0.00738099193572998, 0.007368351936340332, 0.0074568638801574706, 0.007427616119384766, 0.007444384098052978, 0.007439551830291748, 0.0074460158348083495, 0.007510144233703614, 0.007405663967132568, 0.007434144020080567, 0.007522592067718506, 0.007433951854705811, 0.007462528228759766, 0.008919424057006835, 0.009375519752502441, 0.008029664039611816, 0.007535359859466553, 0.0074992961883544925, 0.00748528003692627, 0.007486239910125732, 0.0074750399589538575, 0.007434400081634521, 0.0074217281341552735, 0.007513311862945557, 0.007480160236358642, 0.007527488231658936, 0.007543200016021728, 0.007480991840362549, 0.007480192184448242, 0.0075920639038085935, 0.007440256118774414, 0.007401472091674805, 0.0074629120826721195, 0.007831552028656007, 0.007510015964508057, 0.0076249918937683105, 0.007566112041473389, 0.007506879806518554, 0.007452640056610107, 0.007468959808349609, 0.007427807807922363, 0.007417952060699463, 0.007442527770996093, 0.007432415962219239, 0.007439839839935303, 0.007483551979064942, 0.007399807929992676, 0.007415808200836181, 0.007389023780822754, 0.007397535800933838, 0.00771398401260376, 0.007424543857574463, 0.007419392108917237, 0.007381792068481445, 0.007391232013702393, 0.007407616138458252, 0.007442048072814941, 0.007442815780639649, 0.007460864067077637, 0.007447711944580078, 0.007435103893280029, 0.007383039951324463, 0.007362559795379638, 0.007517248153686523, 0.007460063934326172, 0.0074237117767333985, 0.007442431926727295, 0.007400544166564942, 0.007368800163269043, 0.007379776000976563, 0.007421887874603271, 0.007423615932464599, 0.007440832138061524, 0.007419551849365234, 0.00740835189819336, 0.0074297599792480466, 0.007420928001403809, 0.007437312126159668, 0.007411712169647216, 0.007753727912902832, 0.007716127872467041, 0.007525087833404541, 0.007485439777374267, 0.0074235520362854, 0.007436736106872559, 0.007527455806732177, 0.007490528106689453, 0.00747276782989502, 0.007473536014556885, 0.0074601278305053715, 0.007395423889160156, 0.007387775897979737, 0.007400864124298095, 0.007414400100708008, 0.007458591938018799, 0.007426239967346191, 0.007427807807922363, 0.007409887790679932, 0.00737286376953125, 0.00744652795791626, 0.007483391761779785, 0.007422143936157226, 0.007585951805114746, 0.007640575885772705, 0.00750438404083252, 0.00743612813949585, 0.007407616138458252, 0.00738099193572998, 0.007406816005706787, 0.007391615867614746, 0.007399648189544678, 0.007426080226898194, 0.007413919925689697, 0.007360191822052002, 0.0073173117637634275, 0.007405600070953369, 0.007395328044891358, 0.007776735782623291, 0.007437664031982422, 0.007567903995513916, 0.007434368133544922, 0.0074670081138610836, 0.007802879810333252, 0.007406847953796387, 0.00744649600982666, 0.007453472137451172, 0.0074301438331604, 0.007444447994232177, 0.007536672115325928, 0.007374847888946533, 0.007376575946807861, 0.007460319995880127, 0.007406271934509277, 0.0074403839111328125, 0.007419871807098389, 0.007452191829681397, 0.007372767925262451, 0.00735097599029541, 0.0074301438331604, 0.007407360076904297, 0.007417247772216797, 0.007450719833374023, 0.007387807846069336, 0.0074011521339416506, 0.007398848056793213, 0.0073769278526306156, 0.0075560321807861324, 0.007428095817565918, 0.007411903858184814, 0.00736627197265625, 0.0073118720054626465, 0.007388768196105957, 0.007389279842376709, 0.007413856029510498, 0.0073827199935913084, 0.007421664237976074, 0.007391007900238037, 
0.007389120101928711, 0.007427999973297119, 0.007391808032989502, 0.007438079833984375, 0.007660096168518067, 0.007426047801971435, 0.007370751857757568, 0.007354207992553711, 0.007397600173950195, 0.007415584087371826, 0.007527455806732177, 0.007455711841583252, 0.0074481601715087895, 0.007407519817352295, 0.0073751678466796875, 0.007334080219268799, 0.007368288040161133, 0.007378528118133545, 0.007381984233856201, 0.00741974401473999, 0.007387423992156982, 0.007290080070495606, 0.00737334394454956, 0.00769209623336792, 0.00739958381652832, 0.007368703842163086, 0.007409664154052734, 0.007308928012847901, 0.007321983814239502, 0.007374335765838623, 0.007381504058837891, 0.007369984149932862, 0.007940864086151123, 0.007437952041625976, 0.007358880043029785, 0.007376863956451416, 0.007382847785949707, 0.00743228816986084, 0.007380256175994873, 0.007414591789245605, 0.007385024070739746, 0.0074263038635253905, 0.007372608184814453, 0.007368703842163086, 0.007439616203308105, 0.007448895931243897, 0.007502272129058838, 0.007419072151184082, 0.007472991943359375, 0.0074122557640075686, 0.007456416130065918, 0.007459424018859863, 0.007434271812438965, 0.007768159866333008, 0.007684160232543946, 0.007593503952026367, 0.007549407958984375, 0.007499872207641601, 0.007534143924713135, 0.007540863990783691, 0.007573215961456299, 0.007495520114898682, 0.0075000319480896, 0.007479712009429932, 0.007510015964508057, 0.007399456024169922, 0.007374815940856934, 0.007408991813659668, 0.007397215843200683, 0.007425983905792236, 0.00738047981262207, 0.007551487922668457, 0.007374847888946533, 0.007343552112579346, 0.007354015827178955, 0.007488416194915771, 0.007394591808319092, 0.007375296115875244, 0.007397247791290283, 0.007342495918273926, 0.007376383781433105, 0.007406079769134521, 0.007397247791290283, 0.007417503833770752, 0.007436768054962158, 0.007397408008575439, 0.007335360050201416, 0.007328288078308105, 0.007387487888336181, 0.0076836800575256345, 0.007445792198181153, 0.00743609619140625, 0.007431136131286621, 0.0073842878341674805, 0.007342879772186279, 0.007415808200836181, 0.007376128196716308, 0.007487711906433106, 0.00743887996673584, 0.007378943920135498, 0.007329792022705078, 0.007319551944732666, 0.007378592014312744, 0.007395679950714111, 0.007423615932464599, 0.007360896110534668, 0.007347936153411865, 0.007358304023742676, 0.007383488178253174, 0.007364607810974121, 0.007419904232025146, 0.007387135982513428, 0.007358335971832276, 0.007286911964416504, 0.007387135982513428, 0.007447999954223633, 0.007424255847930908, 0.007395135879516602, 0.007380671977996826, 0.007723328113555908, 0.007463263988494873, 0.007483551979064942, 0.007490943908691406, 0.007450496196746826, 0.0074531202316284175, 0.007446847915649414, 0.007388288021087646, 0.00737779188156128, 0.007606272220611572, 0.0076943359375, 0.007422175884246826, 0.007450399875640869, 0.007413760185241699, 0.007372960090637207, 0.0077940158843994144, 0.007420576095581054, 0.007388319969177246, 0.007392096042633057, 0.007423999786376953, 0.007378655910491944, 0.0074932479858398435, 0.007708928108215332, 0.008310527801513671, 0.008026783943176269, 0.007952415943145752, 0.007511168003082275, 0.007591904163360596, 0.007392288208007813, 0.007366655826568603, 0.007411392211914063, 0.0074544320106506344, 0.007393343925476074, 0.00742416000366211, 0.007653600215911866, 0.007419392108917237, 0.0073671360015869145, 0.007442080020904541, 0.007440767765045166, 0.007423679828643798, 0.007395648002624512, 0.007377920150756836, 
0.007325695991516113, 0.00733900785446167, 0.007447648048400879, 0.007426271915435791, 0.007381696224212647, 0.007378496170043945, 0.0073482880592346195, 0.0073381118774414065, 0.007358719825744629, 0.007440224170684815, 0.007422111988067627, 0.008077312469482421, 0.007475200176239013, 0.0074035201072692874, 0.0073424320220947265, 0.007351583957672119, 0.007418240070343017, 0.007383264064788818, 0.007390463829040527, 0.0073794879913330075, 0.007421440124511719, 0.007358975887298584, 0.007361599922180176, 0.007543680191040039, 0.00767955207824707, 0.00768665599822998, 0.0075304961204528805, 0.0074563522338867184, 0.007442431926727295, 0.007427775859832763, 0.0073796801567077635, 0.007462207794189453, 0.007448863983154297, 0.00746127986907959, 0.007429696083068847, 0.007374752044677735, 0.007442368030548096, 0.007401408195495606, 0.007373760223388672, 0.007421919822692871, 0.007397408008575439, 0.00738918399810791, 0.007427680015563965, 0.007356832027435303, 0.007309567928314209, 0.007367616176605225, 0.007445312023162841, 0.007370751857757568, 0.007423647880554199, 0.00739686393737793, 0.007344992160797119, 0.007358560085296631, 0.0074198079109191895, 0.0074301438331604, 0.007413760185241699, 0.007540736198425293, 0.0074035201072692874, 0.007390336036682129, 0.007358496189117432, 0.007430592060089111, 0.007410079956054688, 0.007412864208221436, 0.007410560131072998, 0.007407616138458252, 0.007406720161437988, 0.007391520023345947, 0.007400383949279785, 0.007418591976165772, 0.007455679893493652, 0.007460864067077637, 0.0073400321006774905, 0.007350143909454346, 0.007530623912811279, 0.007385087966918945, 0.007391488075256348, 0.0074254398345947265, 0.007417600154876709, 0.007342688083648682, 0.0074629120826721195, 0.007442431926727295, 0.0074301438331604, 0.007433311939239502, 0.0074208321571350095, 0.007413536071777344, 0.00737718391418457, 0.007407551765441895, 0.007442431926727295, 0.007521279811859131, 0.007668928146362304, 0.007501023769378662, 0.007489920139312744, 0.007422175884246826, 0.00738918399810791, 0.0073619518280029295, 0.007420032024383545, 0.007465312004089356, 0.007407519817352295, 0.007407839775085449, 0.007373792171478271, 0.007370463848114014, 0.0073883838653564455, 0.007637824058532715, 0.007456768035888672, 0.007451968193054199, 0.00745472002029419, 0.0074414081573486324, 0.007372576236724854, 0.007309279918670654, 0.007415743827819824, 0.007435840129852295, 0.007417856216430664, 0.0073855361938476565, 0.007383039951324463, 0.007337120056152344, 0.00753872013092041, 0.007477119922637939, 0.007472064018249512, 0.00745472002029419, 0.007441792011260986, 0.007864960193634033, 0.00751529598236084, 0.007526527881622314, 0.008338144302368164, 0.007725056171417236, 0.007474431991577149, 0.007439104080200196, 0.007466271877288818, 0.007449312210083008, 0.007481344223022461, 0.0074503359794616695, 0.0074570560455322265, 0.0074275522232055665, 0.007432735919952392, 0.007501823902130127, 0.00740556812286377, 0.007450623989105225, 0.007444543838500977, 0.007413119792938233, 0.007363135814666748, 0.007387135982513428, 0.007434239864349365, 0.007563551902770996, 0.007470816135406494, 0.007419904232025146, 0.007391232013702393, 0.007383039951324463, 0.007472447872161865, 0.007438047885894776, 0.007432608127593994, 0.007492479801177979, 0.007435967922210693, 0.007408927917480468, 0.007396063804626465, 0.0073842878341674805, 0.007424799919128418, 0.007815167903900147, 0.007796735763549805, 0.007600128173828125, 0.007546879768371582, 0.007561215877532959, 0.0074670081138610836, 
0.00742195177078247, 0.00738262414932251, 0.007324128150939941, 0.007419839859008789, 0.007368703842163086, 0.007436287879943848, 0.007368703842163086, 0.00738108777999878, 0.008138655662536621, 0.008627679824829102, 0.007445024013519287, 0.00740556812286377, 0.00739737606048584, 0.007443456172943115, 0.007414783954620361, 0.0074035201072692874, 0.007380256175994873, 0.00740825605392456, 0.007333759784698486, 0.007383296012878418, 0.0073862080574035646, 0.007526912212371826, 0.007465343952178955, 0.007472832202911377, 0.007377120018005371, 0.007395008087158203, 0.007360447883605957, 0.007432960033416748, 0.007436351776123047, 0.007419551849365234, 0.007577600002288819, 0.0074301438331604, 0.0074113597869873045, 0.007399775981903076, 0.007409664154052734, 0.007395328044891358, 0.007401792049407959, 0.0074011521339416506, 0.007358463764190673, 0.00739081621170044, 0.007375264167785644, 0.007415808200836181, 0.00740499210357666, 0.00742790412902832, 0.00745907211303711, 0.007436607837677002, 0.0073975038528442385, 0.007505824089050293, 0.007488800048828125, 0.007597152233123779, 0.0076203842163085934, 0.007491583824157715, 0.007473152160644531, 0.007438208103179932, 0.0075285758972167965, 0.0075304961204528805, 0.007903232097625732, 0.00749289608001709, 0.007549920082092285, 0.007505663871765137, 0.00744217586517334, 0.00740172815322876, 0.00739686393737793, 0.007305471897125244, 0.007360320091247558, 0.007437856197357178, 0.007431136131286621, 0.007433504104614258, 0.0074700479507446285, 0.0074135041236877445, 0.007395328044891358, 0.008044544219970704, 0.007440159797668457, 0.007446559906005859, 0.00750816011428833, 0.00744652795791626, 0.00745472002029419, 0.0074702720642089845, 0.007406400203704834, 0.00738646411895752, 0.007617184162139893, 0.007479296207427978, 0.007491583824157715, 0.0075304961204528805, 0.007475200176239013, 0.007409023761749268, 0.007414400100708008, 0.007385087966918945, 0.007485439777374267, 0.007526400089263916, 0.0074338879585266115, 0.007626431941986084, 0.007477536201477051, 0.007487872123718262, 0.007419904232025146, 0.007427680015563965, 0.0074203200340271, 0.0073994240760803225, 0.007412064075469971, 0.007441376209259033, 0.007369408130645752, 0.007415167808532715, 0.007383679866790772, 0.007423999786376953, 0.007428095817565918, 0.007405600070953369, 0.007391200065612793, 0.007455935955047607, 0.0074225602149963375, 0.007565248012542725, 0.0074336638450622555, 0.007439199924468994, 0.007491583824157715, 0.007419904232025146, 0.007391232013702393, 0.007406943798065185, 0.007446944236755371, 0.007446752071380615, 0.0074486079216003415, 0.007439487934112548, 0.0074720001220703125, 0.007372479915618896, 0.007389503955841065, 0.0074403839111328125, 0.0074711360931396485, 0.007798367977142334, 0.007540287971496582, 0.007563712120056153, 0.007497727870941162, 0.007479296207427978, 0.007411231994628906, 0.007533023834228516, 0.00742195177078247, 0.007458816051483155, 0.007550655841827392, 0.007440703868865967, 0.007460031986236572, 0.007408671855926514, 0.007374623775482178, 0.007401408195495606, 0.0074169921875, 0.007418496131896973, 0.0074304318428039555, 0.007526656150817871, 0.007386879920959473, 0.007339583873748779, 0.007367104053497314, 0.007415520191192627, 0.007423871994018554, 0.007408031940460205, 0.00745030403137207, 0.007413919925689697, 0.007388864040374756, 0.007479775905609131, 0.007452672004699707, 0.007448575973510742, 0.007458367824554443, 0.007560736179351807, 0.007478176116943359, 0.007426047801971435, 0.0074217281341552735, 
0.00746723222732544, 0.007437952041625976, 0.0076282558441162105, 0.007475776195526123, 0.007463263988494873, 0.007477375984191894, 0.00736243200302124, 0.007403295993804931, 0.007423327922821045, 0.007402368068695068, 0.007419904232025146, 0.007438240051269532, 0.007412064075469971, 0.007378464221954346, 0.0075155520439147945, 0.007463871955871582, 0.007443456172943115, 0.007430751800537109, 0.007419616222381592, 0.007465536117553711, 0.007428095817565918, 0.007413760185241699, 0.007446656227111817, 0.0076102399826049804, 0.007510015964508057, 0.0076574721336364745, 0.007514111995697022, 0.007511392116546631, 0.007523071765899658, 0.007569407939910889, 0.007491583824157715, 0.00743833589553833, 0.007493631839752197, 0.0074403839111328125, 0.007436287879943848, 0.007428095817565918, 0.007425568103790283, 0.007390687942504883, 0.007561215877532959, 0.00744755220413208, 0.007400703907012939, 0.00744649600982666, 0.007418144226074219, 0.007456607818603515, 0.007346848011016846, 0.007398655891418457, 0.007400383949279785, 0.007483104228973389, 0.0074170241355896, 0.0074085121154785155, 0.0073825597763061525, 0.0074611520767211914, 0.007413983821868897, 0.007434239864349365, 0.007411712169647216, 0.007398719787597656, 0.007439040184020996, 0.007444447994232177, 0.007393311977386474, 0.007379199981689453, 0.007575295925140381, 0.007439871788024902, 0.0074882559776306154, 0.0074237117767333985, 0.007401855945587158, 0.007413407802581787, 0.007401472091674805, 0.0074997758865356446, 0.007466047763824463, 0.007564223766326904, 0.007479296207427978, 0.007432191848754883, 0.007407264232635498, 0.0073885760307312014, 0.007574463844299316, 0.0074479360580444335, 0.007455359935760498, 0.007464799880981445, 0.007559328079223633, 0.0074405760765075685, 0.007398655891418457, 0.0074143362045288085, 0.00740880012512207, 0.0074577279090881346, 0.007437376022338868, 0.007437151908874512, 0.007385087966918945, 0.00743552017211914, 0.007395296096801758, 0.007406623840332032, 0.0074319357872009275, 0.00738486385345459, 0.007407360076904297, 0.007766496181488037, 0.0076943359375, 0.00752569580078125, 0.007405663967132568, 0.007405824184417724, 0.007588160037994385, 0.007437439918518066, 0.007469984054565429, 0.0074237117767333985, 0.007376448154449463, 0.007404255867004395, 0.0073690562248229985, 0.007399072170257568, 0.007444575786590576, 0.007448480129241943, 0.007468704223632812, 0.007481152057647705, 0.00737337589263916, 0.007390655994415283, 0.007387680053710937, 0.0073990077972412105, 0.007410079956054688, 0.00742195177078247, 0.007417856216430664, 0.007344128131866455, 0.00739247989654541, 0.007396128177642822, 0.007407616138458252, 0.007449600219726562, 0.007411903858184814, 0.007356959819793701, 0.007446815967559814, 0.007407616138458252, 0.007407616138458252, 0.007437632083892822, 0.007407680034637452, 0.007431871891021728, 0.0074559998512268065, 0.00749241590499878, 0.0074126081466674805, 0.007424191951751709, 0.007415616035461426, 0.00746073579788208, 0.00737395191192627, 0.007387872219085693, 0.007395167827606201, 0.007418303966522217, 0.00740067195892334, 0.00740835189819336, 0.007394368171691894, 0.007358719825744629, 0.007348991870880127, 0.007358367919921875, 0.007444575786590576, 0.00742195177078247, 0.00742412805557251, 0.007561183929443359, 0.007392831802368164, 0.0074915518760681156, 0.007392928123474121, 0.007399648189544678, 0.00737065601348877, 0.007585311889648437, 0.007486015796661377, 0.007456831932067871, 0.007435840129852295, 0.007395423889160156, 0.007447135925292969, 
0.007476736068725586, 0.007402847766876221, 0.007405824184417724, 0.007419519901275635, 0.007384031772613525, 0.007343520164489746, 0.007360127925872803, 0.007427040100097656, 0.007428319931030274, 0.007886623859405518, 0.0074668159484863285, 0.0074544639587402345, 0.0074592318534851074, 0.007425151824951172, 0.007443359851837158, 0.007409664154052734, 0.00738918399810791, 0.007421088218688965, 0.007418303966522217, 0.007317920207977295, 0.007359712123870849, 0.007398176193237305, 0.007395328044891358, 0.0074297599792480466, 0.007418240070343017, 0.007505919933319092, 0.007425407886505127, 0.007376704216003418, 0.007424831867218018, 0.007641088008880615, 0.007438623905181884, 0.0074503359794616695, 0.007475391864776611, 0.007395103931427002, 0.0073554558753967285, 0.007379936218261719, 0.007421599864959717, 0.007389408111572265, 0.0074154877662658695, 0.007376319885253906, 0.007459839820861816, 0.0075071358680725096, 0.007457087993621827, 0.007444128036499024, 0.007396192073822022, 0.007419360160827637, 0.007414271831512451, 0.007413792133331299, 0.007429471969604492, 0.0074349122047424315, 0.007423999786376953, 0.0074301438331604, 0.007482528209686279, 0.007555935859680176, 0.00745472002029419, 0.007386528015136719, 0.007298751831054688, 0.0074652800559997555, 0.0074193282127380375, 0.0074471039772033695, 0.007477248191833496, 0.007444223880767822, 0.007436543941497803, 0.007573503971099854, 0.007650911808013916, 0.0076763200759887695, 0.007480639934539795, 0.0076622719764709475, 0.007489568233489991, 0.007480319976806641, 0.0074572482109069825, 0.007378687858581543, 0.007402239799499511, 0.007432447910308838, 0.00743398380279541, 0.007485439777374267, 0.0074403839111328125, 0.007407360076904297, 0.007409920215606689, 0.007356416225433349, 0.0073744959831237794, 0.007479648113250733, 0.007458816051483155, 0.00740556812286377, 0.007411712169647216, 0.007346496105194091, 0.007355743885040283, 0.007385183811187744, 0.007421567916870118, 0.007430784225463867, 0.007426047801971435, 0.007415103912353516, 0.00738483190536499, 0.007377855777740479, 0.007409664154052734, 0.007395328044891358, 0.007437664031982422, 0.007402143955230713, 0.007478816032409668, 0.0074572482109069825, 0.007405151844024658, 0.0074551358222961425, 0.0074301438331604, 0.007538591861724854, 0.007511616230010986, 0.007451168060302734, 0.007403456211090088, 0.007400896072387695, 0.007399487972259522, 0.0074654722213745115, 0.007441855907440185, 0.007434879779815674, 0.007417856216430664, 0.0074059200286865235, 0.007390143871307373, 0.007428800106048584, 0.0074108800888061524, 0.0074422721862792965, 0.007484384059906006, 0.007384768009185791, 0.00740556812286377, 0.007364607810974121, 0.007553311824798584, 0.007501535892486573, 0.007477248191833496, 0.007483200073242187, 0.007476672172546387, 0.007478015899658203, 0.00744652795791626, 0.007552927970886231, 0.007425183773040771, 0.007475776195526123, 0.007471487998962402, 0.007505472183227539, 0.007452864170074463, 0.007397823810577393, 0.0073807997703552244, 0.007491583824157715, 0.007438496112823486, 0.007454815864562988, 0.007425087928771973, 0.007422656059265137, 0.0074217281341552735, 0.007438208103179932, 0.0073753600120544435, 0.0074074559211730956, 0.007362368106842041, 0.0074180479049682614, 0.0074033279418945315, 0.007390399932861328, 0.007400415897369385, 0.007370783805847168, 0.007423391819000244, 0.007436895847320556, 0.007414944171905517, 0.007420767784118652, 0.007516160011291504, 0.0074035201072692874, 0.007380415916442871, 0.007457344055175781, 
0.007866112232208252, 0.007475456237792969, 0.007447999954223633, 0.007459392070770263, 0.007374080181121826, 0.007372896194458008, 0.007385695934295654, 0.007450816154479981, 0.0074626879692077635, 0.00753059196472168, 0.007440320014953613, 0.007413824081420898, 0.0073722882270812985, 0.007404032230377197, 0.007432191848754883, 0.007458816051483155, 0.0074414401054382325, 0.007435232162475586, 0.007406688213348388, 0.0073693118095397945, 0.007425504207611084, 0.007719776153564453, 0.007370719909667969, 0.0073920321464538575, 0.007440032005310059, 0.007444479942321777, 0.007384575843811035, 0.007323647975921631, 0.007406079769134521, 0.007419904232025146, 0.007425536155700683, 0.007435935974121094, 0.007397280216217041, 0.007416768074035645, 0.007402624130249023, 0.0078017921447753906, 0.007726367950439453, 0.007510111808776855, 0.007517983913421631, 0.0075541439056396485, 0.00749126386642456, 0.007434239864349365, 0.00739737606048584, 0.007385087966918945, 0.007434239864349365, 0.007497727870941162, 0.007429279804229736, 0.007428959846496582, 0.007427999973297119, 0.007390528202056885, 0.007346816062927246, 0.0074057278633117675, 0.007405151844024658, 0.007436384201049804, 0.007452991962432862, 0.007391232013702393, 0.007507967948913574, 0.0074301438331604, 0.0074301438331604, 0.00763862419128418, 0.007478847980499268, 0.007473311901092529, 0.007442527770996093, 0.007415775775909424, 0.007389120101928711, 0.007461567878723144, 0.007417856216430664, 0.007423327922821045, 0.0074349122047424315, 0.00741103982925415, 0.007395999908447266, 0.00737721586227417, 0.007433919906616211, 0.007442463874816894, 0.0074254398345947265, 0.0074834561347961424, 0.007457024097442627, 0.007503712177276611, 0.007458752155303955, 0.007427807807922363, 0.007461311817169189, 0.007455039978027344, 0.0074824318885803225, 0.007519167900085449, 0.007429471969604492, 0.007389535903930664, 0.007477248191833496, 0.0074141759872436526, 0.007448575973510742, 0.007409952163696289, 0.007431903839111328, 0.007427519798278809, 0.007442527770996093, 0.007365087985992432, 0.0073702077865600586, 0.007425759792327881, 0.007432064056396484, 0.007401855945587158, 0.007418432235717773, 0.007424032211303711, 0.00736681604385376, 0.007321407794952392, 0.007401023864746094, 0.007405663967132568, 0.007436575889587402, 0.007403584003448487, 0.007368703842163086, 0.00739302396774292, 0.007388480186462402, 0.007915743827819824, 0.007415616035461426, 0.0074126400947570805, 0.007550816059112549, 0.007433728218078613, 0.007363232135772705, 0.007391232013702393, 0.007391232013702393, 0.007413760185241699, 0.0073935680389404295, 0.0074563841819763185, 0.007415904045104981, 0.00738918399810791, 0.00734611177444458, 0.0074271678924560545, 0.007408607959747315, 0.00742195177078247, 0.007436287879943848, 0.007361792087554932, 0.007377247810363769, 0.0073703999519348145, 0.007486207962036133, 0.0073985280990600585, 0.007390207767486572, 0.0073788161277770995, 0.007368703842163086, 0.007464831829071045, 0.0074460158348083495, 0.0074154877662658695, 0.007406527996063232, 0.0073994240760803225, 0.007427807807922363, 0.007379231929779052, 0.007372799873352051, 0.007428095817565918, 0.007441792011260986, 0.007406208038330078, 0.007455904006958008, 0.007374720096588135, 0.0074290881156921385, 0.007436287879943848, 0.007396416187286377, 0.007447487831115722, 0.007448768138885498, 0.007438144207000732, 0.007436287879943848, 0.007382656097412109, 0.007389567852020264, 0.007370751857757568, 0.007396383762359619, 0.007451615810394287, 0.007420928001403809, 
0.007565695762634278, 0.007374591827392578, 0.007317696094512939, 0.007388959884643555, 0.007411808013916016, 0.007525184154510498, 0.007819263935089112, 0.007655168056488037, 0.007550687789916992, 0.007457312107086182, 0.007350272178649903, 0.007395328044891358, 0.007411392211914063, 0.007461184024810791, 0.007456768035888672, 0.007456768035888672, 0.007401472091674805, 0.007415808200836181, 0.007362559795379638, 0.007639039993286132, 0.007485439777374267, 0.007468639850616455, 0.007450047969818115, 0.007506912231445313, 0.007417119979858398, 0.007436160087585449, 0.007389344215393066, 0.0074124159812927245, 0.007407904148101807, 0.007435999870300293, 0.0074035201072692874, 0.007391488075256348, 0.007367487907409668, 0.007447648048400879, 0.007462080001831055, 0.007447199821472168, 0.007439455986022949, 0.007427072048187256, 0.007501120090484619, 0.007405856132507324, 0.00743996810913086, 0.0074124479293823245, 0.007433919906616211, 0.0074693760871887204, 0.007474559783935547, 0.007369344234466553, 0.007403071880340576, 0.0074514241218566896, 0.007447711944580078, 0.007429855823516846]",tokens/s,134.2291866523748,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, 
**model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 784, in __init__ self.norm_attn_norm = DbrxNormAttentionNorm( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 596, in __init__ self.attn = DBRX_ATTENTION_CLASSES[config._attn_implementation]( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 288, in __init__ self.Wqkv = nn.Linear( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 190.12 MiB is free. Process 97205 has 14.55 GiB memory in use. Of the allocated memory 14.44 GiB is allocated by PyTorch, and 1.55 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,784.490496,1835.99104,0.0,1440.743424,1427.743744,s,1,7.05362646484375,7.05362646484375,0.0,7.05362646484375,7.05362646484375,7.05362646484375,7.05362646484375,[7.05362646484375],,kWh,3.1605274458532523e-06,3.4151649806889433e-07,1.0275008220025916e-06,4.529544765924738e-06,,MB,1122.238464,1905.197056,0.0,1499.46368,1436.386304,s,12,0.5033521575927734,0.041946013132731115,0.002437993024933074,0.041618270874023436,0.042279608917236325,0.04542972679138183,0.048501982345581056,"[0.04927004623413086, 0.0406033935546875, 0.04117107009887695, 0.041630622863769534, 0.04228764724731445, 0.04175177764892578, 0.0382490234375, 0.041605918884277344, 0.04119152069091797, 0.042180385589599606, 0.04120348739624023, 0.0422072639465332]",tokens/s,6103.083007911408,kWh,1.6151644267725357e-06,1.7801160114274064e-07,1.0779382472486129e-06,2.871114275163889e-06,tokens/kWh,89163988.425848,MB,1134.297088,1947.140096,0.0,1541.40672,1461.974016,s,12,10.205782958984374,0.8504819132486979,0.004450718362695324,0.8499222717285156,0.8550165710449218,0.8571534606933594,0.8591819616699219,"[0.8453379516601562, 0.8471861572265625, 0.84700537109375, 0.8516913452148438, 0.855078857421875, 0.8440980834960937, 0.8474777221679688, 0.8596890869140625, 0.8511321411132813, 0.8539178466796875, 0.8544559936523437, 0.84871240234375]",tokens/s,74.07564936842759,kWh,2.4443933527395122e-05,2.6958722395343424e-06,1.371781433008491e-05,4.0857620097014364e-05,tokens/kWh,1541940.031025049,,s,756,10.199776342391967,0.013491767648666624,0.0003095512837554598,0.013427152156829835,0.013713024139404298,0.013854999780654909,0.014646281766891499,"[0.01340822410583496, 0.013647263526916503, 0.013458016395568848, 0.01344332790374756, 0.013385472297668457, 0.013342720031738281, 0.013332480430603028, 0.014006624221801758, 0.013272831916809082, 0.013309856414794922, 0.013288928031921386, 0.013208095550537109, 0.013518560409545898, 0.013271231651306153, 0.013410400390625, 0.013361215591430664, 0.013408479690551757, 0.01322979164123535, 0.013428735733032226, 0.013270591735839844, 0.013275584220886231, 0.013336031913757325, 0.01352348804473877, 0.013321375846862793, 0.013333344459533691, 0.013344127655029297, 0.013388416290283204, 0.01339577579498291, 0.013289376258850098, 0.013285663604736328, 0.01333078384399414, 0.01328444766998291, 0.01333420753479004, 0.013455391883850098, 0.013326592445373535, 0.01332694435119629, 0.013494272232055664, 0.013565664291381835, 0.013385120391845704, 0.013366144180297851, 0.013340703964233399, 0.013662176132202148, 0.01349619197845459, 0.013391231536865235, 0.013453920364379883, 0.013270719528198242, 0.014025343894958495, 0.014036255836486817, 0.013308608055114746, 
0.013387455940246581, 0.01335324764251709, 0.01330515193939209, 0.013455039978027343, 0.013410816192626953, 0.013345439910888672, 0.013300895690917969, 0.013264479637145997, 0.01327945613861084, 0.013420639991760254, 0.013355263710021973, 0.013418944358825683, 0.013468671798706054, 0.01372809600830078, 0.01319696044921875, 0.013486720085144044, 0.013742143630981446, 0.013416128158569336, 0.013461471557617188, 0.013457375526428223, 0.013316415786743164, 0.013350879669189453, 0.01339395236968994, 0.016293600082397462, 0.014012319564819336, 0.013524864196777344, 0.013332127571105957, 0.013531040191650391, 0.013469951629638672, 0.013308735847473144, 0.01332038402557373, 0.013338080406188965, 0.013319519996643067, 0.013375840187072753, 0.013427328109741211, 0.013338527679443359, 0.013627391815185547, 0.0134269437789917, 0.013291359901428223, 0.013361056327819825, 0.013373056411743163, 0.013236288070678712, 0.0133220796585083, 0.013337408065795898, 0.013377183914184571, 0.013356191635131836, 0.013342816352844239, 0.013502304077148437, 0.013654175758361817, 0.013277952194213866, 0.013316448211669923, 0.013428383827209473, 0.013324480056762695, 0.013279040336608887, 0.013289471626281739, 0.013314047813415527, 0.013381855964660645, 0.013432607650756836, 0.013284768104553223, 0.013314656257629395, 0.013371135711669922, 0.013319647789001465, 0.013308256149291993, 0.013320575714111329, 0.013322208404541016, 0.013276960372924805, 0.013396415710449219, 0.013391103744506836, 0.013347455978393555, 0.013483807563781739, 0.013443296432495117, 0.013432479858398438, 0.013350496292114258, 0.013302528381347656, 0.013484383583068848, 0.01351030445098877, 0.013400159835815429, 0.01305628776550293, 0.013363200187683106, 0.013541407585144043, 0.01337936019897461, 0.013598912239074707, 0.013531423568725586, 0.013371104240417481, 0.013404159545898438, 0.013520895957946777, 0.013326335906982421, 0.013740032196044923, 0.013465023994445801, 0.013394495964050294, 0.013385472297668457, 0.01391641616821289, 0.013385727882385253, 0.013502464294433594, 0.013448543548583984, 0.013494688034057617, 0.013322400093078614, 0.01336508846282959, 0.01334502410888672, 0.013443072319030762, 0.013334527969360351, 0.013436927795410156, 0.013346816062927246, 0.013340543746948243, 0.013359392166137695, 0.013380479812622071, 0.013283712387084961, 0.01342131233215332, 0.01337123203277588, 0.013348863601684571, 0.013537280082702637, 0.013381695747375489, 0.01333891201019287, 0.013424287796020508, 0.013277376174926758, 0.013459263801574707, 0.01361315155029297, 0.013475104331970215, 0.013499072074890137, 0.013500288009643554, 0.013486080169677735, 0.013534496307373047, 0.013417247772216798, 0.013428288459777832, 0.013474464416503906, 0.013387776374816895, 0.013714495658874512, 0.013518912315368652, 0.013438719749450684, 0.01352079963684082, 0.013454336166381836, 0.01346560001373291, 0.01348198413848877, 0.01336729621887207, 0.013351072311401367, 0.013444255828857422, 0.013365728378295898, 0.013443296432495117, 0.01343283176422119, 0.013365023612976075, 0.013445152282714843, 0.013670368194580078, 0.01389363193511963, 0.013668000221252441, 0.013844415664672852, 0.013506655693054198, 0.013579808235168456, 0.01340681552886963, 0.013412544250488281, 0.013544544219970703, 0.01354640007019043, 0.013340671539306641, 0.013625344276428223, 0.013496128082275391, 0.013654208183288575, 0.013518143653869628, 0.013585087776184081, 0.01368883228302002, 0.013862175941467285, 0.013517536163330079, 0.013381983757019043, 0.013548928260803223, 
0.013458047866821289, 0.013364288330078125, 0.013541983604431153, 0.013402112007141113, 0.013481344223022461, 0.013352704048156739, 0.013351743698120117, 0.013287487983703614, 0.013450240135192871, 0.013369791984558106, 0.01331884765625, 0.013378623962402345, 0.013407039642333984, 0.013369471549987792, 0.013414239883422852, 0.013477055549621583, 0.01341648006439209, 0.013447999954223633, 0.013365344047546386, 0.013301952362060547, 0.013393183708190919, 0.013314496040344239, 0.013352095603942871, 0.013357919692993164, 0.013282655715942383, 0.013346752166748048, 0.013581024169921876, 0.013391039848327636, 0.013487008094787598, 0.01352079963684082, 0.013613056182861329, 0.013782143592834473, 0.013744000434875489, 0.0137739839553833, 0.01427228832244873, 0.01369491195678711, 0.01354150390625, 0.0134717435836792, 0.013450592041015626, 0.013504223823547363, 0.013583295822143555, 0.01328774356842041, 0.013508607864379883, 0.013430111885070801, 0.013505184173583984, 0.013428447723388671, 0.013557279586791992, 0.013537887573242188, 0.013537535667419434, 0.013779935836791992, 0.01362831974029541, 0.01411689567565918, 0.013744256019592286, 0.013943872451782226, 0.01387609577178955, 0.013835455894470215, 0.013697792053222656, 0.013537599563598632, 0.013730496406555175, 0.01357852840423584, 0.013553407669067383, 0.01427295970916748, 0.013732383728027344, 0.013717503547668456, 0.013940799713134765, 0.014055328369140625, 0.013815839767456055, 0.014133248329162598, 0.013613056182861329, 0.013536704063415528, 0.013923935890197754, 0.01353212833404541, 0.0134717435836792, 0.01345529556274414, 0.013439040184020996, 0.013359135627746582, 0.013436896324157715, 0.013484064102172852, 0.013417856216430664, 0.013386336326599121, 0.013424223899841308, 0.013386143684387206, 0.01343283176422119, 0.013434687614440917, 0.013345184326171875, 0.01337116813659668, 0.013613056182861329, 0.013378687858581543, 0.013348896026611328, 0.013351743698120117, 0.013328415870666504, 0.013459456443786622, 0.013330431938171386, 0.013371392250061035, 0.013540736198425293, 0.013556351661682129, 0.013577471733093262, 0.013585503578186036, 0.013409279823303222, 0.01340073585510254, 0.013334624290466309, 0.01334876823425293, 0.013356672286987305, 0.013375871658325196, 0.013189120292663574, 0.01337929630279541, 0.013275424003601074, 0.013559679985046386, 0.013383808135986329, 0.013305855751037597, 0.013293408393859863, 0.013280768394470215, 0.013607487678527833, 0.013598464012145995, 0.013479488372802734, 0.013421183586120605, 0.013342944145202637, 0.013311936378479003, 0.013330431938171386, 0.013222975730895996, 0.01327785587310791, 0.013295904159545898, 0.013545503616333008, 0.013604576110839844, 0.013579520225524902, 0.013306879997253418, 0.013303263664245606, 0.013306400299072266, 0.014053279876708985, 0.01340544033050537, 0.013426976203918457, 0.013216320037841798, 0.013614527702331543, 0.013408672332763672, 0.013406368255615234, 0.013430784225463867, 0.013444543838500977, 0.013392448425292969, 0.013703328132629394, 0.013373279571533203, 0.013720735549926758, 0.01353164768218994, 0.013404671669006347, 0.013342559814453125, 0.013473024368286133, 0.013214495658874512, 0.01333347225189209, 0.013247488021850586, 0.01329315185546875, 0.01334928035736084, 0.013363455772399902, 0.013242303848266601, 0.013319168090820312, 0.013223936080932617, 0.013257599830627442, 0.013401439666748047, 0.013256608009338379, 0.013238975524902344, 0.013502816200256347, 0.0133024320602417, 0.013310144424438477, 0.013302304267883302, 0.013311615943908691, 
0.013327008247375488, 0.013455360412597657, 0.013381631851196289, 0.013362591743469238, 0.01315881633758545, 0.013442655563354493, 0.013373855590820313, 0.01336025619506836, 0.013410464286804199, 0.013302080154418945, 0.013273664474487305, 0.013451104164123534, 0.01343446445465088, 0.013415936470031739, 0.013366175651550292, 0.013602815628051757, 0.013381119728088378, 0.013449983596801758, 0.013314047813415527, 0.013498111724853516, 0.013358847618103027, 0.013291040420532226, 0.013425375938415527, 0.013330207824707032, 0.013285056114196777, 0.013379936218261718, 0.013373439788818359, 0.013357248306274414, 0.013774847984313965, 0.013658016204833985, 0.01364521598815918, 0.013893919944763184, 0.013737631797790526, 0.013673215866088868, 0.013637632369995116, 0.013557600021362306, 0.013457695960998535, 0.013634783744812012, 0.013416192054748536, 0.013355936050415039, 0.013399040222167969, 0.01359769630432129, 0.013463071823120117, 0.013420096397399902, 0.013398943901062011, 0.013385727882385253, 0.01359017562866211, 0.013733375549316406, 0.01360588836669922, 0.0133855037689209, 0.013579520225524902, 0.013425472259521485, 0.01345695972442627, 0.013344863891601562, 0.01333091163635254, 0.013332351684570313, 0.013292896270751952, 0.013351584434509277, 0.013404159545898438, 0.013340031623840332, 0.013387968063354492, 0.013299455642700195, 0.013388480186462402, 0.013527039527893067, 0.013305024147033691, 0.013390496253967285, 0.013375264167785644, 0.013187935829162598, 0.013324288368225098, 0.013229920387268066, 0.013330592155456542, 0.01332953643798828, 0.01343564796447754, 0.013451680183410645, 0.013403871536254882, 0.0133471040725708, 0.013383392333984376, 0.013256064414978027, 0.013317055702209472, 0.013346495628356934, 0.013234272003173828, 0.013270751953125, 0.013400128364562988, 0.013412287712097168, 0.013514687538146972, 0.013644351959228515, 0.013603584289550781, 0.013654975891113282, 0.013507967948913574, 0.013492287635803223, 0.013406304359436036, 0.013365728378295898, 0.01333897590637207, 0.013368960380554198, 0.013288800239562987, 0.013314463615417481, 0.014231295585632324, 0.015139583587646485, 0.014092320442199708, 0.014249728202819825, 0.013601792335510255, 0.013624320030212403, 0.013719552040100098, 0.013667424201965333, 0.01382691192626953, 0.014200063705444336, 0.014885536193847656, 0.013934752464294434, 0.01367676830291748, 0.013635199546813965, 0.013492095947265624, 0.013604831695556641, 0.013545791625976562, 0.013523072242736816, 0.013444992065429687, 0.013432479858398438, 0.014450528144836427, 0.016224992752075194, 0.014380831718444825, 0.01373369598388672, 0.013545663833618164, 0.013573439598083496, 0.0135600004196167, 0.013368160247802734, 0.013286111831665039, 0.013406271934509278, 0.013334783554077148, 0.01372873592376709, 0.013534879684448243, 0.013379584312438965, 0.013154335975646973, 0.013436767578125, 0.013322400093078614, 0.013309568405151366, 0.013239680290222168, 0.013417471885681152, 0.013260767936706544, 0.01321894359588623, 0.013327263832092285, 0.013352800369262695, 0.013333919525146485, 0.013380319595336913, 0.013496352195739745, 0.013463520050048828, 0.01349465560913086, 0.013536928176879882, 0.013527039527893067, 0.013529088020324707, 0.013454591751098633, 0.013445152282714843, 0.013431232452392578, 0.013770432472229003, 0.013558431625366211, 0.01348761558532715, 0.013430303573608398, 0.013540255546569823, 0.013412256240844727, 0.013521023750305176, 0.013347135543823243, 0.013308768272399902, 0.01348691177368164, 0.013363295555114747, 0.013340479850769043, 
0.013470975875854493, 0.013453920364379883, 0.013558015823364258, 0.01375267219543457, 0.013517663955688477, 0.013513759613037109, 0.01379139232635498, 0.013650976181030273, 0.013588864326477051, 0.013482208251953126, 0.013535231590270995, 0.013499903678894042, 0.013358912467956542, 0.013498656272888184, 0.01354793643951416, 0.013576319694519043, 0.01362502384185791, 0.013687232017517089, 0.013771776199340821, 0.01354422378540039, 0.013483231544494628, 0.013433792114257813, 0.013432640075683594, 0.013468768119812012, 0.01371337604522705, 0.013863615989685059, 0.013946304321289063, 0.013582367897033692, 0.013761311531066895, 0.013766143798828125, 0.017545183181762694, 0.014981120109558106, 0.013744128227233888, 0.01373139190673828, 0.013594847679138183, 0.013659520149230957, 0.013460639953613282, 0.013712672233581543, 0.01352131175994873, 0.013421792030334473, 0.013508671760559083, 0.013365983963012695, 0.01340403175354004, 0.013718879699707032, 0.013368063926696778, 0.013304863929748535, 0.013448127746582031, 0.013428223609924317, 0.013513248443603515, 0.013458944320678711, 0.013468000411987304, 0.01358403205871582, 0.013468192100524903, 0.013364607810974122, 0.013478752136230468, 0.013426464080810547, 0.013340319633483887, 0.013387743949890136, 0.013408448219299316, 0.013535743713378906, 0.01342835235595703, 0.013518912315368652, 0.013524991989135742, 0.013370495796203613, 0.01336793613433838, 0.013439104080200196, 0.013313183784484863, 0.013307200431823731, 0.013407551765441894, 0.013386079788208008, 0.013381631851196289, 0.013327360153198242, 0.013468192100524903, 0.0135665283203125, 0.013418399810791015, 0.013371392250061035, 0.013445440292358399, 0.013379263877868652, 0.013270688056945801, 0.013412416458129883, 0.013650208473205567, 0.013378879547119141, 0.013341376304626465, 0.01328553581237793, 0.013424480438232422, 0.013518815994262695, 0.013494175910949707, 0.013553055763244629, 0.013540063858032227, 0.013414400100708008, 0.013497823715209962, 0.01349891185760498, 0.013346015930175781, 0.013265983581542969, 0.01369388771057129, 0.013721599578857421, 0.013452896118164063, 0.013446944236755371, 0.013460096359252929, 0.013307904243469238, 0.013406016349792481, 0.013371583938598632, 0.013271039962768554, 0.013409631729125977, 0.013331071853637695, 0.013559712409973144, 0.016088479995727538, 0.014421792030334472, 0.013710335731506347, 0.013552895545959473, 0.013431776046752929, 0.013673760414123536, 0.013530591964721679, 0.013454303741455079, 0.013502495765686035, 0.013475808143615722, 0.013440959930419922, 0.01350214385986328, 0.01333523178100586, 0.01333625602722168, 0.013387776374816895, 0.013421919822692871, 0.013480607986450194, 0.013439264297485352, 0.01340608024597168, 0.013406047821044922, 0.013414400100708008, 0.013426688194274903, 0.013448320388793945, 0.013335424423217774, 0.013364992141723632, 0.013394304275512695, 0.01339788818359375, 0.01346889591217041, 0.014002079963684083, 0.013525664329528808, 0.0135863676071167, 0.015020319938659668, 0.013725695610046386, 0.013516799926757812, 0.013505536079406738, 0.013541567802429199, 0.013402303695678712, 0.013486592292785645, 0.013396096229553223, 0.013385727882385253, 0.013565695762634278, 0.013357312202453613, 0.013466976165771484, 0.013393856048583985, 0.013417247772216798, 0.013852607727050782, 0.013526399612426758, 0.013433792114257813, 0.013498047828674316, 0.013420415878295898, 0.013270591735839844, 0.013412799835205078, 0.01350607967376709, 0.013365728378295898, 0.013545056343078614, 0.013685152053833008, 
0.013596672058105469, 0.013448863983154298, 0.013770591735839844, 0.01367910385131836, 0.013684736251831055, 0.01406771183013916, 0.013525247573852539, 0.013422335624694824, 0.013424639701843261, 0.013389823913574218, 0.013389408111572266, 0.01344758415222168, 0.013356287956237793, 0.013380640029907227, 0.013453023910522461, 0.013321503639221192, 0.013586496353149414, 0.013456031799316407, 0.013325887680053711, 0.013464287757873535, 0.0134550724029541, 0.01358847999572754, 0.01370307159423828, 0.01363270378112793, 0.013635680198669434, 0.01342950439453125, 0.013340736389160156, 0.013404159545898438, 0.013271072387695312, 0.01328438377380371, 0.013447360038757324, 0.013423616409301758, 0.013270848274230957, 0.013330623626708985, 0.013438495635986328, 0.013371616363525391, 0.013604864120483399, 0.01338761615753174, 0.013346976280212402, 0.013465344429016113, 0.013562111854553223, 0.013656384468078613, 0.013416128158569336, 0.013345888137817383, 0.01353324794769287, 0.013357536315917969, 0.013326560020446777, 0.013439135551452637, 0.013342720031738281, 0.013330528259277344, 0.013381535530090333, 0.013297663688659669, 0.013287487983703614, 0.013334464073181153, 0.013524991989135742, 0.013743103981018067, 0.013544608116149902]",tokens/s,74.11927228815188,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1133, in __init__ self.model = StableLmModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in __init__ [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 689, in __init__ self.mlp = StableLmMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 273, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 270.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 188577 has 14.72 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 1.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 718, in __init__ self.dense_4h_to_h = 
nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 400.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 324.12 MiB is free. Process 40005 has 14.42 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 12.96 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,790.032384,1128.136704,0.0,725.614592,666.338304,s,1,7.60452978515625,7.60452978515625,0.0,7.60452978515625,7.60452978515625,7.60452978515625,7.60452978515625,[7.60452978515625],,kWh,2.653236949974295e-06,2.853269001876932e-07,8.597229099877035e-07,3.798286760149692e-06,,MB,1163.034624,1144.91392,0.0,731.906048,601.370624,s,17,0.28188111686706546,0.016581242168650905,0.0002503556381412495,0.016536575317382812,0.016665868377685546,0.016871347427368164,0.01738535499572754,"[0.017513856887817383, 0.016582176208496092, 0.01635910415649414, 0.016343839645385744, 0.016610431671142577, 0.01645587158203125, 0.01646246337890625, 0.016441535949707032, 0.016536575317382812, 0.016573408126831054, 0.01671072006225586, 0.016489248275756836, 0.016576927185058595, 0.016531007766723633, 0.016557024002075194, 0.016500959396362303, 0.016635967254638673]",tokens/s,15439.132810206635,kWh,5.893779522345633e-07,6.498720842991387e-08,3.922768595201214e-07,1.0466420201845986e-06,tokens/kWh,244591746.80837744,MB,1175.40864,1191.051264,0.0,778.043392,601.373184,s,17,9.7625556640625,0.5742679802389706,0.01288782132209082,0.5754475708007812,0.5897221313476563,0.5909682983398438,0.5927246655273438,"[0.5680513916015625, 0.5611118774414062, 0.5660057373046875, 0.56514013671875, 0.5539595336914063, 0.5611557006835938, 0.5547848510742187, 0.5644985961914063, 0.5865242309570312, 0.5892572631835937, 0.576564453125, 0.584846923828125, 0.589202880859375, 0.59041943359375, 0.5931637573242188, 0.5824213256835937, 0.5754475708007812]",tokens/s,109.70488024386064,kWh,1.5994909506562307e-05,1.7637408013391486e-06,7.014040793925019e-06,2.4772691101826472e-05,tokens/kWh,2543122.9792937213,,s,1071,9.754285382270812,0.009107642747218312,0.00030025609356596947,0.009130528450012207,0.009402624130249023,0.009475120067596435,0.00995367040634155,"[0.008843263626098634, 0.009051136016845703, 
0.009049087524414063, 0.009132032394409179, 0.009074175834655761, 0.009213727951049805, 0.009165535926818848, 0.009005056381225587, 0.00885142421722412, 0.008812576293945313, 0.008744959831237792, 0.008759296417236329, 0.008881664276123047, 0.00907478427886963, 0.009023136138916015, 0.008960576057434081, 0.00899830436706543, 0.009306912422180175, 0.00901734447479248, 0.009023072242736817, 0.008797599792480468, 0.008735648155212402, 0.008663007736206055, 0.00866316795349121, 0.008826815605163574, 0.008824671745300293, 0.008827008247375489, 0.008941632270812988, 0.008937503814697266, 0.008984416007995606, 0.008998751640319824, 0.00895417594909668, 0.009180383682250976, 0.009241375923156738, 0.009205568313598632, 0.009158528327941895, 0.00923680019378662, 0.009498271942138672, 0.00917743968963623, 0.0090316801071167, 0.009181183815002441, 0.00909721565246582, 0.009039039611816407, 0.009525407791137695, 0.008966143608093263, 0.00910108757019043, 0.0090632963180542, 0.009183232307434081, 0.009133952140808105, 0.009066816329956054, 0.008885248184204102, 0.008813632011413575, 0.00875001621246338, 0.008872063636779785, 0.009038528442382813, 0.009463583946228028, 0.009239808082580566, 0.00902182388305664, 0.00900806427001953, 0.00892073631286621, 0.00885747241973877, 0.008757632255554199, 0.00879958438873291, 0.009334591865539551, 0.008986399650573731, 0.00895631980895996, 0.009011199951171875, 0.009074687957763672, 0.009369600296020507, 0.009103391647338867, 0.009091039657592774, 0.009043007850646973, 0.008938336372375488, 0.009035712242126464, 0.009697440147399903, 0.009075776100158691, 0.009081600189208984, 0.009173279762268066, 0.00904319953918457, 0.009001728057861327, 0.00897804832458496, 0.008953408241271972, 0.008896960258483886, 0.008954143524169922, 0.00895094394683838, 0.008820608139038085, 0.008801247596740723, 0.00903987216949463, 0.009170944213867188, 0.009271295547485351, 0.009340352058410644, 0.009150336265563965, 0.009091584205627442, 0.00897987174987793, 0.00897103977203369, 0.008953856468200684, 0.008679391860961914, 0.008581151962280273, 0.008588512420654297, 0.008648960113525391, 0.008716832160949707, 0.008723456382751465, 0.008733247756958008, 0.008693887710571288, 0.008933695793151855, 0.008929280281066895, 0.00886963176727295, 0.008675583839416504, 0.008962112426757813, 0.008684543609619141, 0.008712960243225097, 0.008642335891723633, 0.008649120330810547, 0.008740127563476563, 0.008671968460083009, 0.008695551872253418, 0.008716544151306153, 0.008816639900207519, 0.008687616348266602, 0.008625344276428222, 0.008661503791809083, 0.008579392433166504, 0.00865231990814209, 0.008731103897094727, 0.008670592308044433, 0.00881932830810547, 0.008792063713073731, 0.00888319969177246, 0.008780799865722656, 0.008814592361450196, 0.008813568115234375, 0.008918016433715821, 0.008849408149719238, 0.00880025577545166, 0.008752320289611816, 0.008743552207946778, 0.008865983963012695, 0.008781824111938476, 0.008984576225280762, 0.008982879638671875, 0.00901206398010254, 0.008784319877624512, 0.008751487731933594, 0.008773632049560547, 0.008867839813232421, 0.009095168113708496, 0.009066495895385742, 0.008988672256469727, 0.009133952140808105, 0.009228032112121582, 0.009191807746887206, 0.009203712463378906, 0.009139840126037597, 0.009148127555847168, 0.009511584281921387, 0.009163935661315918, 0.009218912124633789, 0.009140224456787109, 0.009072640419006347, 0.009042207717895507, 0.009125760078430176, 0.00898201560974121, 0.009025888442993164, 0.009064448356628419, 0.009059391975402831, 
0.00914735984802246, 0.009138143539428711, 0.0091178560256958, 0.009289567947387695, 0.009234527587890624, 0.009155488014221192, 0.009210880279541016, 0.009003007888793945, 0.009150527954101562, 0.008939200401306153, 0.008796416282653808, 0.008699904441833496, 0.008834464073181152, 0.00878444766998291, 0.00877552032470703, 0.008947744369506835, 0.008828543663024902, 0.008783840179443359, 0.008825183868408203, 0.009506815910339356, 0.008833248138427735, 0.008700127601623536, 0.008752927780151368, 0.008658559799194335, 0.008607999801635742, 0.008702079772949218, 0.008809663772583009, 0.00883187198638916, 0.008835071563720704, 0.008932767868041992, 0.009183839797973633, 0.009035776138305664, 0.00912384033203125, 0.009067680358886719, 0.009253727912902831, 0.00905014419555664, 0.009387968063354492, 0.009086591720581055, 0.009231776237487792, 0.009154815673828125, 0.009076671600341797, 0.009403200149536132, 0.008914943695068359, 0.008896512031555176, 0.008853504180908203, 0.008814751625061035, 0.008826720237731933, 0.009037823677062988, 0.008871935844421386, 0.008810048103332519, 0.008608511924743652, 0.008592512130737305, 0.008544575691223145, 0.008786175727844238, 0.008712191581726075, 0.00863804817199707, 0.008706111907958985, 0.00866921615600586, 0.008669280052185058, 0.008773504257202148, 0.008927200317382812, 0.008796480178833008, 0.008767135620117187, 0.008825247764587402, 0.008781248092651368, 0.009156864166259765, 0.009031999588012695, 0.00901743984222412, 0.009122719764709472, 0.009274368286132812, 0.00921395206451416, 0.00925875186920166, 0.009044256210327149, 0.009012991905212403, 0.009049568176269531, 0.008969056129455566, 0.009078432083129882, 0.009121279716491699, 0.009249343872070312, 0.009093215942382812, 0.009027520179748534, 0.008904864311218261, 0.008980159759521484, 0.0090830078125, 0.009119935989379883, 0.009009152412414552, 0.0091910400390625, 0.009070048332214356, 0.009130528450012207, 0.00892262363433838, 0.008939935684204101, 0.008847455978393554, 0.008820735931396484, 0.008859647750854491, 0.008789024353027343, 0.008746175765991212, 0.008722335815429687, 0.008697471618652343, 0.008742176055908202, 0.008733280181884765, 0.008636032104492188, 0.008600128173828126, 0.008681471824645997, 0.00890880012512207, 0.008765631675720215, 0.008665087699890137, 0.008806048393249511, 0.00864857578277588, 0.00856112003326416, 0.008581088066101075, 0.008613375663757325, 0.008601920127868652, 0.008620256423950196, 0.008580608367919922, 0.00862217617034912, 0.008678879737854004, 0.008718560218811035, 0.00870473575592041, 0.008662112236022949, 0.008604576110839843, 0.008736767768859864, 0.008591296195983887, 0.008828991889953614, 0.00862217617034912, 0.008838144302368164, 0.008627231597900391, 0.00868511962890625, 0.008753791809082031, 0.009408191680908203, 0.008939040184020997, 0.00880463981628418, 0.008804224014282226, 0.008812224388122559, 0.008872384071350098, 0.008841471672058105, 0.008734399795532227, 0.008797823905944824, 0.008735360145568848, 0.008801376342773438, 0.00877222442626953, 0.008845600128173828, 0.009237824440002441, 0.0088787841796875, 0.008771583557128907, 0.008816639900207519, 0.009049599647521972, 0.009044480323791505, 0.008912896156311035, 0.008797280311584473, 0.008902784347534179, 0.008859264373779296, 0.009044256210327149, 0.009026911735534668, 0.00901155185699463, 0.008955360412597656, 0.008888352394104003, 0.008817567825317382, 0.008845312118530273, 0.008873984336853028, 0.008716064453125, 0.009050175666809082, 0.008883359909057618, 0.008752127647399903, 
0.00872447967529297, 0.008697728157043456, 0.008932543754577636, 0.008786879539489745, 0.008894463539123536, 0.008919039726257324, 0.008883904457092286, 0.008837151527404784, 0.00888371181488037, 0.008909600257873535, 0.008871935844421386, 0.008884223937988281, 0.008961024284362793, 0.008861984252929687, 0.00891158390045166, 0.008895584106445312, 0.009345952033996583, 0.009066304206848145, 0.00888441562652588, 0.008939040184020997, 0.00894979190826416, 0.008779392242431641, 0.008667072296142579, 0.008760191917419434, 0.00883078384399414, 0.00889260768890381, 0.00882688045501709, 0.008796159744262694, 0.008757375717163085, 0.008783743858337403, 0.00868556785583496, 0.0097259521484375, 0.009012543678283691, 0.008966848373413086, 0.00887174415588379, 0.008881440162658691, 0.00889129638671875, 0.008889344215393067, 0.008876640319824219, 0.008851872444152833, 0.008972224235534667, 0.009023008346557617, 0.00887833595275879, 0.008730912208557129, 0.00932204818725586, 0.008783647537231445, 0.008772255897521973, 0.00887993621826172, 0.008904895782470702, 0.009003007888793945, 0.008730879783630371, 0.008775327682495117, 0.008816896438598633, 0.008926048278808593, 0.008675488471984863, 0.008808863639831544, 0.008884608268737793, 0.008943679809570313, 0.008851455688476563, 0.008848544120788573, 0.008957856178283692, 0.008872896194458009, 0.00880617618560791, 0.008988896369934081, 0.008793120384216308, 0.008657183647155762, 0.008702655792236329, 0.008564736366271973, 0.00866057586669922, 0.008796575546264649, 0.008783359527587891, 0.008658816337585449, 0.008792736053466798, 0.008683135986328125, 0.008630592346191407, 0.009519136428833008, 0.008759296417236329, 0.008738752365112304, 0.008771648406982422, 0.008791040420532227, 0.008651359558105469, 0.008622112274169922, 0.008642047882080077, 0.00867414379119873, 0.008583200454711915, 0.008625951766967774, 0.008688159942626953, 0.008879648208618165, 0.008855711936950684, 0.008814592361450196, 0.008823936462402343, 0.009087871551513672, 0.009003007888793945, 0.00904368019104004, 0.00924617576599121, 0.009042752265930177, 0.008926431655883789, 0.008831232070922852, 0.008843839645385742, 0.008730591773986817, 0.008638751983642579, 0.008664544105529786, 0.0087424955368042, 0.008768159866333008, 0.008763199806213378, 0.008708288192749023, 0.008663040161132812, 0.009404416084289552, 0.00863548755645752, 0.008727456092834473, 0.008707743644714355, 0.008669535636901855, 0.008607744216918945, 0.008610976219177247, 0.008676192283630372, 0.008730688095092774, 0.00871116828918457, 0.008647616386413575, 0.008759072303771973, 0.008679136276245118, 0.008632831573486328, 0.00861190414428711, 0.00870803165435791, 0.008622079849243165, 0.008719391822814941, 0.008896672248840332, 0.008843520164489745, 0.008974911689758301, 0.008898655891418457, 0.008699007987976074, 0.008690431594848632, 0.008844608306884766, 0.008889056205749512, 0.00892300796508789, 0.008841343879699707, 0.008964096069335938, 0.0090415678024292, 0.008906111717224122, 0.00887222385406494, 0.008841856002807617, 0.008982368469238281, 0.00892751979827881, 0.008959936141967774, 0.009179136276245118, 0.00911359977722168, 0.009168543815612794, 0.009205344200134278, 0.009191712379455567, 0.009161184310913086, 0.00902143955230713, 0.009166784286499023, 0.009078271865844726, 0.009142848014831543, 0.009082880020141602, 0.009068544387817384, 0.00903987216949463, 0.009051263809204102, 0.009019359588623047, 0.009048095703125, 0.008978464126586915, 0.009115872383117675, 0.008979071617126464, 0.009043968200683594, 
0.009052160263061524, 0.008957951545715333, 0.008927040100097657, 0.00899500846862793, 0.009041919708251953, 0.009050271987915038, 0.009211487770080566, 0.009091327667236328, 0.00905951976776123, 0.008970527648925782, 0.009140480041503906, 0.00938969612121582, 0.009126336097717284, 0.009198176383972167, 0.009336031913757325, 0.009255840301513671, 0.009143168449401856, 0.009040639877319337, 0.008958047866821289, 0.00889782428741455, 0.009737088203430175, 0.009060352325439454, 0.009148639678955078, 0.009149791717529297, 0.009200063705444336, 0.009226240158081055, 0.009091072082519532, 0.009198847770690919, 0.009332863807678223, 0.009210495948791505, 0.009324543952941895, 0.009285120010375977, 0.00938646411895752, 0.009416671752929687, 0.00937990379333496, 0.009438528060913085, 0.00937235164642334, 0.00923846435546875, 0.009230400085449219, 0.009313759803771972, 0.009185824394226074, 0.009187007904052734, 0.009191264152526855, 0.009325023651123046, 0.00923027229309082, 0.009247039794921875, 0.009322239875793456, 0.009294976234436035, 0.009318431854248048, 0.009298015594482421, 0.009786111831665039, 0.009338879585266113, 0.00986019229888916, 0.00936406421661377, 0.009333056449890137, 0.009361408233642577, 0.009308159828186035, 0.009414655685424805, 0.009172127723693848, 0.009272159576416016, 0.009518112182617187, 0.009345824241638184, 0.009425087928771973, 0.00935696029663086, 0.009373408317565918, 0.009403167724609375, 0.009309632301330566, 0.009267616271972656, 0.009234432220458985, 0.00922976016998291, 0.009344863891601562, 0.009321184158325195, 0.00937548828125, 0.009402624130249023, 0.009352543830871581, 0.009325216293334961, 0.009060352325439454, 0.009263104438781738, 0.009248767852783203, 0.00921724796295166, 0.009224991798400878, 0.009294079780578614, 0.009207424163818359, 0.009193856239318848, 0.009189120292663574, 0.009234432220458985, 0.009160384178161621, 0.00934124755859375, 0.009371199607849122, 0.009398528099060059, 0.009468095779418945, 0.00940771198272705, 0.009392928123474122, 0.009936896324157715, 0.009504768371582031, 0.009459712028503419, 0.010164223670959472, 0.009231840133666992, 0.00935580825805664, 0.009175040245056153, 0.00921395206451416, 0.00951260757446289, 0.009367263793945313, 0.009411199569702149, 0.009305248260498047, 0.009431615829467773, 0.009333024024963379, 0.009332736015319825, 0.009267200469970703, 0.009265151977539063, 0.009278911590576172, 0.0091911678314209, 0.009202495574951172, 0.009259008407592773, 0.009302016258239745, 0.009227999687194824, 0.009220383644104003, 0.009283583641052246, 0.009377696037292481, 0.009406559944152832, 0.009320287704467774, 0.009333056449890137, 0.009365344047546386, 0.009371647834777832, 0.009285632133483887, 0.009517056465148926, 0.009938943862915038, 0.009333888053894042, 0.00933568000793457, 0.009279071807861328, 0.009501088142395019, 0.009373087882995606, 0.009286144256591796, 0.009252960205078126, 0.00923033618927002, 0.00922646427154541, 0.009232159614562988, 0.00929587173461914, 0.009474047660827637, 0.009329695701599122, 0.009262016296386719, 0.009466719627380372, 0.009211487770080566, 0.009183648109436036, 0.00919273567199707, 0.009286368370056152, 0.009236479759216308, 0.009198847770690919, 0.0091428804397583, 0.008982655525207519, 0.00902956771850586, 0.009011167526245117, 0.00902780818939209, 0.009056159973144531, 0.009099264144897461, 0.009184991836547852, 0.0092511043548584, 0.00920076847076416, 0.009243519783020019, 0.009289728164672852, 0.009365504264831542, 0.00929212760925293, 0.009192704200744629, 
0.009086784362792968, 0.009071200370788575, 0.009065759658813477, 0.009052191734313965, 0.008979328155517578, 0.008908160209655762, 0.008870464324951172, 0.008922112464904786, 0.008962944030761718, 0.00903987216949463, 0.008968319892883301, 0.008976256370544434, 0.008927392005920411, 0.008894304275512695, 0.00896003246307373, 0.009005023956298827, 0.00901910400390625, 0.00917148780822754, 0.009148159980773925, 0.009108960151672363, 0.0091461763381958, 0.009100000381469726, 0.009091039657592774, 0.009183263778686524, 0.009150464057922364, 0.009241791725158691, 0.009298751831054688, 0.009407839775085448, 0.009351840019226074, 0.009236288070678712, 0.009238719940185547, 0.009244671821594238, 0.009251872062683105, 0.009093503952026367, 0.009022047996520996, 0.009426048278808594, 0.009313152313232423, 0.009168895721435547, 0.009172991752624511, 0.009240544319152832, 0.009267231941223145, 0.009235936164855957, 0.009320992469787598, 0.009207232475280762, 0.009179648399353027, 0.009088064193725586, 0.009101344108581542, 0.009021471977233887, 0.009024127960205078, 0.008927488327026367, 0.008931520462036133, 0.009031488418579101, 0.008894047737121581, 0.008929759979248046, 0.009136128425598144, 0.009283359527587891, 0.009388383865356445, 0.00915555191040039, 0.009106335639953613, 0.009080351829528809, 0.009060832023620605, 0.009162752151489258, 0.009160223960876466, 0.00921571159362793, 0.009095935821533204, 0.009145664215087891, 0.009224896430969238, 0.009143712043762207, 0.009236288070678712, 0.010053536415100098, 0.009185312271118164, 0.009250975608825684, 0.009476799964904786, 0.009250816345214843, 0.009086367607116699, 0.009090815544128418, 0.009122528076171876, 0.009166975975036622, 0.009052160263061524, 0.009107295989990234, 0.00915231990814209, 0.009650527954101562, 0.009558015823364258, 0.010307456016540527, 0.009439519882202149, 0.010375040054321289, 0.009510880470275878, 0.009535488128662109, 0.00943449592590332, 0.00947212791442871, 0.009255680084228515, 0.009209600448608399, 0.0093306884765625, 0.009232128143310547, 0.009263263702392579, 0.009203807830810547, 0.00930406379699707, 0.00932249641418457, 0.009465215682983398, 0.009596863746643066, 0.009339679718017578, 0.009450495719909668, 0.009347552299499512, 0.009407999992370606, 0.009316767692565918, 0.00932863998413086, 0.009367360115051269, 0.009198783874511719, 0.00913913631439209, 0.009444416046142578, 0.009297183990478516, 0.009279199600219726, 0.009297216415405273, 0.009255616188049317, 0.009352191925048828, 0.009382911682128906, 0.009397855758666992, 0.00942841625213623, 0.009388223648071289, 0.009306495666503906, 0.009988032341003417, 0.009517536163330079, 0.009848832130432129, 0.009545632362365723, 0.00942089557647705, 0.009416704177856445, 0.009320192337036133, 0.009273504257202149, 0.009364607810974122, 0.009634783744812012, 0.009551872253417968, 0.009383935928344727, 0.009345151901245118, 0.009526592254638672, 0.00931670379638672, 0.009199423789978027, 0.009148223876953125, 0.009125856399536133, 0.00901366424560547, 0.008959744453430176, 0.008892895698547364, 0.008807519912719726, 0.008835647583007812, 0.00869820785522461, 0.008753151893615722, 0.008857600212097168, 0.009015392303466797, 0.009041824340820313, 0.009069567680358886, 0.009292799949645996, 0.009289728164672852, 0.009376895904541016, 0.009479040145874024, 0.009302111625671386, 0.009414239883422852, 0.009337408065795898, 0.011687295913696289, 0.009564543724060058, 0.009414560317993164, 0.009418848037719727, 0.009480192184448242, 0.009539584159851074, 
0.009453568458557129, 0.009406463623046875, 0.009329983711242676, 0.009358847618103027, 0.009447936058044434, 0.009467904090881347, 0.009461759567260742, 0.009363455772399902, 0.009465503692626952, 0.00928598403930664, 0.009993375778198242, 0.009456480026245117, 0.009388031959533692, 0.009355263710021973, 0.00949350357055664, 0.009350144386291503, 0.009333760261535644, 0.0093024320602417, 0.009283295631408691, 0.009361344337463379, 0.009445599555969238, 0.00929593563079834, 0.009493151664733886, 0.00923795223236084, 0.009218303680419922, 0.009398591995239259, 0.00961023998260498, 0.009350048065185548, 0.00933897590637207, 0.009197567939758301, 0.009193471908569336, 0.00922003173828125, 0.009300031661987306, 0.009339936256408691, 0.009333727836608886, 0.009315967559814453, 0.009221983909606933, 0.009316831588745118, 0.009297183990478516, 0.009179936408996583, 0.00928767967224121, 0.009283807754516602, 0.00940003204345703, 0.009230624198913575, 0.009563103675842285, 0.009403200149536132, 0.00941859245300293, 0.00939020824432373, 0.00934505558013916, 0.009400320053100587, 0.00923033618927002, 0.009309951782226562, 0.00946787166595459, 0.009378080368041991, 0.009357312202453612, 0.009375743865966797, 0.00934502410888672, 0.009375679969787597, 0.009388256072998046, 0.009297887802124023, 0.009305983543395997, 0.009279040336608887, 0.009374143600463868, 0.009572352409362793, 0.009358559608459473, 0.009230175971984863, 0.009275296211242675, 0.009281632423400878, 0.00928767967224121, 0.009318400382995605, 0.009214207649230957, 0.009332480430603028, 0.009158880233764648, 0.009246496200561524, 0.009164799690246582, 0.00928172779083252, 0.009358943939208985, 0.009374208450317383, 0.009281375885009765, 0.009320320129394532, 0.009215871810913085, 0.009228320121765136, 0.009252863883972168, 0.009375840187072755, 0.009201215744018556, 0.009285568237304687, 0.009320287704467774, 0.009292448043823242, 0.009365504264831542, 0.01021241569519043, 0.012585920333862305, 0.009631039619445801, 0.009286335945129395, 0.00926633644104004, 0.009296607971191407, 0.009336959838867188, 0.009375743865966797, 0.009425984382629395, 0.009279647827148437, 0.009394975662231445, 0.009408063888549804, 0.009253151893615722, 0.009289407730102539, 0.009548383712768555, 0.009378848075866699, 0.009339743614196778, 0.009361408233642577, 0.009254015922546387, 0.009446271896362305, 0.009416704177856445, 0.009308032035827637, 0.009312383651733399, 0.009335904121398926, 0.009311136245727538, 0.009359456062316895, 0.010034239768981934, 0.009411423683166505, 0.009441280364990234, 0.009310208320617675, 0.009439231872558594, 0.009383935928344727, 0.00934502410888672, 0.009256959915161133, 0.009226240158081055, 0.00923190402984619, 0.009363936424255372, 0.009349023818969727, 0.009476192474365234, 0.009426719665527343, 0.00920576000213623, 0.009297599792480469, 0.00935968017578125, 0.009250816345214843, 0.009291680335998535, 0.009375295639038085, 0.00940220832824707, 0.00928223991394043, 0.00924880027770996, 0.009165056228637695, 0.009198847770690919, 0.009243103981018066, 0.009266271591186523, 0.009255007743835449, 0.010013504028320313, 0.009302047729492188, 0.009279104232788086, 0.009193216323852539, 0.009158944129943848, 0.00921836757659912, 0.009232383728027344, 0.009270751953125, 0.00912822437286377, 0.009201184272766113, 0.009272031784057617, 0.009221183776855468, 0.008981568336486816, 0.008934335708618164, 0.009167584419250488, 0.009241824150085448, 0.009163519859313966, 0.009328895568847657, 0.00922396755218506, 0.009287903785705567, 
0.009266240119934082, 0.009202207565307617, 0.009181695938110352, 0.009289119720458984, 0.009091584205627442, 0.009093215942382812, 0.009057951927185058, 0.009036031723022461, 0.009052160263061524, 0.00910921573638916, 0.00897590446472168, 0.009072992324829101, 0.009218463897705078, 0.009338879585266113, 0.009438528060913085, 0.009408351898193359, 0.009371647834777832, 0.009321311950683593, 0.009181247711181641, 0.009279423713684083, 0.009232224464416504, 0.009218208312988281, 0.009193471908569336, 0.00935644817352295, 0.009292415618896484, 0.00916044807434082, 0.009265631675720214, 0.009230624198913575, 0.009240287780761719, 0.009056544303894043, 0.009211423873901368, 0.00921110439300537, 0.009253151893615722, 0.009239232063293457, 0.009291775703430176, 0.009268927574157714, 0.009292096138000487, 0.009277440071105958, 0.009254912376403808, 0.00926700782775879, 0.009280960083007812, 0.009343744277954102, 0.009312255859375, 0.009338399887084962, 0.009508447647094726, 0.00913219165802002, 0.009136863708496094, 0.009172863960266113, 0.009300095558166504, 0.009158656120300293, 0.00904736042022705, 0.008934080123901367, 0.009140224456787109, 0.00902348804473877, 0.008912832260131835, 0.008935487747192383, 0.008939519882202148, 0.009534751892089844, 0.008997599601745605, 0.009215999603271484, 0.009263104438781738, 0.009296992301940917, 0.009198271751403808, 0.009208127975463868, 0.00931123161315918, 0.00925158405303955, 0.009128095626831055, 0.009158207893371581, 0.009107904434204101, 0.009183551788330078, 0.008954815864562989, 0.008962176322937011, 0.009005696296691894, 0.009297087669372558, 0.00896070384979248, 0.008865152359008789, 0.008884991645812988, 0.008924896240234376, 0.008964127540588378, 0.009094400405883789, 0.009071616172790528, 0.009084480285644531, 0.008843551635742188, 0.008841376304626464, 0.008830975532531739, 0.009013248443603515, 0.008935615539550782, 0.009034943580627442, 0.009201727867126464, 0.009091263771057128, 0.009019776344299317, 0.00901734447479248]",tokens/s,109.797894774191,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run 
self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 711, in __init__ self.transformer = CodeGenModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 472, in __init__ self.h = nn.ModuleList([CodeGenBlock(config, layer_idx=i) for i in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 472, in self.h = nn.ModuleList([CodeGenBlock(config, layer_idx=i) for i in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 303, in __init__ self.mlp = CodeGenMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 281, in __init__ self.fc_in = nn.Linear(embed_dim, intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 82.12 MiB is free. Process 89340 has 14.66 GiB memory in use. Of the allocated memory 14.54 GiB is allocated by PyTorch, and 2.49 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,818.118656,2127.495168,0.0,1732.247552,1728.316416,s,1,7.90771826171875,7.90771826171875,0.0,7.90771826171875,7.90771826171875,7.90771826171875,7.90771826171875,[7.90771826171875],,kWh,9.499211254145242e-06,1.040675102756e-06,3.082502466000836e-06,1.3622388822902077e-05,,MB,1110.9376,2328.82176,0.0,1923.088384,1891.2,s,10,0.48508626937866217,0.048508626937866214,0.0029105012856569454,0.047908863067626956,0.04953007087707519,0.05320087604522704,0.05613752017974854,"[0.056871681213378905, 0.048147838592529295, 0.04766988754272461, 0.0456190071105957, 0.04682697677612305, 0.04732592010498047, 0.048253406524658204, 0.047424320220947266, 0.04871433639526367, 0.04823289489746094]",tokens/s,5277.411795800891,kWh,1.8499921940927998e-06,2.040192039538802e-07,1.2289935992278652e-06,3.2830049972745454e-06,tokens/kWh,77977340.94603075,MB,1120.669696,2328.82176,0.0,1923.088384,1895.80032,s,10,12.342516357421875,1.2342516357421873,0.010719832370455143,1.2334990234375,1.2469368408203125,1.2498820556640624,1.2522382275390624,"[1.24167626953125, 1.2302158203125, 1.239170654296875, 1.2462823486328125, 1.2367822265625, 1.2528272705078125, 1.228632080078125, 1.227983154296875, 1.2144898681640626, 1.2244566650390625]",tokens/s,51.04307596247703,kWh,3.597472274965803e-05,3.967591333625105e-06,1.8983994793373237e-05,5.892630887665637e-05,tokens/kWh,1069131.9582204039,,s,630,12.339980672836315,0.019587270909263974,0.00045158069869127306,0.019530303955078127,0.01990556468963623,0.020072346115112303,0.02086324857711792,"[0.019812416076660157, 0.019705663681030272, 0.019674335479736328, 0.01954867172241211, 0.019330944061279297, 0.020469568252563478, 0.02086579132080078, 0.019900543212890625, 0.01983852767944336, 0.01953638458251953, 0.019448863983154298, 0.0195001277923584, 0.019581663131713868, 0.019593408584594726, 0.01961859130859375, 0.019644416809082032, 0.01983888053894043, 0.019578336715698242, 0.01976179122924805, 0.019737920761108398, 0.019557056427001954, 0.020711423873901368, 0.021198816299438476, 0.019617824554443358, 0.019466239929199217, 0.019545856475830077, 0.019523839950561523, 0.01981439971923828, 0.019611648559570313, 0.019566848754882814, 0.019605119705200194, 0.019908992767333985, 0.019617183685302735, 0.01945840072631836, 0.019310592651367187, 0.019538047790527344, 0.019540128707885743, 0.019650495529174805, 0.01965648078918457, 0.01973446464538574, 0.019793983459472655, 0.01971548843383789, 0.019530176162719726, 0.019464351654052733, 0.01949894332885742, 0.01960870361328125, 0.01951584053039551, 0.01951590347290039, 0.019480064392089845, 0.019564447402954103, 0.019626592636108397, 0.019834880828857423, 0.01981177520751953, 
0.019692256927490236, 0.01969136047363281, 0.019765247344970704, 0.019911712646484375, 0.019792863845825195, 0.01966640090942383, 0.019493408203125, 0.019562496185302734, 0.01957606315612793, 0.01969843292236328, 0.019678848266601563, 0.019524063110351562, 0.019511295318603517, 0.019324928283691405, 0.01941663932800293, 0.019515104293823242, 0.01956732749938965, 0.019517440795898438, 0.019328479766845704, 0.019384735107421874, 0.019412960052490234, 0.019599103927612306, 0.01948099136352539, 0.019326175689697266, 0.01927779197692871, 0.01957356834411621, 0.01943280029296875, 0.019587743759155275, 0.02006220817565918, 0.019517440795898438, 0.01957587242126465, 0.019624895095825195, 0.019646368026733398, 0.01965411186218262, 0.019597087860107422, 0.0193670711517334, 0.019575519561767576, 0.019530431747436523, 0.019435808181762694, 0.01941209602355957, 0.01928598403930664, 0.019555103302001952, 0.01952479934692383, 0.019637216567993165, 0.019605472564697267, 0.019529792785644533, 0.019593151092529296, 0.01942118453979492, 0.01951091194152832, 0.019318304061889648, 0.0194747200012207, 0.019573343276977538, 0.01951299285888672, 0.019451488494873048, 0.019372127532958985, 0.019557088851928712, 0.019744672775268556, 0.019781631469726564, 0.019568384170532225, 0.01992483139038086, 0.019501472473144533, 0.01936720085144043, 0.019409631729125975, 0.019597503662109376, 0.01942310333251953, 0.019425472259521483, 0.019443456649780273, 0.01956003189086914, 0.019443391799926758, 0.019442399978637694, 0.02000486373901367, 0.019402463912963866, 0.0195382080078125, 0.019469087600708007, 0.019619840621948242, 0.01944576072692871, 0.01977289581298828, 0.019482431411743165, 0.020191776275634767, 0.02085702323913574, 0.019685375213623048, 0.01961369514465332, 0.01950627136230469, 0.019452192306518554, 0.019260063171386718, 0.019414655685424803, 0.01946454429626465, 0.019826688766479493, 0.01928390312194824, 0.01918777656555176, 0.019335168838500977, 0.019343360900878907, 0.01927529525756836, 0.019466720581054687, 0.019537919998168944, 0.019728384017944335, 0.019806367874145508, 0.019565792083740235, 0.01952422332763672, 0.019351232528686525, 0.019390783309936523, 0.01956211280822754, 0.019507007598876955, 0.01941766357421875, 0.019363840103149413, 0.01927577590942383, 0.0194969596862793, 0.019306495666503908, 0.01927987289428711, 0.019414688110351564, 0.0196713924407959, 0.019738624572753907, 0.019647680282592773, 0.019612031936645506, 0.01945849609375, 0.01945599937438965, 0.02042790412902832, 0.019753856658935545, 0.01964195251464844, 0.019612064361572267, 0.019688608169555664, 0.01966374397277832, 0.020831615447998046, 0.02157606315612793, 0.020117727279663086, 0.019996480941772463, 0.019928319931030274, 0.020118463516235353, 0.019701984405517577, 0.01965648078918457, 0.019777536392211914, 0.01984547233581543, 0.019679967880249023, 0.019657663345336914, 0.01961759948730469, 0.019537471771240236, 0.019827552795410156, 0.01962940788269043, 0.019669696807861327, 0.019953056335449217, 0.019628543853759766, 0.019597375869750976, 0.019766271591186522, 0.019853311538696287, 0.01990518379211426, 0.019775903701782227, 0.019691328048706054, 0.019640064239501952, 0.01958540725708008, 0.01982796859741211, 0.019710847854614258, 0.019709823608398437, 0.019568479537963868, 0.019514751434326173, 0.019859903335571288, 0.019670751571655272, 0.019626720428466797, 0.019706975936889647, 0.019616479873657226, 0.019805503845214845, 0.01995475196838379, 0.019903488159179687, 0.019913440704345704, 0.019910655975341796, 
0.020494335174560546, 0.019853311538696287, 0.01973587226867676, 0.019932064056396484, 0.019552095413208008, 0.01975699234008789, 0.019773439407348634, 0.019877824783325195, 0.0201646728515625, 0.02000486373901367, 0.020002815246582033, 0.020330495834350586, 0.019965951919555663, 0.019933183670043944, 0.019937280654907227, 0.01986355209350586, 0.019958944320678712, 0.01985174369812012, 0.019871551513671874, 0.019755456924438478, 0.019779136657714844, 0.019685407638549805, 0.019509183883666993, 0.01955718421936035, 0.019519264221191407, 0.019793920516967774, 0.019775199890136718, 0.019566335678100587, 0.01959494400024414, 0.01967804718017578, 0.01963212776184082, 0.01963212776184082, 0.019525632858276368, 0.019501056671142578, 0.01982259178161621, 0.019886079788208007, 0.019711999893188475, 0.01964851188659668, 0.019625120162963867, 0.019503007888793944, 0.01937504005432129, 0.01930201530456543, 0.019267135620117188, 0.01944403266906738, 0.019579231262207033, 0.019525184631347656, 0.01957539176940918, 0.01967513656616211, 0.019684831619262697, 0.019632671356201174, 0.019599552154541015, 0.019656192779541014, 0.01986137580871582, 0.019732032775878906, 0.01990233612060547, 0.019679231643676756, 0.019647487640380858, 0.019512928009033204, 0.01971241569519043, 0.01960550308227539, 0.020002815246582033, 0.019564544677734375, 0.019486719131469727, 0.019441471099853516, 0.019546112060546874, 0.01964771270751953, 0.019746816635131836, 0.019745567321777343, 0.01970195198059082, 0.01979167938232422, 0.019599552154541015, 0.019695615768432616, 0.019762752532958984, 0.019816896438598634, 0.02004787254333496, 0.01954310417175293, 0.01939708709716797, 0.019503583908081056, 0.019451776504516603, 0.019779136657714844, 0.0198699836730957, 0.019851200103759764, 0.019763551712036132, 0.019659839630126952, 0.019915103912353516, 0.019663455963134766, 0.019548160552978516, 0.019475616455078126, 0.019438175201416014, 0.019484928131103516, 0.01945756721496582, 0.01946054458618164, 0.01955574417114258, 0.01959766387939453, 0.01952195167541504, 0.019593088150024415, 0.019492416381835936, 0.019562944412231446, 0.020164575576782227, 0.01981439971923828, 0.01967513656616211, 0.01957475280761719, 0.01961974334716797, 0.020203647613525392, 0.027420448303222655, 0.021888511657714844, 0.019779680252075195, 0.020435232162475586, 0.019833183288574217, 0.019912704467773438, 0.019854623794555663, 0.020156768798828124, 0.020091264724731446, 0.019713247299194336, 0.01957484817504883, 0.01959017562866211, 0.019619232177734376, 0.019826208114624023, 0.01963033676147461, 0.019452415466308593, 0.019442911148071288, 0.019478368759155273, 0.019630624771118165, 0.019357791900634767, 0.019720512390136717, 0.020288639068603516, 0.019825504302978515, 0.019886112213134764, 0.019459999084472657, 0.01944585609436035, 0.019501056671142578, 0.01960515213012695, 0.02037705612182617, 0.01965679931640625, 0.01939740753173828, 0.01939263916015625, 0.019355199813842774, 0.01946451187133789, 0.019494911193847657, 0.01927724838256836, 0.019249727249145508, 0.01951670455932617, 0.019647199630737303, 0.019689472198486328, 0.01943552017211914, 0.01941913604736328, 0.020133184432983398, 0.021340959548950194, 0.019939231872558593, 0.01986284828186035, 0.019925376892089845, 0.019730655670166016, 0.01960540771484375, 0.019660991668701173, 0.019572736740112305, 0.019382272720336914, 0.019288063049316406, 0.019517440795898438, 0.020083711624145507, 0.020059135437011717, 0.019608671188354493, 0.01992176055908203, 0.019710016250610352, 0.019525568008422853, 
0.019580703735351562, 0.019800191879272462, 0.019455904006958007, 0.019377952575683595, 0.0193089599609375, 0.019570688247680663, 0.019535680770874024, 0.019398847579956056, 0.01927577590942383, 0.019697664260864257, 0.01932195281982422, 0.019790752410888672, 0.0196997127532959, 0.019552255630493166, 0.019525568008422853, 0.01940275192260742, 0.019412479400634765, 0.019446176528930666, 0.01942252731323242, 0.019389280319213865, 0.019316736221313476, 0.01922867202758789, 0.01941094398498535, 0.019343360900878907, 0.01939241600036621, 0.019341407775878908, 0.01947644805908203, 0.019638303756713868, 0.019689151763916016, 0.019580352783203126, 0.01968124771118164, 0.019585952758789063, 0.019597312927246095, 0.01947161674499512, 0.019518016815185547, 0.019476512908935546, 0.019436800003051757, 0.01948054313659668, 0.01955936050415039, 0.0193832950592041, 0.019497983932495116, 0.01960140800476074, 0.01985740852355957, 0.019805952072143553, 0.019699296951293944, 0.019505023956298828, 0.01946659278869629, 0.019496927261352538, 0.0194069766998291, 0.01936345672607422, 0.019314783096313477, 0.019286655426025392, 0.01952902412414551, 0.01948847961425781, 0.01943440055847168, 0.019360927581787108, 0.019380352020263673, 0.019385120391845704, 0.019429567337036133, 0.019350431442260743, 0.01984921646118164, 0.019939327239990236, 0.019949567794799804, 0.019838016510009767, 0.01976211166381836, 0.019789663314819336, 0.019663007736206054, 0.019542015075683594, 0.019352832794189454, 0.01931248092651367, 0.019272607803344728, 0.01934115219116211, 0.02026019287109375, 0.019282751083374024, 0.01927577590942383, 0.01935478401184082, 0.01928201675415039, 0.019244096755981446, 0.01928876876831055, 0.019339935302734375, 0.019280223846435546, 0.019454015731811523, 0.01953171157836914, 0.0195379524230957, 0.019353567123413087, 0.019331071853637697, 0.01942732810974121, 0.019529727935791014, 0.019331071853637697, 0.02199519920349121, 0.019581247329711914, 0.019335168838500977, 0.019664896011352538, 0.019359743118286133, 0.01929360008239746, 0.019278047561645507, 0.01938470458984375, 0.019363840103149413, 0.019455808639526367, 0.019504640579223635, 0.01944646453857422, 0.019466239929199217, 0.019357696533203125, 0.019171327590942384, 0.019269567489624023, 0.019229791641235353, 0.019346399307250975, 0.01921558380126953, 0.01924892807006836, 0.019259584426879882, 0.019102527618408204, 0.019264896392822264, 0.019452512741088866, 0.01922380828857422, 0.019234943389892577, 0.019174272537231446, 0.020500255584716798, 0.019840576171875, 0.01998054313659668, 0.01927596855163574, 0.019269632339477538, 0.01919811248779297, 0.01927939224243164, 0.019228479385375977, 0.019286144256591798, 0.019247167587280272, 0.01930031967163086, 0.019257375717163086, 0.019080543518066408, 0.019159711837768555, 0.019212223052978514, 0.01912166404724121, 0.019226528167724608, 0.019077791213989257, 0.019147775650024415, 0.019212383270263672, 0.019465120315551757, 0.01920204734802246, 0.019164255142211914, 0.01913484764099121, 0.01951798439025879, 0.019194976806640625, 0.019213375091552735, 0.019060575485229492, 0.019191808700561523, 0.01923891258239746, 0.01924870491027832, 0.01927212715148926, 0.019197887420654296, 0.01929427146911621, 0.019451904296875, 0.019400703430175782, 0.019234560012817384, 0.01920742416381836, 0.019194879531860352, 0.019224576950073242, 0.019258752822875976, 0.019260032653808594, 0.019247007369995118, 0.01925948715209961, 0.01920796775817871, 0.019334688186645507, 0.01926144027709961, 0.019251903533935546, 
0.019167327880859376, 0.019211551666259766, 0.019399391174316407, 0.019759008407592774, 0.01940787124633789, 0.019243808746337892, 0.01916486358642578, 0.019185344696044923, 0.019573600769042968, 0.019156991958618166, 0.01922662353515625, 0.01920204734802246, 0.019277023315429687, 0.019385120391845704, 0.02008064079284668, 0.01925065612792969, 0.019330656051635742, 0.019352224349975584, 0.019329536437988282, 0.019299552917480468, 0.01925155258178711, 0.019240192413330078, 0.019551744461059572, 0.019479360580444336, 0.01919990348815918, 0.019531871795654295, 0.019371936798095703, 0.019929183959960937, 0.019296255111694336, 0.019318784713745117, 0.019320831298828126, 0.01934262466430664, 0.01942710494995117, 0.019311552047729493, 0.01943731117248535, 0.019506591796875, 0.019460960388183592, 0.019404800415039062, 0.019191808700561523, 0.0191441593170166, 0.019188255310058595, 0.0196011848449707, 0.01970812797546387, 0.019826688766479493, 0.019688608169555664, 0.019453887939453126, 0.019518367767333983, 0.019314687728881837, 0.019416767120361327, 0.01938163185119629, 0.01936684799194336, 0.020107616424560548, 0.01946998405456543, 0.01966080093383789, 0.0192491512298584, 0.019220224380493166, 0.019259647369384767, 0.019277824401855468, 0.019663999557495117, 0.01927891159057617, 0.019241823196411132, 0.019403968811035156, 0.0192325439453125, 0.019158559799194334, 0.019164703369140626, 0.019177471160888672, 0.01919276809692383, 0.019180864334106446, 0.019253952026367187, 0.019406688690185546, 0.019194015502929686, 0.019238304138183594, 0.019276384353637696, 0.01958502388000488, 0.01934329605102539, 0.019250560760498046, 0.019255136489868162, 0.019677728652954102, 0.019296575546264648, 0.019269632339477538, 0.020627231597900392, 0.020057695388793945, 0.019626623153686525, 0.01962188720703125, 0.01959446334838867]",tokens/s,51.05356456406804,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 510, in __init__ self.mlp = MistralMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 150, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 170.12 MiB is free. Process 526911 has 14.57 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 508, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 199, 
in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 367556 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 510, in __init__ self.mlp = MistralMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 150, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 170.12 MiB is free. Process 367198 has 14.57 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking 
backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 508, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 278, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 199, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 368343 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 508, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 
199, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 366831 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 510, in __init__ self.mlp = MistralMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 150, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 224.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 170.12 MiB is free. Process 366070 has 14.57 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.46 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking 
backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 508, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 278, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 199, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 368709 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 508, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 199, 
in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 367984 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 711, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 508, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 199, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 366448 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in 
run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 374, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 117, in __init__ self.q_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 162.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 120.12 MiB is free. Process 126091 has 14.62 GiB memory in use. Of the allocated memory 14.51 GiB is allocated by PyTorch, and 2.29 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1121, in __init__ self.embed_out = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 592.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 300.12 MiB is free. Process 192895 has 14.45 GiB memory in use. Of the allocated memory 14.33 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,739.06176,6315.507712,0.0,5920.260096,5695.433728,s,1,7.27074365234375,7.27074365234375,0.0,7.27074365234375,7.27074365234375,7.27074365234375,7.27074365234375,[7.27074365234375],,kWh,8.849139250005086e-06,9.601664135668122e-07,3.5350028280006773e-06,1.3344308491572575e-05,,MB,1099.22304,6330.187776,0.0,5922.357248,5577.220096,s,10,0.9366836853027344,0.09366836853027345,0.0030483283167822284,0.09478851318359374,0.09545802764892578,0.09604574813842773,0.09651592453002929,"[0.08512000274658203, 0.0924968032836914, 0.09479142761230469, 0.09525901031494141, 0.09663346862792968, 0.09478559875488281, 0.09532742309570312, 0.09490019226074219, 0.09411737823486328, 0.09325238037109375]",tokens/s,2733.0464276983885,kWh,2.7320048962226894e-06,3.012278133645236e-07,1.8089526413458205e-06,4.8421853509330335e-06,tokens/kWh,52868690.7762983,MB,1132.351488,6330.187776,0.0,5922.357248,5663.963136,s,10,20.591097900390626,2.0591097900390625,0.007451632914587384,2.0561278076171874,2.071295068359375,2.0724998291015626,2.0734636376953124,"[2.0528154296875, 2.0534404296875, 2.0631806640625, 2.055403564453125, 2.05194140625, 2.052717529296875, 2.060014892578125, 2.07370458984375, 2.07102734375, 2.05685205078125]",tokens/s,30.595745940678984,kWh,6.005933179169355e-05,6.624405903750141e-06,3.9696497229654024e-05,0.00010638023492509772,tokens/kWh,592215.2742411058,,s,630,20.58804969024657,0.032679443952772357,0.0004708449742492666,0.032596769332885744,0.033009909057617186,0.033275018310546876,0.03555262310028076,"[0.03432243347167969, 0.03297459030151367, 0.032454910278320315, 0.03223551940917969, 0.032292385101318356, 0.03224812698364258, 0.03224387359619141, 0.03234201431274414, 0.032307201385498044, 0.032263648986816405, 0.0322911376953125, 0.03226144027709961, 0.03232364654541016, 0.032174945831298825, 0.03224576187133789, 0.03224316787719726, 0.032309791564941404, 0.03238284683227539, 0.03230847930908203, 0.03240419387817383, 0.03236054229736328, 0.03234796905517578, 0.03244800186157227, 0.03227507019042969, 0.03243171310424805, 0.03240512084960938, 0.03257436752319336, 0.0323583984375, 0.03238281631469726, 0.03249987030029297, 0.03256320190429687, 0.033054271697998044, 0.03274911880493164, 0.03263577651977539, 0.03254886245727539, 0.03263897705078125, 0.0328007698059082, 0.03266892623901367, 0.03268703842163086, 0.03300947189331055, 0.03318937683105469, 0.032669952392578125, 0.03259827041625977, 0.03242598342895508, 0.03255091094970703, 0.032415744781494144, 0.032557056427001956, 0.03248252868652344, 
0.03248617553710938, 0.03242393493652344, 0.03273318481445313, 0.03280486297607422, 0.032851966857910156, 0.032745471954345705, 0.0327823371887207, 0.03270041656494141, 0.03265945434570312, 0.032729087829589845, 0.03288883209228516, 0.032849918365478514, 0.03283148956298828, 0.03287449645996094, 0.03319388961791992, 0.03557328033447266, 0.03347711944580078, 0.032763870239257815, 0.03256115341186523, 0.03227606582641602, 0.03232400131225586, 0.03228684616088867, 0.032153472900390626, 0.03226009750366211, 0.03231868743896484, 0.03229977416992187, 0.03213520050048828, 0.0321712646484375, 0.03218035125732422, 0.03226483154296875, 0.03253657531738281, 0.03257539367675781, 0.03239945602416992, 0.03248537445068359, 0.032337024688720704, 0.0324554557800293, 0.03232688140869141, 0.03245555114746094, 0.03251980972290039, 0.03249728012084961, 0.03241241455078125, 0.032437950134277346, 0.032640960693359374, 0.03255043029785156, 0.032537345886230466, 0.03254595184326172, 0.032607135772705076, 0.032732383728027344, 0.0325599365234375, 0.03255302429199219, 0.03277536010742187, 0.03246697616577148, 0.032507648468017576, 0.0325022087097168, 0.03244086456298828, 0.03241923141479492, 0.03241427230834961, 0.032585536956787106, 0.03244815826416016, 0.03255558395385742, 0.03244230270385742, 0.03282134246826172, 0.03263654327392578, 0.03261270523071289, 0.03256729507446289, 0.03261561584472656, 0.03268281555175781, 0.03265327835083008, 0.03275503921508789, 0.03256390380859375, 0.03253644943237305, 0.03253216171264649, 0.03258188629150391, 0.032825023651123046, 0.0328279037475586, 0.03304150390625, 0.03306588745117187, 0.03305472183227539, 0.03558595275878906, 0.03328992080688477, 0.03272745513916016, 0.03239731216430664, 0.032384864807128905, 0.03238723373413086, 0.032284671783447266, 0.032148799896240234, 0.03224031829833984, 0.03232153701782227, 0.03247420883178711, 0.032462753295898435, 0.032508926391601564, 0.03259187316894531, 0.032546142578125, 0.03241846466064453, 0.03237273788452148, 0.032376094818115236, 0.03255574417114258, 0.03249356842041016, 0.03260211181640625, 0.032449920654296874, 0.032487808227539064, 0.03277350234985352, 0.03250627136230469, 0.032541152954101565, 0.03249116897583008, 0.03248777770996094, 0.03259187316894531, 0.0328803825378418, 0.03286412811279297, 0.032883071899414064, 0.032860160827636715, 0.032794624328613284, 0.03405619049072266, 0.03339059066772461, 0.032616127014160154, 0.032549182891845704, 0.03267379379272461, 0.03277619171142578, 0.032718017578125, 0.032645118713378905, 0.03276473617553711, 0.03261439895629883, 0.03263071823120117, 0.032706623077392576, 0.0328243522644043, 0.032611297607421874, 0.03273241424560547, 0.03259059143066406, 0.03261439895629883, 0.03278643035888672, 0.03346803283691406, 0.03329840087890625, 0.033012126922607424, 0.032896736145019534, 0.032792865753173826, 0.03270655822753906, 0.032868350982666016, 0.033073150634765625, 0.03308297729492188, 0.032864063262939454, 0.03275632095336914, 0.0354752311706543, 0.03328752136230469, 0.03272998428344726, 0.032495201110839846, 0.0325184326171875, 0.032315521240234374, 0.0325522575378418, 0.03220908737182617, 0.032323455810546874, 0.03230374526977539, 0.03227238464355469, 0.03222118377685547, 0.032315391540527344, 0.03239731216430664, 0.03265068817138672, 0.032490047454833984, 0.03262464141845703, 0.0324956169128418, 0.03246249771118164, 0.03266185760498047, 0.03256524658203125, 0.03259088134765625, 0.0324884147644043, 0.03246080017089844, 0.03266940689086914, 0.0325819206237793, 0.03255014419555664, 
0.03246771240234375, 0.032589824676513675, 0.032589824676513675, 0.032745471954345705, 0.03327084732055664, 0.032604415893554686, 0.032662208557128904, 0.03268198394775391, 0.03256320190429687, 0.03266355133056641, 0.032471038818359374, 0.03244646453857422, 0.032290817260742184, 0.03241187286376953, 0.032383937835693356, 0.03241244888305664, 0.03257759857177735, 0.032601089477539064, 0.03262102508544922, 0.03244287872314453, 0.032489246368408206, 0.03265766525268555, 0.032595966339111326, 0.03279667282104492, 0.0327344970703125, 0.03267068862915039, 0.03264281463623047, 0.03253225708007813, 0.032620319366455076, 0.03264956665039063, 0.03267139053344727, 0.03266521453857422, 0.032694366455078124, 0.03285385513305664, 0.03281711959838867, 0.0328262710571289, 0.035722145080566405, 0.03348278427124023, 0.03267334365844726, 0.032811454772949215, 0.03270870590209961, 0.03228457641601563, 0.03223302459716797, 0.032284862518310545, 0.03231564712524414, 0.032126976013183595, 0.032405502319335935, 0.03233792114257812, 0.03232745742797852, 0.0329475212097168, 0.032904094696044925, 0.03233996963500976, 0.03255897521972656, 0.032221057891845706, 0.03231769561767578, 0.032368640899658206, 0.03230713653564453, 0.03232361602783203, 0.032274463653564456, 0.03225190353393555, 0.03236025619506836, 0.03247446441650391, 0.03244323348999024, 0.03240755081176758, 0.03241068649291992, 0.03254278564453125, 0.03257228851318359, 0.03262464141845703, 0.0325591049194336, 0.032546817779541014, 0.03254476928710937, 0.03247459030151367, 0.032485919952392577, 0.03246284866333008, 0.03245414352416992, 0.03239350509643555, 0.03236044692993164, 0.03247305679321289, 0.03244857788085938, 0.03234998321533203, 0.03244668960571289, 0.03227865600585938, 0.03259603118896484, 0.032456703186035156, 0.032544670104980467, 0.032503326416015624, 0.0326682243347168, 0.032530433654785154, 0.03283763122558594, 0.032812896728515624, 0.03256083297729492, 0.0325882568359375, 0.03263059234619141, 0.03255519866943359, 0.032589824676513675, 0.03275702285766602, 0.03285475158691406, 0.032755680084228515, 0.032784416198730466, 0.03550204849243164, 0.033290241241455076, 0.03271692657470703, 0.03255699157714844, 0.03241542434692383, 0.03225798416137695, 0.03220012664794922, 0.032194561004638675, 0.03265951919555664, 0.03214416122436523, 0.032214366912841796, 0.032432830810546875, 0.032368640899658206, 0.03225804901123047, 0.03240716934204101, 0.03221952056884766, 0.03280879974365234, 0.032851486206054686, 0.03239619064331055, 0.03235523223876953, 0.03241862487792969, 0.032411102294921876, 0.03236713409423828, 0.03234611129760742, 0.03228672027587891, 0.032350208282470705, 0.03233187103271484, 0.03240950393676758, 0.032513729095458986, 0.0326690559387207, 0.032722080230712894, 0.0327083854675293, 0.03268175888061523, 0.03269859313964844, 0.03265945434570312, 0.03254441452026367, 0.032522590637207034, 0.032481151580810545, 0.032444000244140625, 0.03240131378173828, 0.032411903381347654, 0.03254719924926758, 0.03260950469970703, 0.03251279830932617, 0.032505855560302735, 0.03282665634155273, 0.032602848052978514, 0.0324587516784668, 0.032530433654785154, 0.03243417739868164, 0.03256489562988281, 0.03260860824584961, 0.03256524658203125, 0.03266355133056641, 0.032589824676513675, 0.03258569717407227, 0.03262262344360352, 0.03256115341186523, 0.03279872131347656, 0.03269734573364258, 0.032796897888183595, 0.03281999969482422, 0.03288604736328125, 0.03569382476806641, 0.03369647979736328, 0.032696319580078126, 0.032513473510742186, 0.03242969512939453, 
0.032325950622558594, 0.03236108779907226, 0.03225980758666992, 0.03257503890991211, 0.03226454544067383, 0.03242409515380859, 0.032293087005615236, 0.03227852630615234, 0.03238054275512695, 0.03228652954101562, 0.032260673522949215, 0.032400928497314456, 0.032362911224365236, 0.032428096771240235, 0.03233990478515625, 0.03244022369384766, 0.03230326461791992, 0.03249356842041016, 0.032540672302246096, 0.032454303741455075, 0.03233827209472656, 0.032419551849365236, 0.03240784072875977, 0.03265705490112305, 0.03257276916503906, 0.032799232482910154, 0.03291596984863281, 0.032952320098876955, 0.03299123382568359, 0.032794017791748044, 0.03273174285888672, 0.03266300964355469, 0.0325453109741211, 0.03266336059570313, 0.0324568977355957, 0.03249151992797852, 0.03301580810546875, 0.032599903106689455, 0.032503231048583985, 0.03260079956054687, 0.03258755111694336, 0.03273292922973633, 0.03260435104370117, 0.03263449478149414, 0.03273795318603516, 0.032683967590332035, 0.03279264068603516, 0.03292111968994141, 0.0332784309387207, 0.032991008758544924, 0.03290544128417969, 0.03294822311401367, 0.03300966262817383, 0.0330189437866211, 0.03315603256225586, 0.03301375961303711, 0.03303184127807617, 0.03300128173828125, 0.03610348892211914, 0.03358787155151367, 0.033070465087890626, 0.03280140686035156, 0.032742401123046876, 0.032653343200683596, 0.03257648086547851, 0.03250790405273438, 0.03259801483154297, 0.032573089599609376, 0.032565601348876955, 0.03263692855834961, 0.033966079711914066, 0.03266764831542969, 0.032630561828613285, 0.032731361389160156, 0.03261161422729492, 0.03257110214233398, 0.03269734573364258, 0.03252364730834961, 0.032696670532226565, 0.03280310440063477, 0.03273318481445313, 0.03277536010742187, 0.032670528411865234, 0.03248534393310547, 0.03259532928466797, 0.03263555145263672, 0.03274137496948242, 0.033972225189208984, 0.03308889770507813, 0.03305971145629883, 0.03284966278076172, 0.032935455322265626, 0.03305065536499024, 0.03283808135986328, 0.03305446243286133, 0.032727294921875, 0.032718849182128903, 0.03279052734375, 0.03271424102783203, 0.033012161254882814, 0.03276332855224609, 0.03273587036132813, 0.03295356750488281, 0.032887584686279295, 0.03278035354614258, 0.03269740676879883, 0.033089534759521484, 0.032912254333496094, 0.032911361694335936, 0.03285606384277344, 0.03295641708374023, 0.033091583251953126, 0.032925697326660154, 0.03282329559326172, 0.03284377670288086, 0.032871681213378905, 0.03293056106567383, 0.032884735107421875, 0.03312607955932617, 0.033259136199951175, 0.0332848014831543, 0.03605833435058594, 0.033718048095703126, 0.03292873764038086, 0.032722782135009766, 0.03274716949462891, 0.03256086349487305, 0.03277494430541992, 0.03317878341674805, 0.0325384635925293, 0.03249049758911133, 0.032568801879882814, 0.03249615859985352, 0.03261644744873047, 0.03250102233886719, 0.032654048919677735, 0.03252617645263672, 0.0326657600402832, 0.03254476928710937, 0.03262259292602539, 0.032659263610839845, 0.03259616088867188, 0.03265331268310547, 0.03271225738525391, 0.03264556884765625, 0.03266969680786133, 0.032622337341308594, 0.032702720642089844, 0.03276595306396484, 0.03285932922363281, 0.03355433654785156, 0.033325984954833986, 0.03310182571411133, 0.032950271606445314, 0.03278643035888672, 0.03289817428588867, 0.03281798553466797, 0.032736961364746096, 0.032688350677490235, 0.032729248046875, 0.032778240203857424, 0.032833534240722655, 0.032699649810791015, 0.03275635147094726, 0.032745601654052735, 0.033252574920654296, 0.03287324905395508, 
0.03330047988891602, 0.032806686401367184, 0.03267606353759766, 0.032591552734375, 0.03268025588989258, 0.03278643035888672, 0.032833534240722655, 0.032982078552246094, 0.03280499267578125, 0.03272582244873047, 0.0328600959777832, 0.03286227035522461, 0.033414337158203126, 0.03298537445068359, 0.03310851287841797, 0.03306496047973633, 0.0329090576171875, 0.036259166717529295, 0.03353593444824219, 0.03302060699462891, 0.03259737777709961, 0.03249379348754883, 0.03247964859008789, 0.03244879913330078, 0.0325013427734375, 0.03274150466918945, 0.03235363388061523, 0.032301055908203126, 0.03225228881835938, 0.03242790222167969, 0.03234857559204102, 0.032468353271484375, 0.03225254440307617, 0.03228876876831055, 0.0323133430480957, 0.03251718521118164, 0.03238547134399414, 0.03253299331665039, 0.032405502319335935, 0.03236454391479492, 0.03273510360717773, 0.03264313507080078, 0.03253359985351562, 0.03265840148925781, 0.032464897155761716, 0.03249356842041016, 0.03262464141845703, 0.032737281799316405, 0.03264716720581055, 0.03266463851928711, 0.032591934204101565, 0.03250166320800781, 0.032414688110351565, 0.03245843124389648, 0.032358497619628904, 0.03253190231323242, 0.03243907165527344, 0.032411361694335936, 0.032319774627685545, 0.03244851303100586, 0.03265945434570312, 0.03250175857543945, 0.032589824676513675, 0.032486976623535155, 0.03242438507080078, 0.03246284866333008, 0.03255817413330078, 0.03261123275756836, 0.03262156677246094, 0.03268505477905274, 0.03261439895629883, 0.03279433441162109, 0.03260649490356445, 0.032894977569580076, 0.03278646469116211, 0.033670398712158205, 0.032911327362060545, 0.03290995025634766, 0.03292787170410156, 0.032839744567871094]",tokens/s,30.600275862868994,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,880.263168,14942.076928,0.0,14539.554816,14487.489536,s,1,7.20202783203125,7.20202783203125,0.0,7.20202783203125,7.20202783203125,7.20202783203125,7.20202783203125,[7.20202783203125],,kWh,9.544210520819737e-06,1.0453438027580422e-06,4.8586149980212046e-06,1.5448169321598983e-05,,MB,1336.979456,14958.854144,0.0,14543.74912,13944.436736,s,10,1.7613059387207028,0.17613059387207033,0.006973249101296131,0.17776191711425782,0.1799143051147461,0.182922354888916,0.18532879470825195,"[0.1569965057373047, 0.17839500427246094, 0.17782380676269532, 0.17554425048828126, 0.17580831909179687, 0.179245849609375, 0.17587699890136718, 0.1777000274658203, 0.17798477172851562, 0.18593040466308594]",tokens/s,1453.4669665960562,kWh,5.098395018318957e-06,5.622643186450879e-07,3.379950021965982e-06,9.040609358930028e-06,tokens/kWh,28316675.329758752,MB,1370.058752,14969.339904,0.0,14552.137728,13944.439296,s,10,44.99649951171875,4.499649951171874,0.004944781299041992,4.4990305175781256,4.505482958984375,4.508174829101563,4.510328325195312,"[4.504884765625, 4.494296875, 4.495734375, 4.4950927734375, 4.51086669921875, 4.5018310546875, 4.49962939453125, 
4.49522705078125, 4.498431640625, 4.5005048828125]",tokens/s,14.001089125520194,kWh,0.00013150065673167826,1.4504948982938488e-05,8.742709484583346e-05,0.00023343270056045022,tokens/kWh,269885.0668682788,,s,630,44.99345115661621,0.07141817643907335,0.0003163343163715937,0.07135105895996094,0.07168768310546875,0.0719116813659668,0.07270385749816895,"[0.0726087646484375, 0.0715666275024414, 0.07153839874267579, 0.07149308776855469, 0.07132009887695312, 0.07149362945556641, 0.07315455627441406, 0.07142809295654297, 0.071525634765625, 0.07171558380126954, 0.07152358245849609, 0.07130944061279297, 0.07141645050048828, 0.07141580963134765, 0.07149954986572266, 0.0715, 0.07143590545654296, 0.07156774139404297, 0.07131488037109375, 0.07152505493164063, 0.07155699157714844, 0.0715665283203125, 0.07146790313720704, 0.07146412658691406, 0.07166242980957031, 0.07129078674316407, 0.071259521484375, 0.07136281585693359, 0.07139107513427734, 0.07275548553466797, 0.07139046478271484, 0.07150262451171875, 0.07127267456054688, 0.07133344268798827, 0.0712155532836914, 0.07122930908203125, 0.07129071807861329, 0.07130111694335937, 0.07153897857666015, 0.07146905517578125, 0.07161606597900391, 0.07172345733642578, 0.07150208282470703, 0.07127817535400391, 0.07130537414550782, 0.07161360168457032, 0.07156130981445312, 0.07141248321533203, 0.07153663635253907, 0.07127049255371094, 0.07141705322265625, 0.07123833465576172, 0.07124585723876953, 0.07129084777832032, 0.0711674575805664, 0.07152710723876952, 0.07123673248291015, 0.07131209564208985, 0.0722138214111328, 0.07158649444580079, 0.07140351867675782, 0.07125794982910157, 0.07119596862792969, 0.07232717132568359, 0.07139231872558593, 0.07122998046875, 0.07115203094482422, 0.07112499237060547, 0.07133507537841798, 0.0712401885986328, 0.07112528228759765, 0.07133618927001953, 0.07190128326416016, 0.071304931640625, 0.07119795227050782, 0.07121587371826171, 0.07128047943115234, 0.07136067199707032, 0.07105667114257813, 0.07122402954101563, 0.07116799926757812, 0.071087646484375, 0.07101468658447266, 0.0710535659790039, 0.07115299224853516, 0.07109078216552735, 0.07117964935302734, 0.07117014312744141, 0.07107193756103515, 0.07114173126220703, 0.07113922882080079, 0.071253662109375, 0.07127823638916016, 0.07128102111816406, 0.07160652923583985, 0.0731240005493164, 0.07137260437011719, 0.07126649475097656, 0.07180636596679688, 0.07131401824951172, 0.07121836853027344, 0.07134844970703125, 0.0712968978881836, 0.07127279663085938, 0.07137664031982421, 0.07131609344482422, 0.07133999633789062, 0.07129065704345704, 0.07114911651611328, 0.07117219543457032, 0.07113686370849609, 0.07119513702392578, 0.07112140655517578, 0.07120281219482422, 0.07117529296875, 0.07198595428466797, 0.07137904357910156, 0.07138047790527344, 0.07120317077636719, 0.07130947113037109, 0.07194624328613282, 0.07155097961425781, 0.07138508605957031, 0.07130521392822266, 0.07133615875244141, 0.07122921752929688, 0.07272140502929687, 0.07155216217041016, 0.07125897979736329, 0.071229248046875, 0.07111721801757813, 0.07108585357666015, 0.07135619354248048, 0.0713177261352539, 0.07119257354736327, 0.07119667053222656, 0.07121887969970703, 0.07124150085449218, 0.07125564575195313, 0.07140249633789063, 0.07142307281494141, 0.07137366485595703, 0.07134416198730469, 0.07149398040771485, 0.07116352081298828, 0.07122124481201172, 0.0716118392944336, 0.07133856201171875, 0.07113318634033203, 0.0710778579711914, 0.07106153869628906, 0.07116185760498046, 0.07130425262451172, 0.07142495727539062, 
0.0713359375, 0.07133366394042968, 0.07129132843017578, 0.07118022155761719, 0.07126140594482422, 0.07126911926269532, 0.0711863021850586, 0.07134979248046874, 0.07126188659667969, 0.0716069107055664, 0.07168630218505859, 0.07202604675292969, 0.07130528259277344, 0.07137417602539063, 0.07140172576904297, 0.07121552276611329, 0.07122025299072265, 0.07153968048095703, 0.07112448120117187, 0.07123149108886719, 0.07123609924316407, 0.07152982330322266, 0.07145263671875, 0.07140013122558594, 0.07123353576660156, 0.07130265808105468, 0.07132595062255859, 0.07140172576904297, 0.07167801666259765, 0.07179788970947265, 0.07135088348388671, 0.07128598022460937, 0.07130451202392578, 0.07137042999267579, 0.0712847671508789, 0.07240294647216797, 0.071505859375, 0.07133766174316407, 0.07138861083984376, 0.07155193328857422, 0.07139907073974609, 0.071157470703125, 0.07126850891113282, 0.0711292495727539, 0.07117241668701171, 0.07125401306152344, 0.07106537628173829, 0.07119280242919922, 0.0714110107421875, 0.07129769897460937, 0.07138502502441406, 0.07116121673583985, 0.07116896057128906, 0.07137407684326172, 0.07156585693359375, 0.07123558044433594, 0.07107965087890625, 0.07111666870117188, 0.07109833526611328, 0.07115821075439453, 0.07121305847167969, 0.07101241302490234, 0.07127769470214844, 0.0715006103515625, 0.07135641479492187, 0.0712416000366211, 0.07120703887939453, 0.07105964660644531, 0.07125138854980469, 0.07124639892578125, 0.07116575622558594, 0.07111254119873046, 0.07112515258789062, 0.07126630401611328, 0.07138690948486329, 0.07132182312011719, 0.0722033920288086, 0.07155136108398437, 0.07127299499511719, 0.07166973114013672, 0.07145187377929688, 0.07123638153076171, 0.07155506896972656, 0.07140975952148437, 0.07123289489746094, 0.07118899536132813, 0.071106689453125, 0.0713004150390625, 0.0714402847290039, 0.07144108581542968, 0.07147110748291016, 0.07131257629394532, 0.07159040069580078, 0.07134550476074218, 0.07140828704833985, 0.07125030517578125, 0.07225084686279297, 0.07144806671142578, 0.0725852813720703, 0.0726320037841797, 0.07140998077392578, 0.07132879638671875, 0.0713912353515625, 0.0725387191772461, 0.07165167999267578, 0.07172217559814453, 0.07175199890136719, 0.07178425598144532, 0.07165523529052735, 0.07167398071289062, 0.07160031890869141, 0.07156797027587891, 0.07167542266845703, 0.07169404602050782, 0.07172102355957032, 0.07143660736083984, 0.07141824340820313, 0.0714567642211914, 0.0718207015991211, 0.07156169891357422, 0.07153421020507812, 0.07161264038085938, 0.07153897857666015, 0.07160028839111328, 0.07156217956542969, 0.071662109375, 0.07157577514648437, 0.07162076568603516, 0.07157949066162109, 0.07151139068603515, 0.07155526733398437, 0.0715802230834961, 0.0715849609375, 0.0719012451171875, 0.07179740905761718, 0.07161257934570313, 0.07155817413330078, 0.07144956970214844, 0.07174143981933594, 0.07180083465576172, 0.07153446197509766, 0.07159136199951172, 0.07145763397216796, 0.07135158538818359, 0.07127490997314453, 0.07117769622802735, 0.07256543731689453, 0.07158751678466797, 0.07123999786376953, 0.0710814437866211, 0.07115225219726562, 0.0725991668701172, 0.0714232940673828, 0.07122633361816406, 0.07140557098388672, 0.07134207916259766, 0.07128195190429687, 0.07121891021728516, 0.07109894561767578, 0.07123363494873047, 0.07124521636962891, 0.0725196762084961, 0.07151324462890625, 0.07130403137207031, 0.07119462585449218, 0.07118029022216797, 0.0712943344116211, 0.07138384246826172, 0.07135123443603515, 0.07148636627197266, 0.07143424224853516, 
0.07148544311523437, 0.07138703918457032, 0.07145686340332032, 0.07147110748291016, 0.07154688262939453, 0.07144982147216797, 0.07151900482177734, 0.07141375732421874, 0.07149362945556641, 0.07138918304443359, 0.07143209838867187, 0.07147321319580079, 0.07120655822753906, 0.07141948699951171, 0.07131609344482422, 0.07141187286376953, 0.0713172836303711, 0.07172496032714844, 0.0718780517578125, 0.0715849609375, 0.07158092498779296, 0.07155760192871094, 0.07135574340820312, 0.07139523315429687, 0.07168701171875, 0.0715263671875, 0.07141471862792968, 0.07133081817626953, 0.07120588684082031, 0.07143116760253906, 0.07153282928466796, 0.0716164779663086, 0.07154252624511719, 0.07141785430908203, 0.07135846710205078, 0.0712806396484375, 0.071301025390625, 0.07110050964355469, 0.07160626983642578, 0.07127811431884766, 0.07266089630126953, 0.07130194854736328, 0.07131520080566406, 0.0715387191772461, 0.07150732421875, 0.07120751953125, 0.07137273406982422, 0.0714339828491211, 0.07144025421142577, 0.07182278442382813, 0.07119471740722656, 0.0710555191040039, 0.07111347198486329, 0.0723704605102539, 0.07152297973632812, 0.07129657745361329, 0.07125856018066407, 0.07145823669433594, 0.0719201889038086, 0.07147042846679688, 0.07151801300048828, 0.07169091033935547, 0.0712419204711914, 0.0721506576538086, 0.07162303924560547, 0.07145878601074218, 0.07129090881347656, 0.0713912353515625, 0.07141948699951171, 0.0713076171875, 0.07138515472412109, 0.07138310241699218, 0.07131881713867187, 0.07128336334228516, 0.07117174530029297, 0.07139158630371094, 0.07108758544921875, 0.07097609710693359, 0.07125593566894531, 0.07112207794189453, 0.07120921325683593, 0.07155152130126953, 0.07153257751464843, 0.07138729858398438, 0.07119586944580078, 0.07127740478515625, 0.07138658905029296, 0.07151261138916015, 0.07153062438964844, 0.07172412872314453, 0.07182335662841798, 0.0715271987915039, 0.07142201232910156, 0.07131731414794922, 0.07138041687011719, 0.07139794921875, 0.07160626983642578, 0.07138918304443359, 0.07145881652832031, 0.07124979400634765, 0.07159180450439454, 0.07144009399414063, 0.0711951675415039, 0.07162675476074219, 0.07116515350341797, 0.0710613784790039, 0.07118755340576172, 0.0711654052734375, 0.0711805419921875, 0.07121724700927734, 0.0728616943359375, 0.07135382080078125, 0.07127299499511719, 0.0712273941040039, 0.07107730865478516, 0.071085693359375, 0.07253449249267578, 0.07136061096191407, 0.07129283142089844, 0.07101837158203125, 0.0711251220703125, 0.07112064361572265, 0.07107405090332031, 0.07091964721679687, 0.07103654479980469, 0.07117865753173828, 0.07107023620605468, 0.07106966400146485, 0.07141375732421874, 0.07140086364746094, 0.07128534698486329, 0.07133184051513672, 0.07108367919921875, 0.07119484710693359, 0.0724104995727539, 0.07158246612548828, 0.07170368194580078, 0.07142063903808593, 0.07118045043945312, 0.07131731414794922, 0.07112889862060547, 0.07142546844482422, 0.07125440216064453, 0.07129964447021485, 0.07128268432617188, 0.07119833374023438, 0.07112322998046874, 0.07113919830322266, 0.07158806610107422, 0.07126044464111328, 0.07128240203857422, 0.07117967987060547, 0.07178300476074219, 0.07145683288574219, 0.07127442932128907, 0.07131340789794922, 0.07144038391113282, 0.07137014770507813, 0.07128704071044922, 0.07125241851806641, 0.07118978881835937, 0.07114611053466798, 0.07114332580566406, 0.07163718414306641, 0.07136656188964843, 0.07124323272705078, 0.07124956512451172, 0.07123040008544922, 0.071274658203125, 0.07148659515380859, 0.07161273956298828, 
0.07154108428955078, 0.07141171264648437, 0.07138918304443359, 0.07127257537841797, 0.0718787841796875, 0.07147920227050782, 0.07136406707763672, 0.07147984313964843, 0.07325743865966797, 0.0717475814819336, 0.07137840270996093, 0.07144297790527344, 0.07155023956298828, 0.07118463897705078, 0.0712232666015625, 0.07125363159179687, 0.07115795135498047, 0.07117225646972657, 0.07117407989501953, 0.0712733154296875, 0.0712925796508789, 0.07143228912353515, 0.07210393524169922, 0.07155206298828125, 0.07148242950439453, 0.07129280090332031, 0.07105535888671875, 0.07260332489013673, 0.07148332977294922, 0.07121139526367187, 0.07121478271484374, 0.07118265533447266, 0.07124582672119141, 0.07127040100097656, 0.07142301177978516, 0.07147609710693359, 0.07174726104736329, 0.071512451171875, 0.07135823822021484, 0.07147267150878907, 0.07132028961181641, 0.07130681610107421, 0.07122969818115234, 0.07132383728027344, 0.07121721649169922, 0.07135842895507813, 0.07119193267822266, 0.07113993835449219, 0.07132281494140626, 0.07161734771728516, 0.07128678131103515, 0.0713543701171875, 0.07128678131103515, 0.071325439453125, 0.07129138946533203, 0.07118150329589844, 0.07134265899658203, 0.07120486450195312, 0.07160124969482422, 0.07126512145996093, 0.07139949035644531, 0.07122089385986329, 0.07128713226318359, 0.07114125061035156, 0.07130738830566406, 0.07175167846679688, 0.07129497528076172, 0.07141375732421874, 0.07138623809814452, 0.07127945709228516, 0.0712642593383789, 0.07261196899414063, 0.07343116760253907, 0.07144992065429688, 0.07119478607177734, 0.07131375885009765, 0.07106371307373047, 0.0714032974243164, 0.07119667053222656, 0.07125628662109375, 0.0712742691040039, 0.07116422271728516, 0.07136224365234375, 0.07136447906494141, 0.07158386993408203, 0.07146086120605469, 0.07168732452392577, 0.07157231903076172, 0.07134003448486329, 0.07123967742919922, 0.07147481536865234, 0.07136431884765625, 0.07130300903320312, 0.07124870300292968, 0.07109398651123047, 0.07129526519775391, 0.07125606536865234, 0.07145590209960938, 0.0713345947265625, 0.07119667053222656, 0.07140278625488282, 0.07160924530029297, 0.07154895782470704, 0.07135225677490234, 0.07139289855957032, 0.07172329711914062, 0.07144662475585938, 0.0717597427368164, 0.07130918121337891, 0.07129737854003906, 0.07123958587646484, 0.07179468536376953, 0.07161804962158203, 0.0713729248046875, 0.07136726379394531, 0.07134102630615234, 0.0716951675415039, 0.07195875549316406, 0.0714310073852539, 0.07138191986083985, 0.07121849822998047, 0.07127670288085937, 0.07125885009765626, 0.0713419189453125, 0.07128054046630859, 0.07117027282714844, 0.07125788879394532, 0.07125199890136719, 0.07137619018554688, 0.07132032012939453, 0.0713710708618164, 0.07122825622558594, 0.07141410827636718, 0.0714263687133789]",tokens/s,14.002037714489914,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.462016,13880.918016,0.0,13478.395904,13476.849152,s,1,7.5127373046875,7.5127373046875,0.0,7.5127373046875,7.5127373046875,7.5127373046875,7.5127373046875,[7.5127373046875],,kWh,8.448187008358824e-06,9.244879588549403e-07,4.340281250000966e-06,1.371295621721473e-05,,MB,1312.055296,14117.896192,0.0,13702.791168,13671.637504,s,10,2.142603302001953,0.2142603302001953,0.0032002173542180906,0.21508135986328125,0.21687847747802735,0.21736887283325196,0.21776118911743164,"[0.20613471984863282, 0.21500837707519532, 0.21630178833007813, 0.21676950073242188, 0.21639010620117188, 0.21418392944335937, 0.21267900085449218, 0.21785926818847656, 0.21212226867675782, 0.21515434265136718]",tokens/s,1194.808202530095,kWh,6.200510575607786e-06,6.836348524032833e-07,4.122908390916807e-06,1.1007053818927877e-05,tokens/kWh,23257813.054368734,MB,1368.502272,14119.993344,0.0,13702.791168,13671.640064,s,10,37.22296093749999,3.7222960937500007,0.009062326274916746,3.7242001953125,3.7331612304687503,3.733816455078125,3.734340634765625,"[3.705755615234375, 3.71341015625, 3.7137265625, 3.71710986328125, 3.7257529296875, 3.722679443359375, 3.733015625, 3.7344716796875, 3.731318115234375, 3.725720947265625]",tokens/s,16.92503723864995,kWh,0.0001092476945518876,1.2050020720617328e-05,7.260618077008299e-05,0.00019390389604258786,tokens/kWh,324903.2189954712,,s,630,37.21966119003297,0.0590788272857666,0.0005726381996114669,0.059006175994873046,0.05951906929016113,0.05966496410369873,0.06263012916564942,"[0.06160947036743164, 0.05885203170776367, 0.05832908630371094, 0.05827289581298828, 0.05827824020385742, 0.05835830307006836, 0.05838848114013672, 0.05847449493408203, 0.058243072509765625, 0.05830553436279297, 0.05839974212646484, 0.058912769317626956, 0.0587407341003418, 0.058638206481933595, 0.05865484619140625, 0.058845184326171876, 0.059109375, 0.059109375, 0.05896761703491211, 0.058910369873046875, 0.0586923828125, 0.058449920654296876, 0.0584192008972168, 0.05849702453613281, 0.05853308868408203, 0.05853817749023438, 0.058571361541748045, 0.05873395156860352, 0.05873027038574219, 0.05873955154418945, 0.05864812850952148, 0.058698177337646484, 0.058877952575683595, 0.05896806335449219, 0.058982398986816405, 0.059010112762451175, 0.05892192077636719, 0.058926239013671874, 0.058778465270996096, 0.05866291046142578, 0.05873049545288086, 0.05882265472412109, 0.05876502227783203, 0.05863862228393555, 0.05876531219482422, 0.05883699035644531, 0.058787841796875, 0.05885030364990235, 0.05900377655029297, 0.05912345504760742, 0.05923263931274414, 0.05914828872680664, 0.05909065628051758, 0.05913961410522461, 0.059415294647216794, 0.05895568084716797, 0.05872572708129883, 0.059232959747314455, 0.05877542495727539, 0.05880031967163086, 0.05891439819335938, 0.05898076629638672, 0.0589760627746582, 0.06244979095458984, 0.059692577362060545, 0.05884719848632813, 0.05847702407836914, 0.05840281677246094, 0.0587503662109375, 0.05862051010131836, 0.05847177505493164, 0.05853046417236328, 0.058568702697753904, 0.0586217269897461, 0.05862403106689453, 0.058617408752441404, 0.05851609420776367, 0.05861171340942383, 0.05886771011352539, 0.059201377868652344, 0.05917228698730469, 0.05907939147949219, 0.05895727920532227, 0.05885551834106445, 0.05867555236816406, 0.05864572906494141, 0.058862014770507814, 0.05869750213623047, 0.05865852737426758, 0.058628128051757815, 0.05859145736694336, 0.05865132904052734, 0.05867270278930664, 0.05867305755615235, 0.0587883186340332, 
0.05920272064208985, 0.059435520172119144, 0.05935968017578125, 0.059174209594726565, 0.05912396621704102, 0.05932806396484375, 0.05886243057250977, 0.05866700744628906, 0.05875711822509765, 0.05869158554077149, 0.05871155166625976, 0.05898060989379883, 0.059015422821044924, 0.05911097717285156, 0.05891116714477539, 0.05889023971557617, 0.05905817413330078, 0.05938585662841797, 0.05918076705932617, 0.05897449493408203, 0.05893529510498047, 0.058993663787841794, 0.05885030364990235, 0.05889228820800781, 0.05900207901000976, 0.05901391983032227, 0.059000831604003906, 0.05901689529418945, 0.05897248077392578, 0.05902511978149414, 0.05911171340942383, 0.0625830078125, 0.05954140853881836, 0.058599040985107424, 0.05837257766723633, 0.0583636474609375, 0.058441215515136716, 0.05844863891601562, 0.058449920654296876, 0.05851955032348633, 0.05852931213378906, 0.05842172622680664, 0.05846835327148438, 0.058396671295166014, 0.0585145263671875, 0.05855324935913086, 0.05895951843261719, 0.059343231201171874, 0.0594595832824707, 0.059281375885009764, 0.059057857513427736, 0.05886598587036133, 0.05859507369995117, 0.05843584060668945, 0.05849292755126953, 0.05863423919677734, 0.05856249618530274, 0.05860755157470703, 0.05868966293334961, 0.0586130256652832, 0.058697662353515624, 0.058818687438964845, 0.05894588851928711, 0.05901753616333008, 0.05921996688842773, 0.05920534515380859, 0.05928521728515625, 0.05924710464477539, 0.059033374786376956, 0.05891443252563477, 0.05874867248535156, 0.05870236968994141, 0.05881894302368164, 0.058810367584228515, 0.05882060623168945, 0.05894937515258789, 0.05876556777954101, 0.058666240692138674, 0.059039775848388674, 0.059140830993652346, 0.059338752746582034, 0.059448448181152344, 0.059355358123779296, 0.059685569763183594, 0.05951679992675781, 0.059188896179199216, 0.059184768676757815, 0.058983230590820314, 0.05902950286865234, 0.0590561294555664, 0.059049537658691406, 0.0588900146484375, 0.05904655838012695, 0.059015167236328124, 0.06309932708740235, 0.06004988861083985, 0.05903564834594727, 0.05856662368774414, 0.05845734405517578, 0.05851990509033203, 0.05844220733642578, 0.058456062316894535, 0.05857865524291992, 0.058517791748046874, 0.05851119995117188, 0.0584436149597168, 0.058472320556640624, 0.05854048156738281, 0.05857791900634766, 0.05928860855102539, 0.0594923210144043, 0.05971753692626953, 0.059496543884277345, 0.05901286315917969, 0.05872844696044922, 0.05870150375366211, 0.058608192443847656, 0.0585747184753418, 0.05869785690307617, 0.05868492889404297, 0.05872076797485352, 0.05883270263671875, 0.05875321578979492, 0.05871590423583985, 0.05901337432861328, 0.05892675018310547, 0.059084190368652346, 0.05929260635375976, 0.059308032989501956, 0.05929983901977539, 0.059243648529052735, 0.059004993438720704, 0.0588787841796875, 0.05881651306152344, 0.05895126342773437, 0.058929569244384764, 0.05895372772216797, 0.05904540634155273, 0.058882526397705075, 0.05870320129394531, 0.05872092819213867, 0.05883679962158203, 0.05910547256469727, 0.059254783630371094, 0.05937136077880859, 0.05949577713012695, 0.05945625686645508, 0.05932790374755859, 0.05911004638671875, 0.05896768188476562, 0.05889814376831055, 0.058900127410888674, 0.05879043197631836, 0.05886812973022461, 0.059090625762939455, 0.05900735855102539, 0.05896806335449219, 0.06305996704101563, 0.060028926849365234, 0.05902092742919922, 0.05882918548583985, 0.05866684722900391, 0.0587672004699707, 0.05877382278442383, 0.05864198303222656, 0.05855680084228516, 0.058484798431396486, 
0.05860287857055664, 0.0584505615234375, 0.05858041763305664, 0.05856313705444336, 0.05857484817504883, 0.059025409698486325, 0.05957427215576172, 0.059635711669921876, 0.05935923385620117, 0.05896787261962891, 0.058777729034423826, 0.05867116928100586, 0.05856972885131836, 0.058718238830566406, 0.058708961486816404, 0.05865574264526367, 0.058605918884277346, 0.05864089584350586, 0.05882281494140625, 0.05902336120605469, 0.05887516784667969, 0.058914752960205076, 0.05916511917114258, 0.059604961395263674, 0.05953779220581055, 0.05966979217529297, 0.05942959976196289, 0.05926291275024414, 0.059038944244384765, 0.05918396759033203, 0.059098464965820316, 0.05913212966918945, 0.05897177505493164, 0.05895660781860351, 0.05902950286865234, 0.05917491149902344, 0.059154430389404294, 0.05907660675048828, 0.05922611236572266, 0.05933260726928711, 0.059453441619873044, 0.05966438293457031, 0.05963359832763672, 0.05954729461669922, 0.05941494369506836, 0.059291648864746097, 0.059350879669189456, 0.05941823959350586, 0.0593554573059082, 0.059354526519775394, 0.05928019332885742, 0.059130977630615235, 0.05927167892456055, 0.06264937591552734, 0.05975126266479492, 0.05892051315307617, 0.05882870483398438, 0.05887209701538086, 0.05896012878417969, 0.058973888397216796, 0.05903814315795899, 0.05895782470703125, 0.05889023971557617, 0.05886361694335938, 0.05873455810546875, 0.05869097518920898, 0.05897052764892578, 0.058894561767578124, 0.0590274543762207, 0.05963164901733398, 0.05950870513916016, 0.05924227142333984, 0.059111137390136716, 0.05891884613037109, 0.05897654342651367, 0.05854345703125, 0.05858195114135742, 0.058607009887695315, 0.05872700881958008, 0.058722270965576175, 0.058713729858398435, 0.05874918365478515, 0.058775489807128906, 0.05886751937866211, 0.05892057418823242, 0.059200286865234375, 0.0591646728515625, 0.05901839828491211, 0.058913440704345704, 0.05893734359741211, 0.05892319869995117, 0.05884928131103516, 0.058799808502197265, 0.05896633529663086, 0.05909052658081055, 0.05892115020751953, 0.05891644668579102, 0.05886800003051758, 0.05893564987182617, 0.05872751998901367, 0.058820575714111326, 0.05907551956176758, 0.059203136444091795, 0.05922860717773438, 0.05927936172485351, 0.05933443069458008, 0.05935331344604492, 0.05941862487792969, 0.059299713134765626, 0.05935030364990234, 0.059380577087402346, 0.05938345718383789, 0.05944332885742187, 0.059267295837402346, 0.05933260726928711, 0.059248126983642575, 0.0627578239440918, 0.05985603332519531, 0.058906494140625, 0.058665313720703126, 0.05866681671142578, 0.058778430938720705, 0.058793983459472655, 0.05873049545288086, 0.05874038314819336, 0.058829151153564456, 0.05878169631958008, 0.058963966369628903, 0.05893075180053711, 0.0590074234008789, 0.05904793548583984, 0.05935059356689453, 0.05976655960083008, 0.05981865692138672, 0.059627521514892576, 0.059412479400634766, 0.05911859130859375, 0.058840065002441405, 0.05879808044433594, 0.0587955207824707, 0.05898291015625, 0.058959873199462894, 0.05895782470703125, 0.05897216033935547, 0.059049983978271485, 0.05909503936767578, 0.05913324737548828, 0.05933644866943359, 0.05945033645629883, 0.059700286865234375, 0.059587486267089845, 0.05944319915771484, 0.05917900848388672, 0.05912496185302735, 0.059232959747314455, 0.05924774551391602, 0.05906070327758789, 0.059063873291015624, 0.0591200942993164, 0.059138526916503904, 0.05904383850097656, 0.05915177536010742, 0.059386463165283204, 0.05902336120605469, 0.059342079162597657, 0.05984131240844726, 0.059574241638183596, 
0.0595722541809082, 0.0594813117980957, 0.05933692932128906, 0.059466270446777346, 0.059344894409179685, 0.05937152099609375, 0.059308032989501956, 0.05928345489501953, 0.05926816177368164, 0.059362239837646484, 0.05933574295043945, 0.05935539245605469, 0.06297183990478515, 0.059966880798339846, 0.05914278411865234, 0.05877254486083985, 0.05857510375976562, 0.058718463897705075, 0.05878572845458984, 0.058699745178222656, 0.05891052627563476, 0.05889228820800781, 0.05897289657592773, 0.058949630737304685, 0.05901302337646484, 0.05910095977783203, 0.05916678237915039, 0.059353248596191406, 0.05952105712890625, 0.05951884841918945, 0.05936352157592773, 0.05919251251220703, 0.058966846466064454, 0.05892505645751953, 0.05890662384033203, 0.05889148712158203, 0.05894582366943359, 0.05905846405029297, 0.059214046478271484, 0.059082401275634765, 0.05893769454956055, 0.05903564834594727, 0.05903974533081055, 0.059211360931396485, 0.059541919708251956, 0.05941846466064453, 0.059420833587646486, 0.05947964859008789, 0.059302303314208986, 0.059356414794921875, 0.05920025634765625, 0.05911347198486328, 0.0590431022644043, 0.059179744720458984, 0.059230209350585934, 0.05932207870483398, 0.05931977462768555, 0.059147071838378903, 0.059240447998046876, 0.059254783630371094, 0.0595777587890625, 0.059550304412841794, 0.059578369140625, 0.059516128540039064, 0.05962627029418945, 0.05953305435180664, 0.05945779037475586, 0.05939548873901367, 0.05931068801879883, 0.059254783630371094, 0.059215873718261716, 0.059426815032958984, 0.05948825454711914, 0.059543422698974606, 0.059248767852783206, 0.06286083221435547, 0.05990447998046875, 0.05908067321777344, 0.05896188735961914, 0.05882796859741211, 0.05893820953369141, 0.05884511947631836, 0.058906688690185546, 0.05872758483886719, 0.05887062454223633, 0.058947582244873044, 0.059118942260742186, 0.05894124984741211, 0.058864479064941404, 0.05913600158691406, 0.05948806381225586, 0.05981203079223633, 0.05995040130615235, 0.05956639862060547, 0.059348384857177736, 0.059112415313720704, 0.059092609405517575, 0.05890022277832031, 0.0589315185546875, 0.05898044967651367, 0.05907440185546875, 0.059048320770263674, 0.05925888061523438, 0.05916262435913086, 0.05921177673339844, 0.05924179077148437, 0.05939459228515625, 0.059563838958740234, 0.059765087127685544, 0.05966543960571289, 0.059377918243408205, 0.05905433654785156, 0.05902121734619141, 0.05894406509399414, 0.058799198150634766, 0.05874764633178711, 0.05887606430053711, 0.05887376022338867, 0.05901855850219727, 0.058872608184814455, 0.05884636688232422, 0.058951553344726564, 0.05895471954345703, 0.05915990447998047, 0.05924854278564453, 0.059381633758544924, 0.05952934265136719, 0.059412574768066405, 0.05931280136108399, 0.059140094757080076, 0.058982398986816405, 0.059205631256103515, 0.05953059387207031, 0.0590588493347168, 0.05942639923095703, 0.059205665588378906, 0.059297534942626955, 0.05923487854003906, 0.06285004806518554, 0.05984374237060547, 0.05886444854736328, 0.058638080596923825, 0.05863759994506836, 0.05860451126098633, 0.058552318572998044, 0.05860678482055664, 0.05858137512207031, 0.058493377685546875, 0.05849497604370117, 0.05859436798095703, 0.05856118392944336, 0.05868572616577149, 0.0586580810546875, 0.05922889709472656, 0.05970249557495117, 0.05971023941040039, 0.059396095275878906, 0.05935923385620117, 0.058931198120117184, 0.0587407341003418, 0.05850316619873047, 0.05864243316650391, 0.058545951843261716, 0.05872019195556641, 0.05867139053344726, 0.0587960319519043, 0.058755039215087894, 
0.05888617706298828, 0.05896384048461914, 0.05891263961791992, 0.05907199859619141, 0.05965289688110351, 0.05965411376953125, 0.05948211288452149, 0.059420673370361325, 0.05933369445800781, 0.05918339157104492, 0.059294368743896486, 0.059262977600097654, 0.05911920166015625, 0.059111839294433595, 0.059049983978271485, 0.05922566223144531, 0.059314399719238284, 0.05926681518554688, 0.05923657608032227, 0.05929580688476563, 0.05956735992431641, 0.05957932662963867, 0.059672161102294924, 0.05960540771484375, 0.05954969787597656, 0.05937152099609375, 0.05918515014648437, 0.059222015380859375, 0.05923612976074219, 0.05915465545654297, 0.05897830581665039, 0.05902115249633789, 0.05912137603759766, 0.05907295989990234]",tokens/s,16.926537745290048,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,782.716928,3286.106112,0.0,2883.584,2829.29664,s,1,7.05448681640625,7.05448681640625,0.0,7.05448681640625,7.05448681640625,7.05448681640625,7.05448681640625,[7.05448681640625],,kWh,4.289453437543974e-06,4.5831804149439347e-07,9.88611902008607e-07,5.736383381046974e-06,,MB,1291.137024,3307.077632,0.0,2891.972608,2759.921664,s,10,0.3990406341552734,0.03990406341552734,0.0005877813039314963,0.03987228775024414,0.04058767890930176,0.04093025493621826,0.041204315757751464,"[0.04127283096313476, 0.039656639099121094, 0.039533279418945313, 0.039135166168212894, 0.039924800872802736, 0.03993977737426758, 0.040511550903320315, 0.03926559829711914, 0.03998121643066406, 0.039819774627685545]",tokens/s,6415.386757339257,kWh,1.3001758981296611e-06,1.4338596069335414e-07,8.602587128978298e-07,2.3038205717208454e-06,tokens/kWh,111119764.76917213,MB,1324.007424,3309.174784,0.0,2891.972608,2771.964928,s,10,10.9634052734375,1.09634052734375,0.0043184483344112365,1.0963485717773436,1.1010726074218748,1.1025677368164062,1.1037638403320313,"[1.092314453125, 1.0980943603515625, 1.089384033203125, 1.093124755859375, 1.0948165283203124, 1.092976806640625, 1.100010498046875, 1.1040628662109375, 1.1007403564453124, 1.097880615234375]",tokens/s,57.46389778423906,kWh,3.2161596090622716e-05,3.546856123433878e-06,2.13063145759004e-05,5.701476678995701e-05,tokens/kWh,1104976.8954083887,,s,630,10.961093379974361,0.01739856092059423,0.0002406430922410485,0.017352527618408203,0.01759302749633789,0.01774413137435913,0.018343591346740726,"[0.01746668815612793, 0.017234624862670897, 0.017305343627929688, 0.017184768676757813, 0.01717683219909668, 0.01761075210571289, 0.017208959579467772, 0.017824127197265626, 0.01726438331604004, 0.017387359619140626, 0.017150623321533203, 0.017202592849731444, 0.01730556869506836, 0.017293695449829102, 0.017328128814697266, 0.017349952697753905, 0.017230527877807617, 0.017399232864379884, 0.01737376022338867, 0.01738960075378418, 0.017225376129150392, 0.01725472068786621, 0.017235551834106445, 0.017248672485351564, 0.017227296829223634, 0.017744159698486327, 0.01743276786804199, 
0.017372415542602538, 0.017306623458862306, 0.017342208862304687, 0.017241567611694337, 0.017565568923950194, 0.017404256820678712, 0.01766793632507324, 0.017408031463623047, 0.017328575134277345, 0.017249311447143555, 0.017328672409057617, 0.01726108741760254, 0.017982431411743164, 0.01732086372375488, 0.017317920684814452, 0.017294368743896484, 0.01725254440307617, 0.017322303771972657, 0.017255136489868164, 0.01721446418762207, 0.01726540756225586, 0.017311840057373046, 0.01731376075744629, 0.01714374351501465, 0.017246271133422853, 0.017260543823242186, 0.017194944381713866, 0.017359872817993165, 0.017281856536865235, 0.017363136291503906, 0.017350080490112305, 0.017295936584472656, 0.01728118324279785, 0.017247264862060546, 0.017343456268310548, 0.017319391250610352, 0.018092063903808592, 0.017342367172241212, 0.01719715118408203, 0.017526432037353514, 0.018307647705078124, 0.017420543670654296, 0.017352224349975586, 0.017334720611572266, 0.017432096481323243, 0.01729737663269043, 0.01733087921142578, 0.017367040634155274, 0.01765990447998047, 0.01964134407043457, 0.017374208450317383, 0.0172741756439209, 0.01734521675109863, 0.017272607803344726, 0.017203424453735353, 0.017252351760864256, 0.017254400253295898, 0.017352703094482422, 0.01722163200378418, 0.017222944259643554, 0.017240800857543946, 0.017176576614379883, 0.01734422492980957, 0.01741548728942871, 0.01731193542480469, 0.01734646415710449, 0.017270816802978515, 0.01751046371459961, 0.017259456634521483, 0.017217376708984374, 0.01721548843383789, 0.017242111206054688, 0.017369087219238282, 0.01735862350463867, 0.01733260726928711, 0.017376256942749024, 0.017341535568237306, 0.01726608085632324, 0.017262239456176758, 0.01734111976623535, 0.017324031829833983, 0.01726611137390137, 0.017260799407958983, 0.017310016632080077, 0.017361183166503907, 0.017236000061035157, 0.01748316764831543, 0.017250591278076172, 0.017297407150268555, 0.017267967224121095, 0.01728371238708496, 0.017342592239379884, 0.01852342414855957, 0.017697568893432616, 0.017393823623657226, 0.017477407455444335, 0.017569664001464844, 0.017604543685913087, 0.017636831283569337, 0.01729270362854004, 0.01735558319091797, 0.017321952819824217, 0.017254400253295898, 0.01746112060546875, 0.017309823989868165, 0.017260000228881835, 0.017233951568603516, 0.017198879241943358, 0.017270751953125, 0.017195327758789063, 0.017214080810546876, 0.017184383392333986, 0.01738105583190918, 0.01720783996582031, 0.017190271377563477, 0.01720585632324219, 0.01720319938659668, 0.017254400253295898, 0.01726464080810547, 0.017217248916625977, 0.017366304397583007, 0.017223743438720702, 0.017287872314453126, 0.01758028793334961, 0.017250303268432618, 0.01726028823852539, 0.01723347282409668, 0.017226432800292967, 0.017153343200683593, 0.017234624862670897, 0.01717452812194824, 0.017217536926269532, 0.01731372833251953, 0.017239679336547853, 0.017225439071655274, 0.017658592224121094, 0.01738528060913086, 0.0172589111328125, 0.01721743965148926, 0.01752195167541504, 0.01732054328918457, 0.01740595245361328, 0.017577184677124023, 0.017289663314819338, 0.01716054344177246, 0.017172479629516603, 0.01733945655822754, 0.017238431930541993, 0.01724278450012207, 0.01790348815917969, 0.017251903533935548, 0.01715670394897461, 0.017205087661743165, 0.01716239929199219, 0.01727881622314453, 0.017125375747680666, 0.017234079360961913, 0.017219423294067383, 0.017273887634277344, 0.01729840087890625, 0.017317888259887695, 0.017475391387939455, 0.01717398452758789, 0.01729180717468262, 
0.017254400253295898, 0.017209344863891602, 0.017329248428344726, 0.017269664764404297, 0.01737843132019043, 0.01729622459411621, 0.01812441635131836, 0.017243839263916014, 0.017258848190307617, 0.017273216247558593, 0.017456607818603517, 0.017195871353149414, 0.017227327346801758, 0.017329408645629884, 0.017289312362670898, 0.0174354248046875, 0.017274208068847656, 0.017228479385375976, 0.017192384719848634, 0.01730393600463867, 0.017186880111694336, 0.01721356773376465, 0.017131071090698242, 0.017754335403442383, 0.017733823776245116, 0.01714182472229004, 0.017273887634277344, 0.01788313674926758, 0.017384351730346678, 0.01726464080810547, 0.017395456314086913, 0.017330432891845705, 0.017260543823242186, 0.017383424758911133, 0.017326080322265625, 0.017289215087890625, 0.017401216506958007, 0.01731033515930176, 0.017313440322875975, 0.01735625648498535, 0.017181440353393553, 0.01796108818054199, 0.01742451286315918, 0.017311616897583006, 0.017286815643310548, 0.01721820831298828, 0.01723360061645508, 0.017281120300292968, 0.017430496215820313, 0.01733420753479004, 0.017299007415771485, 0.017352575302124022, 0.017250463485717772, 0.017625696182250978, 0.017362464904785158, 0.017361215591430664, 0.017350624084472657, 0.01734774398803711, 0.017277151107788085, 0.017377920150756836, 0.017249536514282227, 0.017379199981689453, 0.01745289611816406, 0.01745680046081543, 0.01737398338317871, 0.017391008377075197, 0.01731439971923828, 0.0174202880859375, 0.017315839767456053, 0.017377279281616212, 0.01762486457824707, 0.01759663963317871, 0.01733964729309082, 0.01734105682373047, 0.017323551177978517, 0.017343391418457033, 0.017286848068237305, 0.01720947265625, 0.017147775650024413, 0.017526592254638672, 0.017375423431396485, 0.017226816177368164, 0.017292415618896485, 0.01733203125, 0.017143808364868163, 0.0171909122467041, 0.0171944637298584, 0.017201791763305663, 0.017366943359375, 0.017193151473999024, 0.017304704666137694, 0.01726348876953125, 0.017159519195556642, 0.017339967727661134, 0.01722256088256836, 0.017278976440429687, 0.018057247161865235, 0.017274688720703125, 0.017215648651123048, 0.017344671249389647, 0.01726448059082031, 0.017252351760864256, 0.017268735885620116, 0.017297407150268555, 0.017310943603515625, 0.017298208236694337, 0.017268735885620116, 0.0172708797454834, 0.017245311737060547, 0.01763199996948242, 0.017352895736694338, 0.017385311126708984, 0.017344736099243165, 0.018140960693359375, 0.017373184204101562, 0.017387231826782226, 0.01730384063720703, 0.017360895156860352, 0.01740902328491211, 0.018592767715454102, 0.01764508819580078, 0.017361215591430664, 0.017285280227661133, 0.017338144302368165, 0.01725971221923828, 0.017390399932861327, 0.01740185546875, 0.0172293758392334, 0.017281471252441408, 0.017286687850952148, 0.017332511901855467, 0.017266912460327147, 0.01726630401611328, 0.01722198486328125, 0.017225088119506834, 0.01749260711669922, 0.017214752197265624, 0.01727884864807129, 0.017230688095092775, 0.017258495330810548, 0.01720319938659668, 0.01719705581665039, 0.017202911376953126, 0.017479488372802734, 0.017224159240722656, 0.017141759872436522, 0.017252351760864256, 0.017219488143920898, 0.01722175979614258, 0.017190752029418947, 0.01721673583984375, 0.01719161605834961, 0.017921247482299806, 0.01759116744995117, 0.01716441535949707, 0.01720035171508789, 0.01718134307861328, 0.017176704406738283, 0.017168384552001953, 0.017172319412231445, 0.01718400001525879, 0.01716111946105957, 0.017192544937133788, 0.017152351379394533, 0.017180736541748048, 
0.017306976318359375, 0.017609184265136718, 0.01776233673095703, 0.017499807357788087, 0.01754982376098633, 0.017593631744384764, 0.017470176696777345, 0.01744486427307129, 0.017385055541992187, 0.017406368255615236, 0.017458463668823244, 0.017355487823486327, 0.01763737678527832, 0.01746124839782715, 0.01739571189880371, 0.01739084815979004, 0.017398815155029296, 0.017888671875, 0.01769203186035156, 0.017392576217651366, 0.017342559814453123, 0.017477535247802736, 0.01745804786682129, 0.017470943450927735, 0.017492511749267577, 0.017862464904785155, 0.017349088668823242, 0.017317663192749022, 0.01737107276916504, 0.017295360565185547, 0.01732601547241211, 0.017381439208984373, 0.017386655807495117, 0.017397951126098633, 0.017324703216552734, 0.017293312072753905, 0.017481311798095703, 0.01734288024902344, 0.017744096755981445, 0.018171680450439452, 0.018358272552490236, 0.017469440460205078, 0.017438047409057616, 0.017346975326538085, 0.017360992431640625, 0.01728118324279785, 0.017337791442871092, 0.01735500717163086, 0.017323936462402344, 0.01736739158630371, 0.01792972755432129, 0.017322240829467775, 0.017369407653808594, 0.017459199905395507, 0.01741414451599121, 0.017401567459106447, 0.017451168060302735, 0.0175863037109375, 0.01744607925415039, 0.01751475143432617, 0.017343040466308593, 0.017337343215942384, 0.017376256942749024, 0.017451007843017577, 0.017434623718261717, 0.01759436798095703, 0.017375232696533204, 0.017430496215820313, 0.017463327407836914, 0.017383424758911133, 0.01761894416809082, 0.017304672241210937, 0.017490848541259766, 0.017332223892211913, 0.01736444854736328, 0.017348352432250976, 0.017404287338256837, 0.01732659149169922, 0.017448863983154296, 0.017379072189331053, 0.017541376113891602, 0.017575935363769533, 0.01743417549133301, 0.017714975357055664, 0.01731564712524414, 0.01742563247680664, 0.01740812873840332, 0.017375520706176758, 0.017424768447875976, 0.017346559524536134, 0.017356800079345702, 0.017309696197509765, 0.017276927947998046, 0.017425888061523436, 0.017367488861083986, 0.01732364845275879, 0.017391103744506836, 0.017343456268310548, 0.017424224853515625, 0.0173155517578125, 0.01736953544616699, 0.017319936752319336, 0.017342144012451172, 0.017342784881591796, 0.017266592025756835, 0.01737913513183594, 0.017313535690307618, 0.01733072090148926, 0.017297407150268555, 0.017311744689941407, 0.01729852867126465, 0.017418432235717773, 0.017437408447265625, 0.01742848014831543, 0.017431936264038085, 0.017379968643188477, 0.017382911682128906, 0.01749836730957031, 0.019832256317138672, 0.01847318458557129, 0.019085344314575196, 0.017450912475585938, 0.017568639755249024, 0.01749760055541992, 0.017553855895996093, 0.017362783432006836, 0.017311744689941407, 0.017860767364501953, 0.017544191360473634, 0.01737222480773926, 0.01744883155822754, 0.01730508804321289, 0.017396127700805664, 0.017405920028686524, 0.0173939208984375, 0.017404960632324218, 0.01786476707458496, 0.017435264587402344, 0.017672256469726564, 0.01774720001220703, 0.017910528182983398, 0.017972671508789062, 0.01763590431213379, 0.01736419105529785, 0.017537824630737303, 0.01741619110107422, 0.01746892738342285, 0.017477407455444335, 0.01756399917602539, 0.017467391967773437, 0.01749932861328125, 0.017533695220947266, 0.01754444885253906, 0.017403968811035155, 0.01753900718688965, 0.017447744369506836, 0.017358848571777344, 0.017444000244140626, 0.01740611267089844, 0.017662303924560547, 0.017422687530517577, 0.01748921585083008, 0.017741792678833006, 0.01744339179992676, 
0.01741007995605469, 0.01755353546142578, 0.017506303787231444, 0.01741619110107422, 0.017532928466796875, 0.01742233657836914, 0.01740390396118164, 0.017592031478881835, 0.018252288818359375, 0.01742006492614746, 0.01764575958251953, 0.017517791748046876, 0.0173656005859375, 0.01747091293334961, 0.017380191802978517, 0.01743574333190918, 0.017592960357666016, 0.017408000946044923, 0.017413280487060548, 0.017445280075073243, 0.01744326400756836, 0.01735660743713379, 0.017404096603393555, 0.01742848014831543, 0.017399776458740236, 0.017442848205566405, 0.01760256004333496, 0.01742438316345215, 0.017647647857666017, 0.017559520721435545, 0.017512351989746093, 0.017520736694335938, 0.017508575439453125, 0.01743574333190918, 0.017357568740844726, 0.017459135055541992, 0.017367040634155274, 0.017350656509399414, 0.01739366340637207, 0.017366239547729492, 0.017300256729125975, 0.017354591369628906, 0.017326400756835936, 0.01733206367492676, 0.017349855422973633, 0.017400096893310547, 0.0173156795501709, 0.017480384826660155, 0.017530784606933594, 0.01743280029296875, 0.017516704559326173, 0.017469440460205078, 0.01748134422302246, 0.017479263305664062, 0.01739036750793457, 0.017350656509399414, 0.017407039642333984, 0.017396671295166016, 0.01742780876159668, 0.017350719451904296, 0.017441728591918945, 0.017386144638061523, 0.017430944442749022, 0.017377887725830078, 0.017307647705078123, 0.017459199905395507, 0.01744895935058594, 0.017303552627563477, 0.017321983337402345, 0.017426559448242188, 0.017330047607421874, 0.017389503479003907, 0.017313215255737306, 0.018045568466186525, 0.017504255294799806, 0.017398111343383788, 0.01739334487915039, 0.017463264465332032, 0.017335872650146484, 0.017308095932006835, 0.017377279281616212, 0.017336320877075196, 0.017637216567993164, 0.01733033561706543, 0.01763478469848633, 0.017336448669433593, 0.01748214340209961, 0.01748303985595703, 0.017423168182373047, 0.01740185546875, 0.017485727310180665, 0.017379583358764647, 0.017382495880126952, 0.01744758415222168, 0.01750806427001953, 0.017445152282714843, 0.017463327407836914, 0.017356767654418945, 0.01731292724609375, 0.01738409614562988, 0.017352479934692383, 0.017332639694213867, 0.017296960830688476, 0.01737548828125, 0.017434560775756835, 0.01733452796936035, 0.01743280029296875, 0.017345472335815428, 0.017650239944458006, 0.017377599716186524]",tokens/s,57.476017962860695,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,838.209536,9637.39648,0.0,9242.148864,8603.568128,s,1,7.56006689453125,7.56006689453125,0.0,7.56006689453125,7.56006689453125,7.56006689453125,7.56006689453125,[7.56006689453125],,kWh,1.2805606391702895e-05,1.4049791025938898e-06,6.6475053179942956e-06,2.085809081229108e-05,,MB,1194.815488,9889.05472,0.0,9481.224192,8972.090368,s,10,1.128035583496094,0.11280355834960938,0.0011751249572338504,0.11267300796508789,0.11414554901123046,0.11428141555786132,0.11439010879516602,"[0.11201679992675781, 0.11153091430664062, 0.11100089263916016, 0.1131654052734375, 0.11218061065673827, 0.11388262176513672, 0.11441728210449219, 0.11393654632568359, 0.11178915405273437, 0.1141153564453125]",tokens/s,2269.431955387305,kWh,3.5066526572917507e-06,3.867173765259819e-07,2.328391412976124e-06,6.221761446793857e-06,tokens/kWh,41145904.12847147,MB,1223.151616,9893.249024,0.0,9485.418496,8972.092928,s,10,25.124321044921878,2.512432104492188,0.01365794814109764,2.508675903320312,2.5302686279296878,2.5304315551757814,2.530561896972656,"[2.523666748046875, 2.509734130859375, 2.530594482421875, 2.527587890625, 2.530232421875, 2.502421875, 2.49313671875, 2.495555908203125, 2.503773193359375, 2.50761767578125]",tokens/s,25.07530447782331,kWh,7.272253918687309e-05,8.02116478447727e-06,4.820976012862395e-05,0.0001289534640999743,tokens/kWh,488548.333615588,,s,630,25.12142214202883,0.03987527324131557,0.0008178128978666541,0.03974924850463867,0.04038665008544922,0.04076124038696289,0.044132319068908694,"[0.04416851043701172, 0.04000019073486328, 0.03940556716918946, 0.03972051239013672, 0.039725440979003906, 0.039647296905517576, 0.03943219375610352, 0.04027801513671875, 0.039779903411865235, 0.039735744476318356, 0.04018399810791016, 0.039707744598388675, 0.03994112014770508, 0.03994384002685547, 0.039516448974609375, 0.040199966430664064, 0.039792640686035156, 0.03975167846679688, 0.03993600082397461, 0.040013824462890625, 0.040013824462890625, 0.03999667358398438, 0.03985897445678711, 0.04008752059936523, 0.039755775451660154, 0.03988889694213867, 0.0397918701171875, 0.03990399932861328, 0.03994236755371094, 0.03968115234375, 0.039760543823242185, 0.039757694244384766, 0.040193214416503906, 0.039801151275634765, 0.039828033447265626, 0.040419391632080075, 0.04000153732299805, 0.039726112365722654, 0.03989193725585938, 0.03954457473754883, 0.039815425872802734, 0.040129791259765624, 0.03973936080932617, 0.039678752899169924, 0.04025324630737305, 0.03996895980834961, 0.039814334869384765, 0.03992659378051758, 0.039997665405273435, 0.043370079040527344, 0.0397457275390625, 0.03973494338989258, 0.039780033111572265, 0.04000425720214844, 0.039890209197998044, 0.03996131134033203, 0.03978854370117187, 0.04299980926513672, 0.040011489868164066, 0.04002025604248047, 0.040199649810791015, 0.039909919738769534, 0.03989871978759765, 0.04362803268432617, 0.039938526153564455, 0.03957350540161133, 0.039777313232421875, 0.04047766494750977, 0.04005062484741211, 0.03976134490966797, 0.040256126403808594, 0.0399441909790039, 0.03975065612792969, 0.039638015747070314, 0.03959366226196289, 0.0398191032409668, 0.03998972702026367, 0.03973484802246094, 0.03990367889404297, 0.04003180694580078, 0.039500064849853515, 0.03930944061279297, 0.039690273284912106, 0.04067132949829102, 0.039699615478515624, 0.04114246368408203, 0.039737503051757814, 0.04002012634277344, 0.03959436798095703, 0.03970220947265625, 0.0393238410949707, 0.03935388946533203, 0.03992009735107422, 
0.039215103149414066, 0.03931340789794922, 0.039476577758789065, 0.039860897064208985, 0.03951366424560547, 0.0393322868347168, 0.03911679840087891, 0.039360321044921875, 0.039183616638183594, 0.03932841491699219, 0.03906588745117188, 0.03933388900756836, 0.039585792541503906, 0.03956326293945313, 0.03936595153808594, 0.040080062866210936, 0.04001331329345703, 0.040178176879882815, 0.04095795059204101, 0.03959177780151367, 0.03958297729492188, 0.0397334098815918, 0.03973606491088867, 0.03968819046020508, 0.040269824981689455, 0.0397127685546875, 0.039538272857666014, 0.039542911529541015, 0.039417537689208984, 0.04095651245117188, 0.04067532730102539, 0.03991551971435547, 0.03972614288330078, 0.044502368927001955, 0.04037311935424805, 0.04230348968505859, 0.03965692901611328, 0.039975456237792965, 0.03926607894897461, 0.03982521438598633, 0.03956089782714844, 0.03954147338867187, 0.03987017440795899, 0.039893280029296874, 0.039809024810791016, 0.03949756622314453, 0.03942211151123047, 0.03929087829589844, 0.039122943878173826, 0.03937068939208985, 0.03959609603881836, 0.03932160186767578, 0.039790592193603515, 0.039686145782470705, 0.040296192169189456, 0.04003036880493164, 0.04019619369506836, 0.04005478286743164, 0.040038047790527345, 0.03989132690429688, 0.04014182281494141, 0.040133598327636716, 0.0404370231628418, 0.04017635345458984, 0.039981056213378906, 0.04025276947021485, 0.04007183837890625, 0.040153087615966795, 0.040099071502685546, 0.040174335479736326, 0.040293697357177735, 0.03999609756469726, 0.040237056732177735, 0.04029644775390625, 0.04022995376586914, 0.04005574417114258, 0.040118270874023435, 0.040002750396728515, 0.040003616333007815, 0.04000611114501953, 0.040255809783935545, 0.04002191925048828, 0.04069136047363281, 0.040188350677490235, 0.04013868713378906, 0.04013388824462891, 0.04007404708862305, 0.04065280151367188, 0.04011212921142578, 0.04064051055908203, 0.04130815887451172, 0.04233027267456055, 0.0404826545715332, 0.040065025329589846, 0.03983769607543945, 0.040325023651123046, 0.04454598236083984, 0.04073878479003906, 0.03982432174682617, 0.03990867233276367, 0.039755550384521485, 0.03968912124633789, 0.03947708892822266, 0.03940572738647461, 0.039686145782470705, 0.04053919982910156, 0.039947200775146484, 0.039876609802246096, 0.0398289909362793, 0.04188415908813477, 0.039866336822509764, 0.040192031860351564, 0.040310081481933595, 0.04068239974975586, 0.04062547302246094, 0.039874561309814455, 0.04004092788696289, 0.040021854400634764, 0.03982966232299805, 0.03983564758300781, 0.03964723205566406, 0.04021247863769531, 0.04009769439697266, 0.03981286239624023, 0.04001827239990234, 0.0397946891784668, 0.039583744049072264, 0.03993190383911133, 0.039774208068847655, 0.040564735412597655, 0.039880702972412106, 0.03983564758300781, 0.040054336547851566, 0.03994854354858399, 0.040683712005615234, 0.03969228744506836, 0.040185855865478515, 0.04021583938598633, 0.04042211151123047, 0.04015465545654297, 0.03976611328125, 0.04021491241455078, 0.03987043380737305, 0.039779552459716795, 0.03981110382080078, 0.03974943923950195, 0.04038339233398437, 0.04019401550292969, 0.039888065338134764, 0.040308734893798825, 0.039697246551513674, 0.03986188888549805, 0.0402334098815918, 0.039749057769775394, 0.040632896423339844, 0.04041638565063477, 0.03982368087768555, 0.04014547348022461, 0.039853279113769534, 0.04419638442993164, 0.040647262573242186, 0.04017289733886719, 0.039680416107177735, 0.03966988754272461, 0.03995647811889649, 0.039876094818115236, 
0.03988460922241211, 0.03977462387084961, 0.040083999633789065, 0.04032396697998047, 0.0397334098815918, 0.0398394889831543, 0.04041596984863281, 0.03963452911376953, 0.039674495697021486, 0.03987254333496094, 0.03968172836303711, 0.04043193435668945, 0.0401860466003418, 0.03982460784912109, 0.040020481109619144, 0.040244895935058596, 0.039877056121826175, 0.040994430541992186, 0.03979507064819336, 0.040101886749267575, 0.04036403274536133, 0.040013824462890625, 0.04011196899414062, 0.040183967590332034, 0.03992521667480469, 0.040024608612060544, 0.0398047981262207, 0.04005625534057617, 0.04019587326049805, 0.04310927963256836, 0.03971686553955078, 0.03985190582275391, 0.03957078552246094, 0.039772415161132814, 0.039893535614013674, 0.03971651077270508, 0.04048112106323242, 0.04131148910522461, 0.04000214385986328, 0.039860641479492184, 0.04032460784912109, 0.04018320083618164, 0.04069462585449219, 0.039874561309814455, 0.040310176849365234, 0.03986697769165039, 0.03973500823974609, 0.040054656982421874, 0.03984016036987305, 0.040597503662109374, 0.04007872009277344, 0.03984022521972656, 0.03994844818115234, 0.04029439926147461, 0.03989654541015625, 0.03979727935791016, 0.044315807342529295, 0.04021337509155273, 0.039763904571533205, 0.03956121444702149, 0.039376895904541014, 0.039636863708496096, 0.039701759338378904, 0.039457439422607425, 0.039912990570068356, 0.039701183319091796, 0.0393438720703125, 0.03923993682861328, 0.03911654281616211, 0.03942015838623047, 0.03958784103393555, 0.03967795181274414, 0.039964672088623046, 0.039901153564453125, 0.039487232208251954, 0.039534881591796876, 0.039232990264892575, 0.03936105728149414, 0.03945062255859375, 0.039583744049072264, 0.03944607925415039, 0.039637439727783205, 0.03950175857543945, 0.03947731018066406, 0.03930112075805664, 0.03944156646728516, 0.03965574264526367, 0.03964339065551758, 0.03953692626953125, 0.04002627182006836, 0.04156947326660156, 0.0396151351928711, 0.039196670532226564, 0.03935539245605469, 0.039152641296386716, 0.03936460876464844, 0.03943219375610352, 0.03949977493286133, 0.03947865676879883, 0.03946470260620117, 0.0392487678527832, 0.04017110443115234, 0.043108768463134765, 0.03973324966430664, 0.039403518676757815, 0.03924780654907226, 0.03909145736694336, 0.039322433471679685, 0.03950796890258789, 0.03980287933349609, 0.03946905517578125, 0.03958335876464844, 0.039518592834472656, 0.04061974334716797, 0.03964137649536133, 0.03956121444702149, 0.04031488037109375, 0.03924979019165039, 0.03921702575683594, 0.046403553009033205, 0.04075110244750976, 0.039499839782714846, 0.03943529510498047, 0.039175071716308595, 0.0390184326171875, 0.040470592498779295, 0.03879731369018555, 0.039651329040527344, 0.03889926528930664, 0.038986270904541015, 0.03884431838989258, 0.03925785446166992, 0.03955532836914062, 0.039172096252441405, 0.038997791290283204, 0.039230846405029295, 0.039050048828125, 0.03960335922241211, 0.0401396484375, 0.03922083282470703, 0.03927081680297852, 0.039115806579589844, 0.03900310516357422, 0.03983564758300781, 0.03937279891967774, 0.03908185577392578, 0.03906572723388672, 0.039093505859375, 0.039185150146484375, 0.039066879272460935, 0.039472991943359376, 0.039142433166503905, 0.03929651260375976, 0.04003478240966797, 0.039954334259033206, 0.039480705261230466, 0.039244415283203125, 0.039218433380126955, 0.03912726211547852, 0.03949350357055664, 0.03929155349731445, 0.0395338249206543, 0.04009651184082031, 0.040769535064697264, 0.041603073120117184, 0.04005411148071289, 0.039471073150634764, 
0.03953129577636719, 0.03940752029418945, 0.03930646514892578, 0.039581985473632814, 0.0392545280456543, 0.039403263092041015, 0.03940991973876953, 0.039272449493408204, 0.03945808029174805, 0.03982566452026367, 0.03963913726806641, 0.039672191619873044, 0.03934620666503906, 0.0390491828918457, 0.03919664001464844, 0.04495942306518555, 0.040583839416503904, 0.03956115341186523, 0.03994585418701172, 0.039266529083251955, 0.03923510360717773, 0.03886137771606445, 0.03885391998291016, 0.038742881774902344, 0.03907583999633789, 0.039065601348876954, 0.03985203170776367, 0.0394886703491211, 0.03958208084106445, 0.0388325424194336, 0.03878713607788086, 0.03895065689086914, 0.03881804656982422, 0.038825984954833984, 0.03919257736206055, 0.0394886703491211, 0.039656288146972654, 0.03964271926879883, 0.03931584167480469, 0.03894889450073242, 0.039060993194580076, 0.03941017532348633, 0.03950140762329102, 0.0398803825378418, 0.03954537582397461, 0.0392869758605957, 0.03935027313232422, 0.03939689636230469, 0.039379425048828125, 0.03932956695556641, 0.039485088348388674, 0.03970073699951172, 0.03971097564697266, 0.03979388809204101, 0.03952316665649414, 0.039479297637939455, 0.039569408416748046, 0.039577598571777346, 0.039652862548828126, 0.03975628662109375, 0.03971072006225586, 0.03957468795776367, 0.03950646209716797, 0.0420560302734375, 0.039530464172363285, 0.03953039932250976, 0.040149089813232425, 0.039907329559326174, 0.040314208984375, 0.03966195297241211, 0.03973926544189453, 0.039666046142578126, 0.0394486083984375, 0.0398616943359375, 0.039561790466308595, 0.03949382400512695, 0.03981497573852539, 0.0398131217956543, 0.04395113754272461, 0.04020611190795898, 0.03978374481201172, 0.039664222717285154, 0.03952588653564453, 0.039413761138916016, 0.03952620697021485, 0.0396973762512207, 0.039757823944091795, 0.03965340805053711, 0.03953865432739258, 0.03948134231567383, 0.039559295654296875, 0.03969216156005859, 0.03980489730834961, 0.039830944061279294, 0.03978099060058594, 0.03984998321533203, 0.039642688751220706, 0.03972121429443359, 0.039819454193115236, 0.03972403335571289, 0.03970560073852539, 0.03982953643798828, 0.039514080047607425, 0.04192051315307617, 0.04002220916748047, 0.039862079620361326, 0.03935846328735351, 0.03936665725708008, 0.03949772644042969, 0.0397844467163086, 0.04230348968505859, 0.039981056213378906, 0.039775390625, 0.03953286361694336, 0.0399318733215332, 0.03956793594360351, 0.04051116943359375, 0.03909049606323242, 0.03952560043334961, 0.03932140731811523, 0.03964617538452148, 0.03977011108398437, 0.03956934356689453, 0.039849952697753904, 0.0393155517578125, 0.03943958282470703, 0.03950048065185547, 0.03920294570922851, 0.03937071990966797, 0.03948134231567383, 0.03990854263305664, 0.039586624145507815, 0.03920896148681641, 0.03927603149414063, 0.039284801483154295, 0.03922784042358399, 0.03924582290649414, 0.03894476699829102, 0.03919177627563476, 0.039348896026611326, 0.03906777572631836, 0.0440437126159668, 0.04017593765258789, 0.039144832611083986, 0.03923238372802734, 0.039236961364746095, 0.0405654067993164, 0.040202239990234374, 0.04010598373413086, 0.0396943359375, 0.039696063995361325, 0.03956972885131836, 0.03943833541870117, 0.03944607925415039, 0.03920665740966797, 0.03946566390991211, 0.039865665435791016, 0.039497665405273434, 0.039578369140625, 0.039043041229248045, 0.03991059112548828, 0.0393155517578125, 0.039556926727294925, 0.039772319793701175, 0.04103408050537109, 0.03951270294189453, 0.03949926376342774, 0.039133502960205076, 
0.03932364654541016, 0.03923286437988281, 0.03954140853881836, 0.03996464157104492, 0.03998275375366211, 0.039741825103759766, 0.039790592193603515, 0.041825439453125, 0.04246409606933594, 0.039567359924316405, 0.03976396942138672, 0.039642528533935545, 0.040071041107177734, 0.039570335388183595, 0.03959174346923828, 0.0391511344909668, 0.03928931045532227, 0.03916799926757813, 0.03944985580444336, 0.03984870529174805, 0.04031049728393555, 0.039741569519042966, 0.03960438537597656, 0.039365665435791015, 0.03961318588256836, 0.03951638412475586, 0.03954278564453125, 0.039288833618164064, 0.039292606353759765, 0.04049270248413086, 0.03993814468383789, 0.03994812774658203, 0.04008009719848633, 0.039653377532958986, 0.03951577758789063, 0.03950627136230469]",tokens/s,25.07819805893844,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,810.344448,14639.104,0.0,14243.856384,14221.3376,s,1,7.5134609375,7.5134609375,0.0,7.5134609375,7.5134609375,7.5134609375,7.5134609375,[7.5134609375],,kWh,1.4797649766668522e-05,1.591941333074149e-06,6.8258387939997694e-06,2.321542989374244e-05,,MB,1185.431552,14737.670144,0.0,14329.839616,14290.688,s,10,2.1331998596191406,0.21331998596191407,0.00621579085342007,0.21396086120605468,0.21881604003906252,0.21890145874023437,0.21896979370117187,"[0.19669474792480468, 0.21166371154785157, 0.21879705810546876, 0.2109894714355469, 0.21761846923828124, 0.2128002166748047, 0.2177275848388672, 0.21898687744140624, 0.21454396057128905, 0.2133777618408203]",tokens/s,1200.075083661903,kWh,6.355655424999946e-06,7.009066060996178e-07,4.22104202417391e-06,1.1277604055273475e-05,tokens/kWh,22699857.056986574,MB,1206.10816,14752.350208,0.0,14344.51968,14290.69056,s,10,38.713044921874996,3.8713044921874995,0.006590696282903404,3.87208447265625,3.8785936767578124,3.8798875366210934,3.8809226245117188,"[3.859248046875, 3.86244873046875, 3.86632080078125, 3.871842529296875, 3.87074365234375, 3.872326416015625, 3.873673095703125, 3.8769541015625, 3.87830615234375, 3.881181396484375]",tokens/s,16.273584298816427,kWh,0.00011356573131749991,1.2526579352459377e-05,7.55003828640261e-05,0.00020159269353398537,tokens/kWh,312511.32615765755,,s,630,38.70906281661986,0.06144295685177758,0.0005424991378040025,0.06134395027160645,0.06177381134033203,0.06188752746582031,0.06480036430358888,"[0.06405974578857422, 0.062161758422851564, 0.061213825225830076, 0.061046974182128906, 0.06102067184448242, 0.06102588653564453, 0.06105859375, 0.061370399475097655, 0.060830398559570314, 0.06098457717895508, 0.061190654754638675, 0.06119014358520508, 0.06118239974975586, 0.061128158569335934, 0.060923839569091795, 0.06110879898071289, 0.0611409912109375, 0.061515777587890626, 0.06115024185180664, 0.061141983032226566, 0.061265918731689455, 0.06106524658203125, 0.061020160675048826, 0.06111142349243164, 0.060992351531982424, 0.061216766357421876, 0.06101196670532227, 0.06129641723632812, 0.061335777282714846, 0.06107340621948242, 0.061034015655517575, 0.06101449584960938, 0.06148294448852539, 0.061482494354248046, 0.06132297515869141, 0.06133436965942383, 0.061079425811767576, 0.06098956680297852, 0.06104883193969726, 0.061042686462402344, 0.06105702209472656, 0.061110271453857425, 0.06110819244384766, 0.06100380706787109, 0.061159423828125, 0.06123846435546875, 0.061217601776123044, 0.06126182556152344, 0.06138380813598633, 0.061367198944091796, 0.06120240020751953, 0.06129244613647461, 0.06142985534667969, 0.061459583282470705, 0.061295486450195315, 0.061402240753173826, 0.06144438552856445, 0.06120684814453125, 0.06145817565917969, 0.06136819076538086, 0.06121539306640625, 0.061302143096923827, 
0.06120054244995117, 0.06465602874755859, 0.06275273513793946, 0.06161552047729492, 0.061132480621337894, 0.06117264175415039, 0.060909599304199216, 0.061389888763427734, 0.0613139533996582, 0.06109779357910156, 0.06121039962768555, 0.061082015991210936, 0.06079484939575195, 0.061273887634277345, 0.06093644714355469, 0.061143039703369144, 0.06141241455078125, 0.06156793594360352, 0.06172390365600586, 0.06135270309448242, 0.06141747283935547, 0.06125139236450195, 0.06113670349121094, 0.06125606536865234, 0.06111203384399414, 0.06100409698486328, 0.06098873519897461, 0.061203102111816406, 0.06111161422729492, 0.061023937225341794, 0.06088806533813477, 0.06140620803833008, 0.0612259521484375, 0.06145753479003906, 0.06153696060180664, 0.06148323059082031, 0.06149264144897461, 0.0613361930847168, 0.0610629768371582, 0.061033824920654296, 0.061020992279052735, 0.060911296844482425, 0.06089324951171875, 0.06098479843139649, 0.06093011093139648, 0.06114905548095703, 0.06103740692138672, 0.06117788696289062, 0.06132940673828125, 0.06159356689453125, 0.0613642578125, 0.06163644790649414, 0.06142755126953125, 0.06164620971679687, 0.061475391387939456, 0.06109961700439453, 0.06142355346679688, 0.061069408416748044, 0.06094716644287109, 0.061197662353515626, 0.06124816131591797, 0.06109731292724609, 0.06121129608154297, 0.061208576202392576, 0.06485542297363281, 0.06265711975097656, 0.06143939208984375, 0.06112515258789063, 0.06097708892822266, 0.06115686416625977, 0.061233726501464844, 0.061061214447021485, 0.06128201675415039, 0.06125324630737305, 0.06087478256225586, 0.06093827056884766, 0.061078113555908205, 0.061085697174072265, 0.06107340621948242, 0.061419551849365234, 0.061773792266845706, 0.061714305877685546, 0.061689823150634766, 0.06156623840332031, 0.061286945343017575, 0.06143830490112305, 0.06100937652587891, 0.061139198303222654, 0.06111577606201172, 0.06101417541503906, 0.06101295852661133, 0.0612534065246582, 0.06137235260009766, 0.06130233764648438, 0.06127872085571289, 0.06145228958129883, 0.061548030853271485, 0.06157158279418945, 0.06163148880004883, 0.061395263671875, 0.061344158172607424, 0.061329696655273436, 0.0611247673034668, 0.06118182373046875, 0.06091772842407227, 0.06098912048339844, 0.061028705596923825, 0.06106662368774414, 0.06148492813110352, 0.061117023468017576, 0.06129411315917969, 0.061630207061767577, 0.061518688201904294, 0.06158502578735352, 0.06167385482788086, 0.061818878173828126, 0.061603839874267576, 0.061284351348876956, 0.061290496826171874, 0.061297760009765626, 0.06114985656738281, 0.06131305694580078, 0.061212512969970705, 0.0610873908996582, 0.06108438491821289, 0.06104012680053711, 0.06132371139526367, 0.06549359893798828, 0.06340780639648437, 0.06182675170898438, 0.06153043365478516, 0.06103481674194336, 0.0612044792175293, 0.06116966247558594, 0.06127740859985351, 0.06131180953979492, 0.061102046966552734, 0.061292030334472655, 0.0610302734375, 0.06105145645141601, 0.06128582382202148, 0.061446784973144535, 0.0619310417175293, 0.06177142333984375, 0.06179235076904297, 0.06195609664916992, 0.06155747222900391, 0.061504798889160155, 0.06134473419189453, 0.061091167449951175, 0.06111891174316406, 0.06104256057739258, 0.06120803070068359, 0.061134624481201175, 0.060991424560546875, 0.061037471771240234, 0.061134273529052735, 0.06114572906494141, 0.06125155258178711, 0.06130217742919922, 0.06152969741821289, 0.06139136123657227, 0.06145894241333008, 0.061573089599609374, 0.06163455963134765, 0.061663230895996096, 0.061386302947998045, 
0.061129150390625, 0.06150147247314453, 0.06124540710449219, 0.06134550476074219, 0.061205982208251956, 0.06107769775390625, 0.0612083854675293, 0.061449024200439455, 0.06146262359619141, 0.06161801528930664, 0.06183260726928711, 0.06138745498657226, 0.06167548751831055, 0.061464576721191405, 0.06129257583618164, 0.06130252838134766, 0.06130691146850586, 0.061220447540283204, 0.061176414489746096, 0.06134579086303711, 0.061394622802734375, 0.06124585723876953, 0.0612020149230957, 0.06507174682617188, 0.06283673477172852, 0.06168899154663086, 0.06141219329833984, 0.06101916885375976, 0.061273056030273436, 0.06110617446899414, 0.061040481567382815, 0.06103673553466797, 0.06110822296142578, 0.06123721694946289, 0.06088911819458008, 0.061052894592285155, 0.06095449447631836, 0.06125743865966797, 0.061843841552734376, 0.06178118515014648, 0.06166409683227539, 0.06170355224609375, 0.061309024810791014, 0.06105667114257812, 0.06101900863647461, 0.06122905731201172, 0.06129663848876953, 0.0611545295715332, 0.061481536865234374, 0.06128051376342773, 0.061205951690673825, 0.06129052734375, 0.06101046371459961, 0.06132035064697266, 0.06150444793701172, 0.06157913589477539, 0.06192127990722656, 0.06160588836669922, 0.06149529647827148, 0.06140313720703125, 0.06115433502197266, 0.06117270278930664, 0.061451519012451175, 0.061069278717041015, 0.06116854476928711, 0.06120230484008789, 0.06117113494873047, 0.06129248046875, 0.060985504150390626, 0.06140156936645508, 0.06138016128540039, 0.061305248260498046, 0.061755008697509765, 0.06182454299926758, 0.06186809539794922, 0.06161491012573242, 0.06155673599243164, 0.061615455627441404, 0.06146323013305664, 0.06136201477050781, 0.06124291229248047, 0.061305438995361325, 0.06146358489990234, 0.06120732879638672, 0.061507774353027345, 0.06168169784545898, 0.06466556549072265, 0.06253107070922852, 0.061438465118408205, 0.061255233764648434, 0.06116582489013672, 0.06119142532348633, 0.06115423965454102, 0.06128572845458984, 0.06117238235473633, 0.061142433166503904, 0.06126208114624023, 0.06081571197509766, 0.061050880432128904, 0.0611748161315918, 0.061510623931884764, 0.0617775993347168, 0.06179257583618164, 0.061795520782470706, 0.06164896011352539, 0.06141414260864258, 0.06140723037719727, 0.06124550247192383, 0.06114297485351562, 0.06145788955688476, 0.061262367248535156, 0.061240638732910156, 0.061120609283447265, 0.061131393432617184, 0.061357601165771485, 0.06121516799926758, 0.06135948944091797, 0.06160857772827148, 0.061779712677001955, 0.06189494323730469, 0.06155043029785156, 0.06169817733764649, 0.0617341423034668, 0.06122371292114258, 0.061570625305175784, 0.061399486541748045, 0.06124291229248047, 0.061345600128173826, 0.06150822448730469, 0.06172256088256836, 0.06127817535400391, 0.06114009475708008, 0.06144099044799805, 0.06143952178955078, 0.06145395278930664, 0.06174601745605469, 0.061586624145507814, 0.061315902709960936, 0.06138851165771484, 0.06130847930908203, 0.061207263946533204, 0.06127382278442383, 0.06144419097900391, 0.06122304153442383, 0.06159097671508789, 0.06143606567382812, 0.061253566741943356, 0.06135657501220703, 0.06160761642456054, 0.06552387237548828, 0.06310531234741211, 0.06176358413696289, 0.06137011337280274, 0.061208831787109376, 0.06110614395141602, 0.06117529678344726, 0.06137702560424805, 0.061128799438476565, 0.061093505859375, 0.06117814254760742, 0.06114896011352539, 0.061085952758789065, 0.06111433410644531, 0.061222911834716794, 0.06164070510864258, 0.06162432098388672, 0.0617960319519043, 0.06167279815673828, 
0.06167820739746094, 0.0615181770324707, 0.06148473739624023, 0.06120393753051758, 0.06121353530883789, 0.06115900802612305, 0.06126019287109375, 0.06116761779785156, 0.061216766357421876, 0.061400577545166014, 0.06131353759765625, 0.061315071105957034, 0.06141033554077149, 0.06149014282226563, 0.06166540908813477, 0.061505409240722654, 0.061505535125732425, 0.061547584533691406, 0.06129350280761719, 0.061357471466064455, 0.061212417602539065, 0.0615629768371582, 0.061743358612060546, 0.061288032531738285, 0.06129462432861328, 0.061270912170410155, 0.061314720153808594, 0.061404960632324215, 0.06149289703369141, 0.061657279968261716, 0.061811424255371096, 0.061765216827392576, 0.06176607894897461, 0.06161814498901367, 0.06157699203491211, 0.06162579345703125, 0.0613507194519043, 0.06119366455078125, 0.061118080139160154, 0.06132815933227539, 0.06107968139648438, 0.061192192077636716, 0.06131011199951172, 0.06128271865844727, 0.06490729522705078, 0.06290224075317383, 0.06169935989379883, 0.061251808166503906, 0.06106291198730469, 0.06126438522338867, 0.061257984161376955, 0.061337631225585935, 0.061112289428710935, 0.06115523147583008, 0.06118550491333008, 0.06122150421142578, 0.061282302856445314, 0.06110396957397461, 0.061593536376953126, 0.061829345703125, 0.06195199966430664, 0.06179635238647461, 0.06191241455078125, 0.06165724945068359, 0.06142617416381836, 0.061252769470214845, 0.06119715118408203, 0.06113644790649414, 0.06141996765136719, 0.061484897613525394, 0.0612138557434082, 0.06158848190307617, 0.061292545318603515, 0.06124748611450195, 0.06173081588745117, 0.061598846435546875, 0.06177471923828125, 0.061683521270751954, 0.0616833610534668, 0.06150191879272461, 0.061472801208496096, 0.061304862976074216, 0.06131670379638672, 0.061357952117919924, 0.06108009719848633, 0.06115532684326172, 0.061259777069091796, 0.06173286437988281, 0.06170169448852539, 0.061558334350585935, 0.06179449462890625, 0.06170800018310547, 0.061812961578369144, 0.06168857574462891, 0.061951648712158205, 0.06179232025146485, 0.061642559051513675, 0.06140137481689453, 0.061327232360839846, 0.06134201431274414, 0.06127558517456055, 0.06108127975463867, 0.06125657653808594, 0.06148668670654297, 0.06120223999023437, 0.061424320220947265, 0.06167337417602539, 0.06490252685546875, 0.0627410545349121, 0.061370433807373045, 0.061381729125976565, 0.061295520782470705, 0.06139884948730469, 0.06143814468383789, 0.06172819137573242, 0.06134841537475586, 0.06134172821044922, 0.06159312057495117, 0.06148051071166992, 0.061610881805419924, 0.06125686264038086, 0.06164771270751953, 0.06206991958618164, 0.06199980926513672, 0.061773983001708985, 0.06157267379760742, 0.06140777587890625, 0.06142556762695312, 0.06117375946044922, 0.06101308822631836, 0.06097958374023438, 0.061122718811035155, 0.06119417572021484, 0.06146297454833984, 0.06147174453735352, 0.06133248138427734, 0.0614799690246582, 0.061510623931884764, 0.06154764938354492, 0.06175836944580078, 0.06185964965820313, 0.06160604858398438, 0.06156492614746094, 0.0615731201171875, 0.06144371032714844, 0.06142102432250977, 0.061295486450195315, 0.06126515197753906, 0.06117574310302734, 0.06120534515380859, 0.0612426872253418, 0.061216545104980466, 0.06138070297241211, 0.06146131134033203, 0.06125158309936524, 0.06174460983276367, 0.061858177185058594, 0.061878463745117185, 0.06186188888549805, 0.06161539077758789, 0.061731521606445315, 0.061505569458007815, 0.06158137512207031, 0.06166281509399414, 0.06156934356689453, 0.061413406372070316, 0.06146656036376953, 
0.061319137573242186, 0.06157708740234375, 0.06130291366577149, 0.06526566314697266, 0.06307129669189453, 0.06162454223632813, 0.061301441192626954, 0.061367454528808596, 0.061354881286621095, 0.06135539245605469, 0.06131110382080078, 0.06135424041748047, 0.06147865676879883, 0.06132374572753906, 0.06146047973632812, 0.061704193115234375, 0.06123865509033203, 0.06153263854980469, 0.06226755142211914, 0.0620687370300293, 0.06199071884155274, 0.061742431640625, 0.06163337707519531, 0.06154415893554688, 0.061792545318603516, 0.06119571304321289, 0.06167577743530273, 0.06123689651489258, 0.06145500946044922, 0.061095294952392576, 0.06138937759399414, 0.06120864105224609, 0.0614420166015625, 0.0613458251953125, 0.0615096321105957, 0.061677120208740235, 0.06167596817016602, 0.06187539291381836, 0.061778751373291016, 0.061679359436035155, 0.06149321746826172, 0.061411231994628904, 0.06134374237060547, 0.06133388900756836, 0.06134991836547852, 0.06119222259521485, 0.061450302124023436, 0.06161328125, 0.06117609786987305, 0.06134777450561523, 0.06139731216430664, 0.06151910400390625, 0.0614365119934082, 0.06169411087036133, 0.06185324859619141, 0.06180720138549805, 0.06185881423950195, 0.06153014373779297, 0.0614835205078125, 0.06164665603637695, 0.06137305450439453, 0.061421600341796875, 0.06141334533691406, 0.06154214477539063, 0.06155427169799805, 0.06148735809326172]",tokens/s,16.275258406140157,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,883.122176,6232.604672,0.0,5830.08256,5627.341824,s,1,7.68555517578125,7.68555517578125,0.0,7.68555517578125,7.68555517578125,7.68555517578125,7.68555517578125,[7.68555517578125],,kWh,6.9558748291531925e-06,7.468826957986215e-07,3.468336107995129e-06,1.1171093632946943e-05,,MB,1322.856448,6316.490752,0.0,5901.385728,5763.868672,s,10,0.782393310546875,0.0782393310546875,0.001309358515559568,0.0782970085144043,0.07954390029907227,0.08013225593566894,0.08060294044494629,"[0.07567964935302735, 0.07826294708251953, 0.0772973403930664, 0.08072061157226562, 0.07941315460205078, 0.07758303833007812, 0.079200927734375, 0.0773861083984375, 0.07851846313476563, 0.07833106994628906]",tokens/s,3272.01161550144,kWh,2.3726379093498663e-06,2.6165809001570387e-07,1.5740798500488942e-06,4.208375849414464e-06,tokens/kWh,60831068.60230147,MB,1355.395072,6381.502464,0.0,5964.300288,5763.871232,s,10,18.873771606445313,1.8873771606445313,0.00861981571786624,1.8854300537109374,1.8959946044921876,1.9028235961914062,1.9082867895507811,"[1.87702294921875, 1.8881806640625, 1.884950439453125, 1.89447705078125, 1.8810401611328125, 1.88490576171875, 1.88170849609375, 1.885923828125, 1.88590966796875, 
1.909652587890625]",tokens/s,33.37965580683713,kWh,5.5493165038982174e-05,6.120636001075862e-06,3.690672871234916e-05,9.852052975240721e-05,tokens/kWh,639460.6297624042,,s,630,18.8707398033142,0.029953555243355887,0.0006834568005226398,0.029808911323547363,0.03027997169494629,0.030683816146850583,0.033427295188903805,"[0.03128230476379395, 0.029911935806274412, 0.02983526420593262, 0.02975926399230957, 0.029738752365112305, 0.030226911544799805, 0.029693952560424806, 0.029609983444213867, 0.029898752212524415, 0.029511104583740233, 0.029571647644042968, 0.029724672317504884, 0.02977574348449707, 0.029851327896118163, 0.02962067222595215, 0.02961408042907715, 0.029915136337280275, 0.029691904067993165, 0.029746431350708008, 0.029754112243652344, 0.02975107192993164, 0.029659360885620118, 0.029708288192749024, 0.029650848388671876, 0.02956502342224121, 0.029726015090942384, 0.029841663360595704, 0.02973535919189453, 0.02979430389404297, 0.030063968658447265, 0.029907615661621093, 0.029702144622802733, 0.029534208297729493, 0.029816831588745117, 0.0297606086730957, 0.030044767379760744, 0.0298540153503418, 0.02971548843383789, 0.029546720504760742, 0.029487104415893556, 0.029825599670410156, 0.029716672897338866, 0.029861696243286134, 0.0300382080078125, 0.029851072311401366, 0.029692480087280274, 0.02991244888305664, 0.02991328048706055, 0.029823423385620117, 0.029786048889160158, 0.02948851203918457, 0.029848255157470704, 0.02977097511291504, 0.029970592498779296, 0.02984579277038574, 0.029882207870483398, 0.029540672302246093, 0.029679807662963867, 0.029501440048217774, 0.0296343994140625, 0.02997622489929199, 0.029747711181640626, 0.029829120635986327, 0.03386924743652344, 0.030934911727905273, 0.029802879333496093, 0.029817087173461914, 0.029745216369628905, 0.0298024959564209, 0.030003200531005858, 0.029874176025390626, 0.029820831298828124, 0.02982307243347168, 0.02977791976928711, 0.029566368103027343, 0.02979465675354004, 0.029751167297363282, 0.03003343963623047, 0.0321396484375, 0.03015932846069336, 0.02977916717529297, 0.029784479141235352, 0.029723007202148436, 0.029689855575561523, 0.0301497917175293, 0.030027999877929687, 0.03006937599182129, 0.030133375167846678, 0.029817728042602538, 0.030091264724731445, 0.030437376022338865, 0.02996019172668457, 0.029904832839965822, 0.029820032119750976, 0.029975488662719728, 0.02998886489868164, 0.029967967987060546, 0.0298889274597168, 0.029726720809936522, 0.029868032455444334, 0.029738847732543944, 0.029958015441894532, 0.029841600418090822, 0.02982307243347168, 0.03002694320678711, 0.029839679718017577, 0.030145023345947267, 0.030404224395751953, 0.02951206398010254, 0.029632511138916014, 0.02969913673400879, 0.02959404754638672, 0.029777887344360352, 0.029521663665771483, 0.029608736038208006, 0.029533952713012696, 0.029585664749145507, 0.02979635238647461, 0.029761503219604492, 0.029804576873779298, 0.029825023651123047, 0.02979430389404297, 0.029664608001708986, 0.029753664016723632, 0.029878623962402345, 0.029837215423583984, 0.033280384063720705, 0.030677215576171875, 0.02966771125793457, 0.02968297576904297, 0.029665151596069337, 0.02969068717956543, 0.02975948715209961, 0.029872127532958984, 0.02994931221008301, 0.02965353584289551, 0.029708383560180664, 0.029964223861694336, 0.029846847534179686, 0.029983200073242188, 0.029534080505371093, 0.029538591384887694, 0.02978214454650879, 0.029955648422241212, 0.029691520690917968, 0.033428287506103514, 0.03015475273132324, 0.02959564781188965, 0.029683328628540038, 
0.029619712829589844, 0.029846399307250977, 0.029730688095092772, 0.029616256713867188, 0.02961408042907715, 0.029557823181152344, 0.02967238426208496, 0.02969593620300293, 0.029921344757080078, 0.029733951568603516, 0.029651552200317382, 0.02978236770629883, 0.029661184310913087, 0.029705631256103517, 0.02954697608947754, 0.029814592361450197, 0.02960380744934082, 0.02996236801147461, 0.029859712600708008, 0.029747200012207032, 0.029763935089111328, 0.0299683837890625, 0.030064640045166017, 0.03012985610961914, 0.029673055648803712, 0.02969599914550781, 0.029601951599121094, 0.02992313575744629, 0.029805311203002928, 0.029814176559448242, 0.029702655792236327, 0.029714143753051758, 0.029596031188964842, 0.02982646369934082, 0.02971504020690918, 0.02968275260925293, 0.02968876838684082, 0.029887840270996092, 0.03119139289855957, 0.030089536666870118, 0.03317068862915039, 0.030554431915283203, 0.029916799545288086, 0.03027340888977051, 0.030143423080444334, 0.030092479705810547, 0.02998111915588379, 0.030177440643310547, 0.029915327072143554, 0.030484512329101564, 0.029980640411376953, 0.029949983596801757, 0.03040787124633789, 0.030023616790771486, 0.030124063491821288, 0.029889375686645507, 0.030115840911865234, 0.02974425506591797, 0.02984025573730469, 0.030054399490356445, 0.029919136047363282, 0.030173280715942382, 0.030223743438720703, 0.029804224014282225, 0.02984137535095215, 0.02981372833251953, 0.030000991821289062, 0.029861312866210937, 0.029926111221313476, 0.030097408294677733, 0.03019366455078125, 0.030134271621704102, 0.030023679733276368, 0.030111743927001954, 0.03013804817199707, 0.03004607963562012, 0.030871999740600585, 0.030101503372192383, 0.029896703720092774, 0.030074880599975585, 0.029816831588745117, 0.02993737602233887, 0.03006492805480957, 0.029761280059814453, 0.03014681625366211, 0.029928512573242187, 0.030127040863037108, 0.029833215713500977, 0.03005411148071289, 0.029736448287963867, 0.029782815933227538, 0.029571008682250977, 0.0296059513092041, 0.029646848678588866, 0.029936832427978517, 0.029557567596435547, 0.030017152786254882, 0.029743392944335936, 0.029675615310668944, 0.0297205753326416, 0.0303636474609375, 0.03125603294372559, 0.02980918312072754, 0.03349667358398437, 0.030649824142456053, 0.02966579246520996, 0.029788383483886717, 0.02965519905090332, 0.029592960357666016, 0.029622976303100585, 0.029871488571166994, 0.03004412841796875, 0.02983103942871094, 0.029870687484741212, 0.029703872680664063, 0.029733375549316408, 0.029831167221069335, 0.029655040740966795, 0.02979167938232422, 0.029854272842407225, 0.029851551055908202, 0.029605663299560547, 0.029475135803222655, 0.02951535987854004, 0.02966752052307129, 0.029595775604248045, 0.029607744216918946, 0.02950156784057617, 0.02961542320251465, 0.029542400360107423, 0.02964975929260254, 0.02961408042907715, 0.029781152725219726, 0.029546560287475584, 0.029612831115722656, 0.0295849609375, 0.02956470489501953, 0.02972329521179199, 0.029754751205444335, 0.029829599380493163, 0.02971254348754883, 0.029717920303344726, 0.029753952026367186, 0.02960383987426758, 0.029576927185058593, 0.03072233581542969, 0.030353023529052735, 0.029910688400268556, 0.029876512527465822, 0.029804832458496095, 0.030134431838989256, 0.03014009666442871, 0.029655359268188478, 0.029726015090942384, 0.029890783309936525, 0.02977340888977051, 0.029634592056274413, 0.02963337516784668, 0.030085119247436523, 0.030733312606811523, 0.029913856506347657, 0.02992563247680664, 0.0298024959564209, 0.029693887710571288, 
0.0298940486907959, 0.02977039909362793, 0.033097728729248044, 0.030543712615966796, 0.02986716842651367, 0.029715456008911133, 0.029943008422851563, 0.029960639953613283, 0.029831520080566408, 0.029787456512451172, 0.02973971176147461, 0.029675487518310548, 0.029951488494873047, 0.02984809684753418, 0.02978374481201172, 0.02977824020385742, 0.029870080947875976, 0.03120947265625, 0.03850239944458008, 0.029724672317504884, 0.0298240966796875, 0.029620479583740235, 0.029620895385742186, 0.029710336685180663, 0.02967136001586914, 0.029728832244873046, 0.02954172706604004, 0.029630144119262694, 0.029481695175170897, 0.029394527435302735, 0.029459104537963868, 0.029444095611572265, 0.029550592422485353, 0.0295280647277832, 0.029518943786621094, 0.03070454406738281, 0.02954764747619629, 0.02971286392211914, 0.029757856369018554, 0.02958336067199707, 0.029552095413208006, 0.02943235206604004, 0.029596927642822266, 0.029993087768554687, 0.029604480743408202, 0.029495296478271486, 0.02991856002807617, 0.029713056564331056, 0.029627487182617186, 0.029487199783325195, 0.029541023254394533, 0.03007708740234375, 0.029652992248535157, 0.02958131217956543, 0.02975881576538086, 0.029556640625, 0.029638847351074218, 0.029444671630859374, 0.029640703201293944, 0.029502464294433595, 0.029634912490844725, 0.02964956855773926, 0.029603200912475584, 0.029604480743408202, 0.029435903549194335, 0.03357699203491211, 0.030877088546752928, 0.029714815139770506, 0.029702463150024415, 0.029728607177734376, 0.029820608139038085, 0.029629823684692382, 0.02985878372192383, 0.029685760498046877, 0.029652864456176757, 0.029656415939331056, 0.029629215240478516, 0.031084543228149415, 0.029765247344970703, 0.029870464324951173, 0.02980396842956543, 0.02994550323486328, 0.029890495300292967, 0.02961916732788086, 0.029714431762695313, 0.029874176025390626, 0.029683712005615235, 0.029689056396484375, 0.029696800231933593, 0.029767679214477538, 0.029674816131591796, 0.029573728561401367, 0.02964899253845215, 0.029627552032470705, 0.02992620849609375, 0.029697120666503905, 0.02954044723510742, 0.02968662452697754, 0.02954377555847168, 0.029715103149414064, 0.029531744003295897, 0.029718303680419923, 0.029937503814697265, 0.029649696350097655, 0.029631488800048827, 0.029565568923950195, 0.029822559356689454, 0.029643552780151367, 0.02952396774291992, 0.029657087326049804, 0.03012403106689453, 0.02991923141479492, 0.02968780708312988, 0.02974496078491211, 0.029554496765136717, 0.03068921661376953, 0.02998726463317871, 0.02975334358215332, 0.02980454444885254, 0.03000454330444336, 0.029831872940063477, 0.029808639526367187, 0.03017510414123535, 0.02989468765258789, 0.029913183212280273, 0.029816383361816405, 0.02972867202758789, 0.02971673583984375, 0.03318377685546875, 0.03061350440979004, 0.029875680923461913, 0.029648799896240235, 0.029718496322631835, 0.02994438362121582, 0.029668767929077147, 0.02950828742980957, 0.029578527450561522, 0.029720640182495116, 0.02963020706176758, 0.02965171241760254, 0.02970844841003418, 0.02963408088684082, 0.03077168083190918, 0.02978761672973633, 0.029695552825927736, 0.029765792846679687, 0.030339744567871092, 0.029946016311645507, 0.029713855743408204, 0.029710912704467775, 0.02974515151977539, 0.029892608642578124, 0.0295731201171875, 0.029526016235351563, 0.034887680053710936, 0.03002128028869629, 0.029826623916625977, 0.029693920135498045, 0.029702367782592772, 0.02944470405578613, 0.029849599838256836, 0.029619392395019532, 0.029641536712646483, 0.029542400360107423, 
0.029534208297729493, 0.029607936859130858, 0.029591552734375, 0.029921279907226563, 0.02980454444885254, 0.029859392166137696, 0.029839584350585938, 0.029894880294799805, 0.02975334358215332, 0.029580448150634767, 0.029902975082397462, 0.029665599822998046, 0.029825439453125, 0.029773439407348633, 0.02972073554992676, 0.02998294448852539, 0.02984297561645508, 0.02994633674621582, 0.030103551864624024, 0.02978598403930664, 0.02989683151245117, 0.029820928573608397, 0.030003200531005858, 0.029691583633422853, 0.029788383483886717, 0.029947999954223634, 0.029767679214477538, 0.03342486572265625, 0.031156383514404296, 0.029914751052856445, 0.029732959747314453, 0.02974198341369629, 0.029681663513183593, 0.029709375381469727, 0.029985727310180663, 0.02968288040161133, 0.029487648010253907, 0.029784351348876952, 0.02955628776550293, 0.029829183578491212, 0.029815168380737306, 0.02978316879272461, 0.029913536071777345, 0.029681247711181642, 0.029479103088378908, 0.029674144744873048, 0.02999295997619629, 0.0295731201171875, 0.02990870475769043, 0.02983145523071289, 0.029949407577514648, 0.029825056076049804, 0.029760000228881835, 0.02979840087890625, 0.029706239700317383, 0.029843456268310548, 0.029870080947875976, 0.029795679092407226, 0.03002176094055176, 0.029884960174560545, 0.029904895782470704, 0.029814783096313476, 0.02988627243041992, 0.029839040756225586, 0.030128639221191408, 0.02972591972351074, 0.03022108840942383, 0.029927200317382812, 0.029799680709838867, 0.02984239959716797, 0.02981449508666992, 0.029900575637817384, 0.02966352081298828, 0.029888736724853517, 0.0295731201171875, 0.02972230339050293, 0.029830591201782226, 0.02985228729248047, 0.030058080673217774, 0.02998134422302246, 0.030344959259033202, 0.030021888732910156, 0.029687711715698242, 0.029751359939575197, 0.030226463317871093, 0.029744159698486327, 0.029586111068725586, 0.0316333122253418, 0.029829504013061524, 0.02959769630432129, 0.033683456420898435, 0.03227004623413086, 0.029808000564575197, 0.029751232147216797, 0.02967241668701172, 0.029657087326049804, 0.02973686408996582, 0.02993132781982422, 0.029820255279541016, 0.02988742446899414, 0.030523391723632814, 0.030437183380126954, 0.030304447174072265, 0.03015065574645996, 0.030128000259399414, 0.030044288635253907, 0.030287391662597658, 0.03028374481201172, 0.030183135986328127, 0.030521568298339845, 0.03048428726196289, 0.03023744010925293, 0.030315839767456054, 0.03018409538269043, 0.03011520004272461, 0.03023529624938965, 0.030326496124267577, 0.03017308807373047, 0.030196096420288084, 0.030203903198242187, 0.030269439697265626, 0.03016294479370117, 0.03035955238342285, 0.03021558380126953, 0.030646688461303712, 0.030343360900878906, 0.03016499137878418, 0.03031449508666992, 0.030277023315429686, 0.030327360153198243, 0.030146591186523436, 0.030279552459716797, 0.03027571105957031, 0.030255104064941408, 0.030399776458740233, 0.030325311660766602, 0.03028598403930664, 0.030052352905273437, 0.0322191047668457, 0.031059423446655274, 0.030042688369750978, 0.030115840911865234, 0.02982851219177246, 0.030534175872802733, 0.029956159591674806, 0.029959455490112304, 0.02986057662963867, 0.0298024959564209, 0.030002559661865234, 0.029934207916259767, 0.03019513511657715, 0.031208000183105468, 0.029937664031982423]",tokens/s,33.38501863553622,, 
float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 353, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 152762 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,783.192064,6162.415616,0.0,5767.168,5561.701376,s,1,7.0833408203125,7.0833408203125,0.0,7.0833408203125,7.0833408203125,7.0833408203125,7.0833408203125,[7.0833408203125],,kWh,4.374286525001026e-06,4.733619233300679e-07,3.067780232006434e-06,7.915428680337528e-06,,MB,1231.310848,6174.998528,0.0,5767.168,5440.258048,s,10,0.6849548149108887,0.06849548149108886,0.0032129089914807356,0.0690937614440918,0.07194749374389649,0.07268680152893066,0.073278247756958,"[0.06620457458496094, 0.06829011535644532, 0.06989740753173829, 0.06389587020874024, 0.07090982055664062, 0.071783203125, 0.07342610931396484, 0.06422950744628907, 0.06538047790527343, 0.07093772888183594]",tokens/s,3737.4728146601196,kWh,2.0920897395833054e-06,2.307189349801322e-07,1.3824693599428466e-06,3.705278034506284e-06,tokens/kWh,69090631.69239637,MB,1263.910912,6177.09568,0.0,5769.265152,5523.463168,s,10,19.087283813476564,1.908728381347656,0.004012082060044164,1.9088529052734375,1.9145850219726563,1.9150225524902345,1.9153725769042969,"[1.9088482666015625, 1.9154600830078126, 1.9088575439453126, 1.9078658447265624, 1.9109705810546875, 1.904739013671875, 1.909282470703125, 1.902086669921875, 1.904685546875, 1.91448779296875]",tokens/s,33.00626774120627,kWh,5.550871305083376e-05,6.1223510974629865e-06,3.695667242245729e-05,9.858773657075404e-05,tokens/kWh,639024.7123158814,,s,630,19.084383964538585,0.03029267295958504,0.0005190133975847884,0.03017751979827881,0.030649503326416014,0.030884998321533203,0.032696515808105474,"[0.03146159934997558, 0.03105177688598633, 0.03055561637878418, 0.030380224227905272, 0.030060895919799803, 0.030156415939331056, 0.029970720291137697, 0.030058591842651368, 0.02999295997619629, 0.030320575714111328, 0.03008723258972168, 0.03038412857055664, 0.030294015884399415, 0.030639711380004882, 0.029993440628051756, 0.030153760910034178, 0.030007776260375978, 0.03035385513305664, 0.029988319396972656, 0.030358047485351564, 0.030067840576171876, 0.03037068748474121, 0.0301527042388916, 0.030025087356567382, 0.03007961654663086, 
0.030531583786010744, 0.03010950469970703, 0.030531295776367186, 0.02992585563659668, 0.030649471282958984, 0.030029823303222656, 0.030886783599853515, 0.030426368713378907, 0.030683904647827148, 0.030091072082519533, 0.030546016693115234, 0.03017532730102539, 0.030500864028930662, 0.030059904098510743, 0.03004684829711914, 0.029916608810424804, 0.03051558494567871, 0.03141651153564453, 0.03047327995300293, 0.029953216552734373, 0.030328575134277343, 0.03000934410095215, 0.029798559188842774, 0.030139328002929688, 0.030430112838745117, 0.030164703369140625, 0.030545280456542968, 0.0301495361328125, 0.03055411148071289, 0.02997452735900879, 0.029973760604858398, 0.030028543472290038, 0.030567615509033204, 0.029936063766479493, 0.0305032958984375, 0.030236671447753907, 0.03061555290222168, 0.03018047904968262, 0.03335168075561523, 0.03271417617797852, 0.030953119277954102, 0.030671104431152344, 0.030286495208740234, 0.030453760147094725, 0.030303871154785156, 0.030187328338623046, 0.03026915168762207, 0.03039740753173828, 0.030144384384155273, 0.030208223342895506, 0.030134048461914063, 0.03039232063293457, 0.030158111572265625, 0.03021059226989746, 0.03008230400085449, 0.031126176834106446, 0.030175071716308594, 0.0304071044921875, 0.03011599922180176, 0.030327999114990234, 0.030067359924316406, 0.030213855743408204, 0.03010588836669922, 0.03037113571166992, 0.03012272071838379, 0.03048393630981445, 0.029986400604248047, 0.03093391990661621, 0.03018956756591797, 0.030267391204833984, 0.030345216751098632, 0.03078758430480957, 0.03033087921142578, 0.030736383438110353, 0.030099456787109374, 0.03042508888244629, 0.030072256088256834, 0.030152864456176758, 0.03010950469970703, 0.030544479370117186, 0.030007295608520508, 0.03046540832519531, 0.0299866886138916, 0.03049139213562012, 0.029881664276123047, 0.030003904342651367, 0.030031871795654298, 0.03049407958984375, 0.030017984390258788, 0.030595264434814452, 0.030246912002563478, 0.030622976303100586, 0.030231296539306642, 0.030072416305541992, 0.03005891227722168, 0.030882816314697265, 0.030247711181640626, 0.030488000869750977, 0.0299935359954834, 0.030513343811035157, 0.030422336578369142, 0.033067489624023436, 0.03184448051452637, 0.030672447204589844, 0.030425504684448244, 0.030107648849487304, 0.03034726333618164, 0.0299683837890625, 0.03017523193359375, 0.02995734405517578, 0.03032512092590332, 0.03005289649963379, 0.03038604736328125, 0.030043167114257814, 0.030327775955200195, 0.03012403106689453, 0.030121023178100587, 0.02993846321105957, 0.030316703796386717, 0.030281728744506835, 0.030182432174682618, 0.030081695556640625, 0.030290239334106444, 0.030066688537597655, 0.029915136337280275, 0.029977920532226563, 0.03046067237854004, 0.03015465545654297, 0.030334239959716798, 0.029983488082885743, 0.03041689682006836, 0.030244863510131836, 0.029997055053710937, 0.029998655319213866, 0.03074246406555176, 0.030197664260864256, 0.030617727279663085, 0.03015318489074707, 0.030611455917358397, 0.030220287322998047, 0.03021843147277832, 0.02992681694030762, 0.030552480697631838, 0.030146047592163085, 0.030609920501708986, 0.029868192672729492, 0.030548063278198243, 0.02994764709472656, 0.03004585647583008, 0.029968095779418946, 0.030609920501708986, 0.030011520385742188, 0.0305664005279541, 0.030316543579101563, 0.03036796760559082, 0.029904512405395507, 0.03012556838989258, 0.030021663665771484, 0.030545888900756837, 0.030076671600341796, 0.030659488677978516, 0.029962240219116212, 0.030559999465942383, 0.029875808715820313, 
0.03265327835083008, 0.03167990493774414, 0.030740480422973632, 0.030514015197753906, 0.03013430404663086, 0.030364383697509767, 0.03005238342285156, 0.029987424850463868, 0.030009727478027343, 0.03031587219238281, 0.03020867156982422, 0.030719200134277345, 0.03016281509399414, 0.030321760177612303, 0.030218048095703123, 0.02998899269104004, 0.03024883270263672, 0.03043507194519043, 0.030001407623291017, 0.030389631271362304, 0.02985228729248047, 0.030310047149658202, 0.029867616653442383, 0.029872896194458008, 0.029824735641479493, 0.03035696029663086, 0.030016191482543947, 0.030380159378051757, 0.029878271102905272, 0.03023676872253418, 0.029809728622436523, 0.030004064559936525, 0.030056447982788087, 0.030510271072387695, 0.03014121627807617, 0.030572576522827147, 0.030277631759643556, 0.030496000289916992, 0.03004083251953125, 0.030043136596679686, 0.029952064514160156, 0.03037820816040039, 0.02992201614379883, 0.030564607620239256, 0.030152448654174803, 0.030567968368530273, 0.030058719635009765, 0.03017763137817383, 0.03000294494628906, 0.030564512252807617, 0.030038015365600586, 0.0305930233001709, 0.03014656066894531, 0.030704896926879884, 0.03008793640136719, 0.03015283203125, 0.0299652156829834, 0.030573535919189453, 0.030085119247436523, 0.030533760070800782, 0.029998975753784178, 0.030620672225952147, 0.030032800674438476, 0.03475116729736328, 0.03217139053344727, 0.0310032958984375, 0.030599296569824217, 0.03022831916809082, 0.030273216247558594, 0.030043519973754883, 0.029955007553100585, 0.030113727569580077, 0.03027769660949707, 0.03010742378234863, 0.03039254379272461, 0.02998851203918457, 0.030426687240600585, 0.03010860824584961, 0.030232032775878905, 0.030422719955444336, 0.030503583908081056, 0.030156831741333007, 0.030319744110107422, 0.02998566436767578, 0.030223424911499024, 0.03035366439819336, 0.03008755111694336, 0.02999443244934082, 0.030331775665283202, 0.02995408058166504, 0.030426847457885743, 0.030009727478027343, 0.030394239425659178, 0.029837312698364257, 0.029934623718261718, 0.030096639633178712, 0.030572256088256835, 0.031022207260131836, 0.0305133113861084, 0.030030559539794922, 0.03042902374267578, 0.030040063858032227, 0.03001919937133789, 0.029954591751098634, 0.03046307182312012, 0.029971136093139648, 0.03038172721862793, 0.029915456771850587, 0.030333120346069335, 0.029806207656860352, 0.029820608139038085, 0.029907487869262697, 0.030388448715209963, 0.02991641616821289, 0.030526208877563476, 0.02995814323425293, 0.030788639068603515, 0.030806144714355468, 0.030155616760253905, 0.029962240219116212, 0.030449663162231445, 0.03002716827392578, 0.030388832092285156, 0.029959232330322265, 0.03048271942138672, 0.030011072158813476, 0.03337580871582031, 0.03178246307373047, 0.030845504760742187, 0.0303472957611084, 0.03007107162475586, 0.030244863510131836, 0.029951295852661132, 0.02997248077392578, 0.029849855422973633, 0.030200031280517577, 0.029804767608642577, 0.030212095260620117, 0.029809696197509766, 0.030172128677368164, 0.029800447463989257, 0.029822975158691405, 0.029911039352416992, 0.030320640563964843, 0.030048128128051757, 0.03017740821838379, 0.029894624710083008, 0.030234655380249022, 0.029868032455444334, 0.029849599838256836, 0.029841407775878907, 0.030309600830078123, 0.029795103073120117, 0.030224384307861327, 0.029859840393066408, 0.03016080093383789, 0.02980406379699707, 0.0299117431640625, 0.030087039947509765, 0.030447616577148437, 0.030928991317749024, 0.031110528945922852, 0.030093311309814453, 0.030349855422973634, 
0.02999839973449707, 0.029896863937377928, 0.03002217674255371, 0.03030988883972168, 0.03025766372680664, 0.030252735137939454, 0.029824352264404295, 0.03032310485839844, 0.02995462417602539, 0.031079647064208984, 0.029862495422363283, 0.030465887069702147, 0.029923583984375, 0.030748800277709962, 0.029949920654296875, 0.030638080596923828, 0.03004729652404785, 0.029851999282836914, 0.029974399566650392, 0.03047907257080078, 0.03012166404724121, 0.030517248153686522, 0.030038335800170898, 0.030506656646728515, 0.029918912887573243, 0.035480991363525394, 0.03224636840820312, 0.030865407943725585, 0.030525215148925783, 0.030844287872314455, 0.03047920036315918, 0.030058143615722656, 0.030181024551391603, 0.030339712142944335, 0.0302807674407959, 0.030051328659057616, 0.030259199142456054, 0.029937664031982423, 0.03017103958129883, 0.02997158432006836, 0.02998780822753906, 0.030058719635009765, 0.030271263122558595, 0.030154272079467772, 0.030310495376586914, 0.030648704528808593, 0.030394367218017578, 0.029963327407836915, 0.02991804885864258, 0.029927520751953124, 0.030361600875854492, 0.029884416580200194, 0.03033443260192871, 0.029723167419433594, 0.030328832626342773, 0.0298591365814209, 0.029848255157470704, 0.029949951171875, 0.03077631950378418, 0.030089439392089842, 0.030421791076660157, 0.030044160842895507, 0.03041302490234375, 0.030061567306518554, 0.030060831069946288, 0.030154272079467772, 0.030612415313720703, 0.030074911117553713, 0.03032841682434082, 0.02987785530090332, 0.030462783813476564, 0.029789247512817384, 0.02988435173034668, 0.029834239959716798, 0.030443519592285157, 0.029865983963012696, 0.030584735870361326, 0.029890655517578125, 0.03042064094543457, 0.0298950080871582, 0.02993471908569336, 0.029881216049194335, 0.030508575439453126, 0.030025535583496094, 0.03053225517272949, 0.029984800338745118, 0.03046396827697754, 0.030076927185058593, 0.031486976623535154, 0.03155216026306153, 0.030861824035644532, 0.03070262336730957, 0.030047008514404297, 0.030263296127319338, 0.03001753616333008, 0.029917184829711913, 0.03000553512573242, 0.030133567810058593, 0.030062143325805663, 0.030359807968139647, 0.029964895248413087, 0.030402528762817384, 0.03004377555847168, 0.02981929588317871, 0.03042070388793945, 0.030206239700317383, 0.03002560043334961, 0.03034556770324707, 0.029918079376220704, 0.03014543914794922, 0.029793760299682618, 0.029899007797241212, 0.029901088714599608, 0.03034726333618164, 0.029894655227661132, 0.03031449508666992, 0.029943071365356445, 0.030243455886840822, 0.030191455841064453, 0.030199424743652344, 0.02985638427734375, 0.030439136505126953, 0.030054239273071288, 0.03061356735229492, 0.0300865592956543, 0.030415775299072266, 0.029982784271240234, 0.02984351921081543, 0.029890464782714843, 0.030381696701049805, 0.029933984756469727, 0.03042905616760254, 0.0299716796875, 0.03034419250488281, 0.02993756866455078, 0.029796607971191408, 0.02981452751159668, 0.030357503890991212, 0.029908992767333983, 0.030849023818969725, 0.029997055053710937, 0.03040870475769043, 0.02975334358215332, 0.030016735076904298, 0.029864736557006836, 0.030468320846557616, 0.02998454475402832, 0.030478080749511718, 0.029931776046752928, 0.03059702491760254, 0.02996643257141113, 0.032292865753173826, 0.0317174072265625, 0.03065443229675293, 0.030453760147094725, 0.030134271621704102, 0.030158271789550783, 0.02997920036315918, 0.029839359283447265, 0.029871200561523436, 0.03032579231262207, 0.02998054313659668, 0.030358688354492187, 0.029938528060913086, 
0.030128128051757814, 0.029879615783691405, 0.02999718475341797, 0.030034496307373048, 0.030296064376831053, 0.030016767501831056, 0.030464767456054687, 0.029962240219116212, 0.030271488189697264, 0.02997248077392578, 0.02993292808532715, 0.03013910484313965, 0.030346944808959962, 0.03001318359375, 0.030353151321411132, 0.03008790397644043, 0.03043142318725586, 0.029984575271606445, 0.03000339126586914, 0.03005558395385742, 0.030517919540405274, 0.030083072662353515, 0.03054364776611328, 0.02995395278930664, 0.030505279541015624, 0.030213119506835938, 0.030059104919433595, 0.029976896286010742, 0.03040233612060547, 0.029944128036499023, 0.03027315139770508, 0.030019968032836915, 0.03034316825866699, 0.02989593505859375, 0.02979302406311035, 0.029917184829711913, 0.030356576919555664, 0.02994883155822754, 0.030486623764038087, 0.02991401672363281, 0.030448896408081055, 0.03007676887512207, 0.030353567123413087, 0.029941503524780273, 0.03050070381164551, 0.03008118438720703, 0.030840831756591795, 0.030101503372192383, 0.03069705581665039, 0.030132320404052733, 0.03303260803222656, 0.03164384078979492, 0.03116851234436035, 0.030525215148925783, 0.030113216400146484, 0.030249759674072264, 0.030066688537597655, 0.029934623718261718, 0.029981664657592774, 0.030336479187011718, 0.030072832107543947, 0.030302463531494142, 0.029950239181518554, 0.03020185661315918, 0.02998588752746582, 0.029946239471435546, 0.030197887420654296, 0.030345632553100587, 0.03019980812072754, 0.030719295501708984, 0.030017791748046876, 0.03062419128417969, 0.030156736373901368, 0.03012931251525879, 0.030405824661254882, 0.030424800872802735, 0.030203903198242187, 0.030418848037719725, 0.03002992057800293, 0.030509056091308592, 0.030084320068359375, 0.030278432846069337, 0.030427135467529298, 0.030718975067138672, 0.03017420768737793, 0.03072204780578613, 0.03052067184448242, 0.030554784774780273, 0.02999100875854492, 0.03002979278564453, 0.030090656280517578, 0.030613759994506835, 0.030037855148315428, 0.030529983520507814, 0.029943328857421875, 0.030665184020996095, 0.030185760498046874, 0.030349023818969728, 0.0301212158203125, 0.030464096069335936, 0.030005504608154297, 0.03052729606628418, 0.030268224716186523, 0.030649791717529296, 0.03015875244140625, 0.030314815521240233, 0.03018720054626465, 0.03091209602355957, 0.030580671310424804, 0.03080284881591797, 0.03039660835266113, 0.03070694351196289, 0.03022496032714844]",tokens/s,33.01128300345597,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,884.011008,3447.586816,0.0,3045.064704,2842.846208,s,1,7.6467548828125,7.6467548828125,0.0,7.6467548828125,7.6467548828125,7.6467548828125,7.6467548828125,[7.6467548828125],,kWh,6.035060587510088e-06,6.402252894981422e-07,2.011668276000189e-06,8.686954153008419e-06,,MB,1331.204096,3646.816256,0.0,3229.61408,2982.452736,s,10,0.43237715530395504,0.04323771553039551,0.0009452879711220824,0.04302596855163574,0.043781406021118165,0.044742158699035646,0.04551076084136963,"[0.04570291137695313, 0.04266889572143555, 0.04353647994995117, 0.042858592987060545, 0.04325056076049805, 0.043012577056884764, 0.04303936004638672, 0.04182374572753906, 0.04356790542602539, 0.0429161262512207]",tokens/s,5920.756840634552,kWh,1.4374675108660328e-06,1.584702886852826e-07,9.57506212627467e-07,2.553444012178783e-06,tokens/kWh,100256750.79578593,MB,1370.218496,3646.816256,0.0,3229.61408,2982.455296,s,10,15.364648437499998,1.5364648437500001,0.004800779918114868,1.5366460571289062,1.5421542480468748,1.5431697143554688,1.5439820874023438,"[1.5419285888671874, 1.53745703125, 1.54021923828125, 1.5441851806640625, 1.5358350830078125, 1.5344205322265625, 1.5393236083984374, 1.531820068359375, 1.53039599609375, 1.5290631103515624]",tokens/s,41.00321608806742,kWh,4.425022085246616e-05,4.880455047701306e-06,2.705843286677287e-05,7.618910876694034e-05,tokens/kWh,826889.8405507625,,s,630,15.360380672454843,0.02438155662294418,0.0004991819096972918,0.024298319816589357,0.024646966171264648,0.02485362386703491,0.02613171491622925,"[0.024706911087036133, 0.024282783508300782, 0.024305696487426757, 0.024168928146362303, 0.02419094467163086, 0.024150047302246094, 0.02434048080444336, 0.024337440490722655, 0.024418495178222657, 0.024171295166015624, 0.024309984207153322, 0.024219423294067382, 0.024123424530029296, 0.024376480102539063, 0.024111936569213867, 0.024217599868774413, 0.02428927993774414, 0.024102912902832032, 0.024221696853637696, 0.024144927978515626, 0.02439472007751465, 0.024211456298828125, 0.024293279647827147, 0.024418399810791015, 0.024334016799926757, 0.024199487686157227, 0.024236032485961914, 0.02432204818725586, 0.02481273651123047, 0.030450464248657227, 0.027338783264160157, 0.02431795120239258, 0.0242475528717041, 0.024451839447021485, 0.024219104766845703, 0.024580095291137697, 0.024375104904174806, 0.02437603187561035, 0.024276992797851563, 0.024983552932739257, 0.024211360931396485, 0.024082527160644532, 0.024196128845214843, 0.02449660873413086, 0.026036735534667968, 0.025199968338012694, 0.024565792083740233, 0.024346752166748045, 0.02425913619995117, 0.024259584426879883, 0.024292352676391602, 0.024153120040893556, 0.024062400817871095, 0.024134111404418946, 0.024256223678588866, 0.02417283248901367, 0.02409267234802246, 0.024014047622680664, 0.024203584671020507, 0.024074176788330078, 0.024053600311279295, 0.024154048919677734, 0.02414873504638672, 0.0244881591796875, 0.02409676742553711, 0.024145055770874023, 0.023925600051879884, 0.024123392105102538, 0.024125120162963868, 0.02428960037231445, 0.024130752563476562, 0.024255584716796875, 0.02415945625305176, 0.024207872390747072, 0.02410412788391113, 0.024128128051757812, 0.024268863677978515, 0.024221824645996093, 0.024184831619262694, 0.02439369583129883, 0.024225439071655273, 0.02427961540222168, 0.02430134391784668, 0.02415523147583008, 0.02445136070251465, 0.024141536712646485, 0.02422675132751465, 0.024331615447998046, 0.02420528030395508, 0.024611391067504883, 
0.024184959411621094, 0.02444044876098633, 0.02425894355773926, 0.024203264236450195, 0.024211456298828125, 0.024424448013305664, 0.024200384140014648, 0.02466694450378418, 0.024546655654907226, 0.0252542724609375, 0.024432863235473633, 0.024427616119384765, 0.024560415267944335, 0.024572128295898436, 0.024376991271972657, 0.02460089683532715, 0.024631296157836914, 0.025106367111206056, 0.024544864654541015, 0.024297632217407227, 0.024475967407226563, 0.024409120559692382, 0.024472543716430664, 0.02430691146850586, 0.02433513641357422, 0.024928255081176756, 0.02468659210205078, 0.024540384292602538, 0.024451871871948243, 0.024465408325195313, 0.024582143783569335, 0.02447996711730957, 0.024432159423828124, 0.024673952102661132, 0.02497702407836914, 0.024672639846801757, 0.02487548828125, 0.024528863906860352, 0.02436639976501465, 0.024547359466552735, 0.024588960647583008, 0.02461836814880371, 0.024681087493896484, 0.02446476745605469, 0.024444799423217773, 0.024431039810180664, 0.024606880187988282, 0.024543392181396485, 0.024696575164794923, 0.02466431999206543, 0.02456985664367676, 0.024576000213623047, 0.02453875160217285, 0.024373632431030273, 0.024510400772094727, 0.024571968078613282, 0.02449407958984375, 0.024569055557250977, 0.024628000259399416, 0.024731647491455077, 0.024700767517089845, 0.02450217628479004, 0.024680192947387696, 0.02436761665344238, 0.024327648162841796, 0.024246816635131837, 0.02427663993835449, 0.02435312080383301, 0.02435910415649414, 0.024395584106445312, 0.024475168228149415, 0.024508096694946288, 0.024656383514404297, 0.024539424896240235, 0.024606719970703125, 0.024386688232421874, 0.02440070343017578, 0.024376960754394533, 0.02437775993347168, 0.024389663696289064, 0.02434867286682129, 0.024360864639282227, 0.02439360046386719, 0.024187103271484375, 0.024145919799804686, 0.024122751235961915, 0.024203584671020507, 0.024182239532470704, 0.024306047439575196, 0.024295839309692382, 0.024334400177001954, 0.024137727737426756, 0.0242391357421875, 0.024131616592407226, 0.024696800231933595, 0.024322559356689453, 0.024438592910766603, 0.024033952713012695, 0.02435686492919922, 0.0247926082611084, 0.024070783615112303, 0.024123392105102538, 0.024070144653320313, 0.024383487701416014, 0.024205312728881836, 0.024346208572387694, 0.024111520767211913, 0.024088319778442384, 0.02418079948425293, 0.024119359970092773, 0.024399328231811523, 0.02389263916015625, 0.02400985527038574, 0.0241997127532959, 0.024370784759521483, 0.024630016326904296, 0.024291296005249024, 0.02457526397705078, 0.02454400062561035, 0.02474723243713379, 0.02429417610168457, 0.024436735153198243, 0.024288320541381837, 0.024435647964477537, 0.024475488662719726, 0.024536928176879882, 0.024377664566040038, 0.02818057632446289, 0.030482336044311522, 0.024895488739013674, 0.024720895767211915, 0.024471872329711913, 0.024524608612060548, 0.02458233642578125, 0.024398015975952148, 0.02431795120239258, 0.02431795120239258, 0.025357664108276366, 0.024379264831542968, 0.02434739112854004, 0.02427497673034668, 0.024190975189208985, 0.02414156723022461, 0.024106367111206055, 0.024248767852783203, 0.024240575790405273, 0.024196832656860352, 0.024246015548706056, 0.024461856842041017, 0.024285120010375978, 0.02458624076843262, 0.024295007705688477, 0.024283615112304688, 0.02473779106140137, 0.024348352432250978, 0.02407769584655762, 0.02442451286315918, 0.02437619209289551, 0.02413100814819336, 0.024132160186767577, 0.024010751724243166, 0.02402079963684082, 0.024619007110595705, 0.024190591812133788, 
0.02619206428527832, 0.02548886489868164, 0.024416063308715822, 0.024361888885498048, 0.024468608856201172, 0.024300416946411132, 0.024409151077270506, 0.024273855209350586, 0.02447135925292969, 0.02434272003173828, 0.024449024200439453, 0.02435481643676758, 0.024280351638793947, 0.02442518424987793, 0.024247840881347658, 0.024715744018554687, 0.024528287887573243, 0.024293312072753905, 0.024316192626953125, 0.02493177604675293, 0.02446636772155762, 0.0260897274017334, 0.024255327224731445, 0.024191904067993163, 0.024027135848999022, 0.024114303588867188, 0.024281984329223634, 0.02402659225463867, 0.023953920364379884, 0.024182815551757813, 0.02427289581298828, 0.02426812744140625, 0.02430633544921875, 0.024217599868774413, 0.02429324722290039, 0.024042879104614258, 0.02409926414489746, 0.024014207839965822, 0.024012800216674804, 0.024742847442626954, 0.024201215744018553, 0.02405686378479004, 0.024011520385742186, 0.0240600643157959, 0.023953472137451172, 0.024035327911376952, 0.024035327911376952, 0.023857152938842774, 0.02409267234802246, 0.02408448028564453, 0.024205312728881836, 0.02450227165222168, 0.02502230453491211, 0.02442844772338867, 0.024404224395751954, 0.024737247467041014, 0.024528831481933595, 0.02409891128540039, 0.024317632675170897, 0.024376127243041994, 0.024442880630493165, 0.024492063522338868, 0.0242794246673584, 0.024219648361206055, 0.02408243179321289, 0.02411315155029297, 0.02429952049255371, 0.02427872085571289, 0.02415622329711914, 0.024312063217163084, 0.024184383392333985, 0.024016639709472657, 0.024110944747924804, 0.024260671615600585, 0.024283487319946288, 0.024512960433959962, 0.024237503051757814, 0.024263135910034178, 0.024435840606689452, 0.024714208602905272, 0.02438105583190918, 0.0246746883392334, 0.02430975914001465, 0.024529983520507812, 0.024433504104614256, 0.024485759735107422, 0.0243222713470459, 0.024231935501098634, 0.02439289665222168, 0.024174911499023437, 0.02405632019042969, 0.024383487701416014, 0.024131584167480468, 0.024219648361206055, 0.02406809616088867, 0.024260671615600585, 0.0243275203704834, 0.024175199508666992, 0.024313663482666014, 0.0242193603515625, 0.02467683219909668, 0.024453119277954103, 0.024397823333740236, 0.024338111877441407, 0.024291648864746093, 0.024604223251342774, 0.024370815277099608, 0.024165184020996093, 0.024348735809326172, 0.024258079528808593, 0.024410367965698242, 0.024324256896972655, 0.0243507194519043, 0.024475648880004884, 0.024344575881958007, 0.024188224792480468, 0.024729600906372072, 0.025437023162841795, 0.024485376358032225, 0.024555871963500977, 0.02447132873535156, 0.02431155204772949, 0.024353248596191406, 0.02432793617248535, 0.02614886474609375, 0.024511520385742187, 0.024398815155029296, 0.024829952239990235, 0.02448147201538086, 0.02439567947387695, 0.02435465621948242, 0.024272607803344726, 0.024314655303955077, 0.02476448059082031, 0.024575679779052735, 0.024469823837280275, 0.024254463195800782, 0.024190975189208985, 0.02427414321899414, 0.024464096069335937, 0.0243175048828125, 0.024395391464233397, 0.024411008834838866, 0.02416640090942383, 0.024236032485961914, 0.02446950340270996, 0.024336383819580077, 0.02430975914001465, 0.024200639724731444, 0.02427907180786133, 0.0244117431640625, 0.024341440200805663, 0.024207359313964845, 0.024094047546386718, 0.024162559509277343, 0.024054176330566408, 0.024233983993530273, 0.024211456298828125, 0.024444160461425782, 0.02419174385070801, 0.024284576416015623, 0.02412156867980957, 0.024190528869628906, 0.023978815078735352, 
0.024296768188476564, 0.024198848724365233, 0.024333343505859376, 0.02411516761779785, 0.024200576782226563, 0.024149791717529297, 0.024300384521484374, 0.02427903938293457, 0.02412518310546875, 0.024905376434326172, 0.025115232467651367, 0.026888191223144533, 0.024493440628051758, 0.024776512145996094, 0.024607040405273437, 0.024278528213500978, 0.025268352508544922, 0.02417299270629883, 0.024373695373535155, 0.024345760345458985, 0.024395647048950194, 0.024303615570068358, 0.024193824768066405, 0.02514121627807617, 0.025407424926757814, 0.024226079940795897, 0.024304800033569336, 0.024107231140136718, 0.024052064895629884, 0.024119359970092773, 0.024012800216674804, 0.024035327911376952, 0.02395484733581543, 0.024055583953857422, 0.02411097526550293, 0.023904735565185548, 0.024053823471069335, 0.023943199157714843, 0.024040992736816407, 0.024087392807006835, 0.024069791793823243, 0.02405311965942383, 0.02396054458618164, 0.024200992584228517, 0.024370527267456053, 0.02425331115722656, 0.024236032485961914, 0.024024896621704102, 0.02414566421508789, 0.024186464309692384, 0.024671072006225585, 0.024757984161376954, 0.024588064193725587, 0.024206911087036133, 0.02422265625, 0.024282943725585936, 0.024385311126708983, 0.024240320205688476, 0.024555583953857422, 0.02455686378479004, 0.02438435173034668, 0.024468736648559572, 0.025496320724487306, 0.02433024024963379, 0.024366464614868164, 0.024164831161499024, 0.024238239288330077, 0.024358911514282225, 0.02433568000793457, 0.02445804786682129, 0.024339712142944336, 0.024140480041503907, 0.024180671691894532, 0.02434636878967285, 0.0244902400970459, 0.02453923225402832, 0.024299007415771484, 0.024314271926879884, 0.02433228874206543, 0.024199167251586915, 0.024567808151245117, 0.024493247985839843, 0.024271167755126954, 0.024254047393798828, 0.024220319747924806, 0.024328447341918944, 0.02487299156188965, 0.024514528274536134, 0.024604671478271483, 0.024243520736694335, 0.024645919799804687, 0.024449440002441408, 0.024475648880004884, 0.024327487945556642, 0.02453798484802246, 0.023988031387329103, 0.02411238479614258, 0.023992128372192383, 0.02393382453918457, 0.02420947265625, 0.024123392105102538, 0.02435478401184082, 0.02401487922668457, 0.0241429443359375, 0.024287712097167968, 0.024273311614990235, 0.024123424530029296, 0.024289024353027343, 0.024183040618896486, 0.02427631950378418, 0.024394399642944337, 0.024373247146606446, 0.024297119140625, 0.024408416748046877, 0.024418495178222657, 0.024282943725585936, 0.02434467124938965, 0.024132543563842774, 0.024312192916870118, 0.02417036819458008, 0.02434867286682129, 0.02401139259338379, 0.024215167999267578, 0.024209888458251953, 0.02421481513977051, 0.024128223419189455, 0.02408243179321289, 0.02398195266723633, 0.02424131202697754, 0.02409775924682617, 0.0244715518951416, 0.024615968704223633, 0.024592512130737303, 0.024240991592407227, 0.02419673538208008, 0.0245762882232666, 0.02458140754699707, 0.024347455978393554, 0.02429465675354004, 0.024174591064453126, 0.024189472198486328, 0.023958816528320312, 0.02417145538330078, 0.02411494445800781, 0.024247583389282228, 0.024264736175537108, 0.024791776657104494, 0.024284992218017578, 0.024197536468505858, 0.024571903228759767, 0.02415001678466797, 0.024352767944335937, 0.024390783309936524, 0.024435583114624022, 0.024233087539672852, 0.024261119842529297, 0.02425894355773926, 0.02410086441040039, 0.024143455505371093, 0.024463584899902344, 0.024316095352172853, 0.0242872314453125, 0.024379295349121095, 0.024257791519165038, 
0.02438435173034668, 0.02419033622741699, 0.02444761657714844, 0.02429292869567871, 0.024294879913330077, 0.024221920013427736, 0.024244064331054686, 0.024359840393066406, 0.024180736541748047, 0.02412748718261719, 0.024170495986938476, 0.024474784851074217, 0.02397270393371582, 0.023986175537109376, 0.023987968444824218, 0.023950592041015625, 0.02412441635131836, 0.023977760314941407, 0.02416662406921387, 0.024345855712890625, 0.02420812797546387, 0.024190975189208985, 0.024138751983642577, 0.02424470329284668, 0.024044063568115233, 0.024002464294433593, 0.02394940757751465, 0.024108991622924805, 0.024069887161254883, 0.02411952018737793, 0.024381311416625976, 0.024163711547851564, 0.024097631454467774, 0.02399564743041992, 0.02412566375732422, 0.024104799270629883, 0.025327327728271485, 0.02604745674133301, 0.024231935501098634, 0.023998464584350586, 0.02431590461730957, 0.02428108787536621, 0.02404761505126953, 0.023932544708251954, 0.02436537551879883, 0.024100927352905272, 0.024224800109863283, 0.025295616149902344]",tokens/s,41.014608520071,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1004.064768,7156.465664,0.0,6761.218048,6730.975744,s,1,7.08850537109375,7.08850537109375,0.0,7.08850537109375,7.08850537109375,7.08850537109375,7.08850537109375,[7.08850537109375],,kWh,7.473964341640263e-06,8.138492495711588e-07,4.322225680003511e-06,1.2610039271214931e-05,,MB,1527.885824,7196.311552,0.0,6784.28672,5879.090688,s,10,3.6730435485839843,0.3673043548583984,0.004754656938597579,0.36560081481933593,0.3695772918701172,0.3752982437133789,0.3798750051879883,"[0.3654117431640625, 0.3666231689453125, 0.3810191955566406, 0.36740234375, 0.36578988647460936, 0.36830596923828124, 0.36518331909179685, 0.36410806274414065, 0.36380136108398436, 0.36539849853515627]",tokens/s,696.9696836257006,kWh,1.07758625104168e-05,1.1883993905485938e-06,5.3282483102145325e-06,1.7292510211179925e-05,tokens/kWh,14804097.084441293,MB,1573.31456,7198.408704,0.0,6786.383872,5879.093248,s,10,23.891348876953124,2.3891348876953122,0.004509357251511045,2.3890838623046875,2.3951504638671874,2.3954573120117186,2.3957027905273436,"[2.385606201171875, 2.395082275390625, 2.39576416015625, 2.382531494140625, 2.39032275390625, 2.3897607421875, 2.388406982421875, 2.386313720703125, 2.383421875, 2.394138671875]",tokens/s,26.369377603779075,kWh,7.006303175333226e-05,7.725936789977188e-06,4.655107097098468e-05,0.00012434003951429412,tokens/kWh,506675.08427771996,,s,630,23.889669300079365,0.03792011000012595,0.0003598356810397235,0.037836273193359374,0.03816456489562988,0.03840336837768555,0.03975314315795899,"[0.038434814453125, 0.03816425704956055, 0.03808422470092773, 0.037837406158447266, 0.03790643310546875, 0.03793100738525391, 0.037820415496826174, 0.037976062774658204, 0.03801603317260742, 0.03786800003051758, 0.037982719421386715, 0.03776847839355469, 0.03777199935913086, 
0.0378081283569336, 0.037742176055908204, 0.037695518493652345, 0.03802076721191406, 0.03784982299804687, 0.038254432678222657, 0.038082721710205075, 0.03779087829589844, 0.03768406295776367, 0.037840385437011716, 0.037827072143554685, 0.03774054336547852, 0.03768025588989258, 0.03773251342773438, 0.03790431976318359, 0.03773110580444336, 0.03775830459594726, 0.037687454223632816, 0.03779840087890625, 0.037689342498779296, 0.03775283050537109, 0.03774399948120117, 0.037767807006835935, 0.03783459091186524, 0.03771139144897461, 0.03772377777099609, 0.03799126434326172, 0.03793936157226562, 0.038174720764160154, 0.037937152862548826, 0.03777769470214844, 0.03779759979248047, 0.03797401428222656, 0.03793920135498047, 0.03785836791992187, 0.03794425582885742, 0.03795264053344727, 0.037802879333496096, 0.03795065689086914, 0.03774166488647461, 0.03783651351928711, 0.037926910400390625, 0.03780198287963867, 0.03795711898803711, 0.03777177429199219, 0.037639934539794924, 0.03777356719970703, 0.037789215087890626, 0.037832801818847656, 0.03792079925537109, 0.038636512756347656, 0.038199295043945314, 0.038258689880371094, 0.038529022216796875, 0.037872638702392575, 0.03786444854736328, 0.03786137771606445, 0.037806079864501956, 0.03787980651855469, 0.038117374420166016, 0.03805593490600586, 0.037904384613037106, 0.037601280212402347, 0.03767705535888672, 0.03782553482055664, 0.03781324768066406, 0.03779993438720703, 0.037698558807373043, 0.03775993728637695, 0.0376954574584961, 0.03825878524780273, 0.03870089721679688, 0.03788508987426758, 0.041063423156738284, 0.03853107070922852, 0.03816387176513672, 0.03805782318115234, 0.03796003341674805, 0.0378056640625, 0.03813580703735352, 0.037873760223388675, 0.03774556732177734, 0.037574462890625, 0.03764950561523438, 0.03788604736328125, 0.03783353424072266, 0.03786115264892578, 0.03772633743286133, 0.03767036819458008, 0.03768502426147461, 0.037714622497558595, 0.037858783721923826, 0.03765523147583008, 0.038035457611083984, 0.03771187210083008, 0.0377050895690918, 0.03790911865234375, 0.03769155120849609, 0.037741790771484374, 0.03762860870361328, 0.03762099075317383, 0.037823169708251954, 0.037986209869384766, 0.037822559356689454, 0.037994014739990235, 0.03832889556884766, 0.03784672164916992, 0.04024716949462891, 0.03815663909912109, 0.03842435073852539, 0.03794966506958008, 0.03811328125, 0.038016223907470705, 0.03837593460083008, 0.038176769256591796, 0.0381030387878418, 0.037910526275634765, 0.0380882568359375, 0.03814035034179687, 0.03785318374633789, 0.03795119857788086, 0.03909356689453125, 0.038056926727294924, 0.03779103851318359, 0.03817305755615234, 0.04041555023193359, 0.03816758346557617, 0.038406753540039064, 0.038395713806152344, 0.037953407287597656, 0.03768083190917969, 0.03786041641235351, 0.03771590423583984, 0.03778511810302734, 0.03791001510620117, 0.03801113510131836, 0.03784368133544922, 0.03771372985839844, 0.037888446807861326, 0.03804134368896484, 0.038209537506103515, 0.03793664169311523, 0.03776358413696289, 0.03782358551025391, 0.03805072021484375, 0.03785846328735352, 0.037921630859375, 0.03786140823364258, 0.03977977752685547, 0.03834940719604492, 0.03797804641723633, 0.03786297607421875, 0.03781660842895508, 0.03781798553466797, 0.038386207580566406, 0.038121440887451175, 0.037815807342529296, 0.0376407356262207, 0.03754940795898438, 0.037763553619384764, 0.03775052642822266, 0.038021183013916014, 0.0379233283996582, 0.03773632049560547, 0.03786649703979492, 0.037735424041748046, 0.037820415496826174, 
0.03783603286743164, 0.03769625473022461, 0.03760947036743164, 0.03804774475097656, 0.037814273834228515, 0.03789766311645508, 0.037923393249511717, 0.03799846267700195, 0.03811094284057617, 0.03832179260253906, 0.0380994873046875, 0.03804716873168945, 0.03781027221679688, 0.03779756927490235, 0.037935009002685545, 0.037829345703125, 0.03785932922363281, 0.03811328125, 0.037647903442382814, 0.03764249420166016, 0.0379005126953125, 0.03772415924072266, 0.0376627197265625, 0.03760332870483398, 0.03804323196411133, 0.03783107376098633, 0.03789823913574219, 0.03793920135498047, 0.037814529418945315, 0.037817310333251954, 0.037782302856445314, 0.03792892837524414, 0.03779996871948242, 0.03774828720092773, 0.03779423904418945, 0.037622974395751956, 0.03761849594116211, 0.0376545295715332, 0.03785113525390625, 0.03779174423217774, 0.037736160278320316, 0.03776124954223633, 0.03802096176147461, 0.03775305557250976, 0.03767059326171875, 0.037636417388916016, 0.03779587173461914, 0.03766812896728516, 0.037665470123291016, 0.03766835021972656, 0.03763776016235352, 0.03774284744262695, 0.037720832824707035, 0.03762777709960938, 0.037976062774658204, 0.037907455444335936, 0.037894878387451175, 0.03793331146240234, 0.037927169799804684, 0.03783452987670898, 0.03777926254272461, 0.037811614990234374, 0.037776161193847656, 0.03793100738525391, 0.038027263641357424, 0.037806079864501956, 0.03783580780029297, 0.037740768432617186, 0.037808895111083984, 0.037664768218994144, 0.03790447998046875, 0.037769119262695314, 0.038642559051513675, 0.03816447830200195, 0.03803750228881836, 0.037916160583496096, 0.03790041732788086, 0.03790240097045899, 0.03778591918945313, 0.03785308837890625, 0.03779779052734375, 0.03788937759399414, 0.037947967529296876, 0.03820163345336914, 0.0377262077331543, 0.037897247314453125, 0.03803340911865234, 0.0378131217956543, 0.03796326446533203, 0.03779612731933594, 0.037967903137207035, 0.038585792541503905, 0.03793596649169922, 0.03817788696289062, 0.038064800262451175, 0.03789030456542969, 0.03776041412353515, 0.03781488037109375, 0.037876960754394534, 0.03804012680053711, 0.037824737548828126, 0.03780531311035156, 0.037628353118896486, 0.03768998336791992, 0.037779136657714846, 0.0377077751159668, 0.03771596908569336, 0.03807369613647461, 0.037896800994873046, 0.03780819320678711, 0.037617504119873045, 0.03782857513427734, 0.0376956787109375, 0.038141952514648435, 0.03801702499389648, 0.03789823913574219, 0.03804111862182617, 0.037763774871826174, 0.03833792114257813, 0.039895294189453125, 0.03823132705688476, 0.037769248962402344, 0.03767612838745117, 0.037643966674804685, 0.037576766967773435, 0.03785728073120117, 0.03790659332275391, 0.03810876846313477, 0.038156097412109374, 0.037888446807861326, 0.037863201141357425, 0.03758451080322266, 0.037757118225097655, 0.03773404693603516, 0.0378502082824707, 0.038300830841064455, 0.03816534423828125, 0.03787980651855469, 0.03788185501098633, 0.03778335952758789, 0.0377243537902832, 0.03780198287963867, 0.03772118377685547, 0.03764876937866211, 0.0381459846496582, 0.03790428924560547, 0.03791737747192383, 0.03779174423217774, 0.037649951934814456, 0.03769596862792969, 0.037838462829589845, 0.03769996643066406, 0.037607425689697264, 0.03761356735229492, 0.03768326568603515, 0.037650367736816404, 0.03772313690185547, 0.03770880126953125, 0.037771263122558595, 0.037797409057617186, 0.03767036819458008, 0.037694465637207034, 0.03773795318603516, 0.03828489685058594, 0.038448062896728516, 0.03934419250488281, 0.038096481323242185, 
0.03772988891601563, 0.03765935897827148, 0.03780732727050781, 0.03952313613891602, 0.03856588745117188, 0.03796495819091797, 0.03774176025390625, 0.03772089767456055, 0.03878998565673828, 0.03778889465332031, 0.03792156982421875, 0.03783171081542969, 0.03762063980102539, 0.03766019058227539, 0.037668609619140626, 0.0376321907043457, 0.03772025680541992, 0.037684703826904295, 0.03793171310424805, 0.03801523208618164, 0.037944862365722656, 0.037730785369873045, 0.03801430511474609, 0.03781292724609375, 0.03772822570800781, 0.0382435188293457, 0.038894657135009766, 0.03807206344604492, 0.03813759994506836, 0.03780764770507813, 0.037838592529296874, 0.03833196640014649, 0.038144542694091794, 0.037967071533203126, 0.037937950134277344, 0.03792051315307617, 0.03784320068359375, 0.03798361587524414, 0.03780672073364258, 0.03947017669677735, 0.03788451385498047, 0.03791865539550781, 0.03789043045043945, 0.03765043258666992, 0.037789695739746096, 0.03793113708496094, 0.03787753677368164, 0.03781846237182617, 0.0381399040222168, 0.03807027053833008, 0.037893825531005856, 0.037875648498535155, 0.03790172958374023, 0.03782345581054687, 0.03780953598022461, 0.03784767913818359, 0.037934398651123045, 0.038052543640136716, 0.037705726623535156, 0.03768320083618164, 0.037855232238769534, 0.0376545295715332, 0.037996543884277346, 0.037649471282958986, 0.037959617614746095, 0.038599327087402345, 0.037719745635986325, 0.037808799743652345, 0.03771161651611328, 0.03765663909912109, 0.03771731185913086, 0.03761446380615235, 0.03779993438720703, 0.03777740859985351, 0.03774604797363281, 0.03780467224121094, 0.03795500946044922, 0.03803577423095703, 0.03774272155761719, 0.037728382110595704, 0.037787647247314454, 0.03776716613769531, 0.03790182495117188, 0.03777155303955078, 0.037739936828613284, 0.037747520446777344, 0.03780176162719726, 0.037822017669677736, 0.03902032089233398, 0.03791347122192383, 0.037846782684326175, 0.037920639038085936, 0.03796384048461914, 0.037800254821777346, 0.038555519104003906, 0.03826496124267578, 0.037771263122558595, 0.03783427047729492, 0.037980640411376956, 0.03775897598266602, 0.03785894393920899, 0.03772003173828125, 0.0376959342956543, 0.037846656799316404, 0.037879550933837894, 0.038142559051513675, 0.03794940948486328, 0.03780611038208008, 0.0384450569152832, 0.03787081527709961, 0.037873985290527344, 0.03773680114746094, 0.03794956970214844, 0.03789328002929687, 0.037862239837646486, 0.037860671997070314, 0.03783545684814453, 0.03831193542480469, 0.03799587249755859, 0.0380577278137207, 0.03786985778808594, 0.037939838409423825, 0.0381214714050293, 0.03782428741455078, 0.03795785522460938, 0.03787366485595703, 0.03786342239379883, 0.0377262077331543, 0.03781868743896484, 0.037829822540283206, 0.03780454254150391, 0.03788819122314453, 0.03781574249267578, 0.03781843185424805, 0.03775110244750977, 0.038109054565429686, 0.03778268814086914, 0.037739105224609375, 0.03766502380371094, 0.037723297119140624, 0.0376104621887207, 0.037811519622802735, 0.03765523147583008, 0.037779006958007816, 0.037659072875976564, 0.037822463989257815, 0.0377828483581543, 0.03780019378662109, 0.0378015022277832, 0.03798659133911133, 0.037791454315185546, 0.038163360595703126, 0.03786259078979492, 0.0376163215637207, 0.03792294311523438, 0.037680801391601564, 0.03772428894042969, 0.03827519989013672, 0.037943294525146484, 0.0378488655090332, 0.03766191864013672, 0.03773747253417969, 0.03784703826904297, 0.03768729782104492, 0.03775651168823242, 0.037685054779052735, 0.03786140823364258, 
0.03796640014648438, 0.03793920135498047, 0.03797401428222656, 0.03784703826904297, 0.037781505584716796, 0.03771529769897461, 0.03775305557250976, 0.037765216827392575, 0.037851776123046875, 0.03784249496459961, 0.03773155212402344, 0.03809913635253906, 0.037743358612060546, 0.03768320083618164, 0.03755984115600586, 0.03756233596801758, 0.03798476791381836, 0.03779481506347656, 0.03805628967285156, 0.03779446411132813, 0.03784835052490235, 0.03787567901611328, 0.03769625473022461, 0.03778169631958008, 0.03766457748413086, 0.03769728088378906, 0.03770908737182617, 0.03789836883544922, 0.038037376403808595, 0.04062108612060547, 0.0381275520324707, 0.03795558547973633, 0.0377663688659668, 0.037724960327148435, 0.03787273788452149, 0.037632831573486326, 0.03768124771118164, 0.03786547088623047, 0.037691009521484374, 0.03761190414428711, 0.03772825622558594, 0.03746771240234375, 0.03764678573608399, 0.03766057586669922, 0.03766486358642578, 0.03766032028198242, 0.037609375, 0.03763420867919922, 0.03766896057128906, 0.037775550842285156, 0.03768320083618164, 0.03787980651855469, 0.03766499328613281, 0.038867198944091796, 0.03826956939697266, 0.03811532974243164, 0.039931999206542966, 0.03806915283203125, 0.03805728149414062, 0.0378856315612793, 0.037855232238769534, 0.037689342498779296, 0.037797374725341795, 0.03769190216064453, 0.03763814544677734, 0.03763558578491211, 0.037658496856689455, 0.03863820648193359, 0.037969696044921876, 0.03779401779174805, 0.037750049591064455, 0.037840736389160155, 0.038257537841796876, 0.037897632598876956, 0.03791523361206055, 0.037926910400390625, 0.03776921463012695, 0.03791817474365235, 0.03778531265258789, 0.03782534408569336, 0.03807436752319336, 0.037814273834228515, 0.03781631851196289, 0.0376995849609375, 0.03910860824584961, 0.03814179229736328, 0.03794140625, 0.03839923095703125, 0.03822668838500977, 0.03804159927368164, 0.038139713287353515, 0.03775289535522461, 0.037806209564208985, 0.037943294525146484, 0.037901630401611326, 0.03783340835571289, 0.037951648712158205, 0.038068000793457034, 0.037781566619873044, 0.03783270263671875, 0.03799971389770508, 0.037824798583984375, 0.03798409652709961, 0.037764991760253906, 0.037835296630859376, 0.037719871520996096, 0.0378721923828125, 0.03968793487548828, 0.038011104583740234, 0.03791049575805664, 0.03794745635986328, 0.03784000015258789, 0.03769027328491211, 0.03772003173828125, 0.03775283050537109, 0.03767695999145508]",tokens/s,26.371231517964443,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,843.194368,12640.518144,0.0,12245.270528,12234.440192,s,1,7.641587890625,7.641587890625,0.0,7.641587890625,7.641587890625,7.641587890625,7.641587890625,[7.641587890625],,kWh,1.3793814154164843e-05,1.512477956816801e-06,6.774172085997551e-06,2.2080464196979194e-05,,MB,1152.118784,12925.730816,0.0,12517.900288,12440.744448,s,10,1.8887086944580078,0.18887086944580078,0.0031780540365539452,0.18875924682617187,0.19160263061523436,0.19316996765136718,0.19442383728027343,"[0.18195571899414062, 0.18829545593261718, 0.1900277099609375, 0.18892422485351562, 0.1861235809326172, 0.1881266632080078, 0.19125433349609375, 0.19066943359375, 0.18859426879882812, 0.1947373046875]",tokens/s,1355.4234210451543,kWh,5.656758292948766e-06,6.235645935288457e-07,3.7684218181538376e-06,1.004874470463145e-05,tokens/kWh,25475818.87337729,MB,1174.585344,12988.645376,0.0,12580.814848,12543.681024,s,10,36.67357470703126,3.667357470703125,0.006592214839360489,3.6670753173828126,3.675392626953125,3.677593408203125,3.679354033203125,"[3.655662109375, 3.6608779296875, 3.6623349609375, 3.66615576171875, 3.667674072265625, 3.668391357421875, 3.6664765625, 3.67130419921875, 3.674903564453125, 3.679794189453125]",tokens/s,17.17858171811141,kWh,0.000107243756794551,1.1829444516997074e-05,7.122219372984619e-05,0.00019029539504139423,tokens/kWh,331064.23824021517,,s,630,36.67032344436643,0.05820686261010548,0.0005326349760603359,0.058130367279052735,0.0585946159362793,0.05883475112915039,0.06157141201019287,"[0.0616734733581543, 0.05863423919677734, 0.05784972763061524, 0.05770662307739258, 0.057624576568603515, 0.057617568969726564, 0.05753273773193359, 0.05757804870605469, 0.05756480026245117, 0.057741630554199216, 0.057827487945556644, 0.057826751708984374, 0.05787897491455078, 0.057568862915039064, 0.05755945587158203, 0.057540607452392575, 0.058060832977294925, 0.058225822448730466, 0.05803606414794922, 0.05805721664428711, 0.05789334487915039, 0.057665313720703125, 0.057708415985107425, 0.057831424713134766, 0.0577724494934082, 0.057638240814208985, 0.05784870529174805, 0.05781884765625, 0.05777151870727539, 0.05781145477294922, 0.05788217544555664, 0.05788499069213867, 0.05808697509765625, 0.058490943908691403, 0.05812905502319336, 0.058234752655029295, 0.05814064025878906, 0.05808153533935547, 0.057945503234863284, 0.05794198226928711, 0.057885311126708985, 0.058000385284423826, 0.0578897590637207, 0.05812339019775391, 0.0579131851196289, 0.058012481689453124, 0.058137985229492185, 0.058095840454101565, 0.05795449447631836, 0.05808355331420898, 0.05812588882446289, 0.05821488189697266, 0.058198177337646484, 0.05842105484008789, 0.05818982315063476, 0.05820415878295898, 0.05817958450317383, 0.05830854415893555, 0.05828607940673828, 0.05811820983886719, 0.05816729736328125, 0.05801574325561523, 0.058159103393554686, 0.06135603332519531, 0.05881856155395508, 0.0578416633605957, 0.057649150848388675, 0.05765529632568359, 0.05750697708129883, 0.05751007843017578, 0.057581600189208985, 0.05787881469726563, 0.05784998321533203, 0.05774153518676758, 0.057775711059570314, 0.05756707382202148, 0.05762217712402344, 0.05780368041992188, 0.05773926544189453, 0.05796393585205078, 0.058261566162109375, 0.05823542404174804, 0.05810383987426758, 0.05805385589599609, 0.05791206359863281, 0.05784521484375, 0.057858463287353515, 0.05776947021484375, 0.05791603088378906, 0.057891040802001956, 0.057966400146484375, 0.05798908615112305, 0.05797683334350586, 0.05801574325561523, 
0.058011425018310546, 0.058046783447265625, 0.05829827117919922, 0.058298366546630856, 0.05848905563354492, 0.058240352630615236, 0.05824121475219726, 0.058396961212158205, 0.058135711669921875, 0.05802169418334961, 0.05827276611328125, 0.05820828628540039, 0.05797990417480469, 0.05795267105102539, 0.058194496154785155, 0.05815091323852539, 0.05809878540039062, 0.05804864120483398, 0.05812067031860352, 0.05827616119384765, 0.058324417114257815, 0.05850300979614258, 0.05853257751464844, 0.05838582229614258, 0.05826211166381836, 0.05826969528198242, 0.05825081634521485, 0.05815311813354492, 0.058105792999267575, 0.05817379379272461, 0.058175487518310545, 0.0582509765625, 0.061288478851318356, 0.05884662246704102, 0.0577562255859375, 0.057779743194580076, 0.0576475830078125, 0.057667423248291015, 0.057686302185058595, 0.05767750549316406, 0.05754604721069336, 0.05775040054321289, 0.05773311996459961, 0.05769625473022461, 0.05776793670654297, 0.05793791961669922, 0.05784371185302734, 0.057843742370605467, 0.058286048889160155, 0.05846550369262695, 0.05837424087524414, 0.058184383392333984, 0.05807923126220703, 0.05792486572265625, 0.05814080047607422, 0.057872608184814454, 0.05783388900756836, 0.05795782470703125, 0.05798380661010742, 0.057955265045166016, 0.05798937606811523, 0.058036800384521484, 0.05816128158569336, 0.05801504135131836, 0.058073665618896486, 0.05829571151733398, 0.05840342330932617, 0.058386016845703125, 0.05839846420288086, 0.05822127914428711, 0.058230720520019534, 0.05830656051635742, 0.058064479827880856, 0.05800592041015625, 0.05788195037841797, 0.05806707382202148, 0.058001953125, 0.05800703811645508, 0.05803641510009765, 0.05803852844238281, 0.05801910400390625, 0.058120990753173826, 0.058464256286621094, 0.05837004852294922, 0.05837619018554688, 0.058396129608154296, 0.05832553482055664, 0.058139999389648436, 0.058433727264404295, 0.05831727981567383, 0.05819801712036133, 0.058396671295166014, 0.05809561538696289, 0.05808745574951172, 0.05809968185424805, 0.061937248229980466, 0.058886463165283204, 0.05790934371948242, 0.057737342834472655, 0.05775347137451172, 0.05772675323486328, 0.05765987014770508, 0.05816239929199219, 0.05766403198242188, 0.05779632186889649, 0.05808566284179688, 0.058039905548095704, 0.05792809677124024, 0.0579090576171875, 0.058187839508056644, 0.057853214263916014, 0.0582309455871582, 0.0582171516418457, 0.05822895812988281, 0.05819369506835938, 0.05803945541381836, 0.05786710357666015, 0.057788448333740236, 0.057823200225830075, 0.05781449508666992, 0.05793804931640625, 0.05796905517578125, 0.058058303833007814, 0.05800377655029297, 0.058011775970458986, 0.05803392028808594, 0.05801398468017578, 0.05798908615112305, 0.05843360137939453, 0.058247104644775394, 0.0583741455078125, 0.05818982315063476, 0.058191295623779296, 0.058186302185058596, 0.05840812683105469, 0.05827462387084961, 0.05813452911376953, 0.05814476776123047, 0.05810358428955078, 0.05801596832275391, 0.05831270217895508, 0.058114078521728514, 0.05805871963500977, 0.058133823394775394, 0.0581802864074707, 0.05827993774414063, 0.0584571533203125, 0.058373054504394534, 0.05859942245483398, 0.05828006362915039, 0.05865039825439453, 0.05836582565307617, 0.05834979248046875, 0.058322017669677734, 0.05820883178710937, 0.0583355827331543, 0.058308609008789064, 0.058353824615478514, 0.06163248062133789, 0.05907939147949219, 0.058413311004638674, 0.05795753479003906, 0.05793264007568359, 0.05788832092285156, 0.05788681411743164, 0.057938270568847657, 0.05790291213989258, 
0.057914718627929684, 0.0578138542175293, 0.057775390625, 0.057747711181640626, 0.057811233520507814, 0.0580753288269043, 0.05834854507446289, 0.05808425521850586, 0.058314655303955076, 0.058380481719970706, 0.058340545654296874, 0.05800815963745117, 0.057971073150634767, 0.0579192008972168, 0.058131679534912106, 0.05806991958618164, 0.057929729461669924, 0.057995262145996096, 0.058060798645019535, 0.05798425674438477, 0.05794892883300781, 0.057903102874755856, 0.057987167358398435, 0.05805670547485352, 0.058149856567382814, 0.05824812698364258, 0.05852057647705078, 0.05824124908447265, 0.05825414276123047, 0.058138206481933595, 0.05839091110229492, 0.05828607940673828, 0.058226432800292965, 0.05800508880615234, 0.058120193481445315, 0.058038463592529295, 0.05821900939941406, 0.05804848098754883, 0.05807686233520508, 0.05805692672729492, 0.058310047149658206, 0.05832112121582031, 0.05838691329956055, 0.05820751953125, 0.05841340637207031, 0.05839091110229492, 0.058170528411865235, 0.05818454360961914, 0.058449790954589846, 0.05850284957885742, 0.05835558319091797, 0.05830227279663086, 0.05860547256469727, 0.058536575317382815, 0.06161423873901367, 0.05906713485717773, 0.057949695587158206, 0.0576847038269043, 0.057638687133789064, 0.057654529571533206, 0.057655071258544924, 0.05794611358642578, 0.057909854888916014, 0.05792601776123047, 0.0577760009765625, 0.057771678924560546, 0.057739742279052736, 0.05770153427124024, 0.057680736541748046, 0.057731071472167966, 0.05841100692749023, 0.05871206283569336, 0.05843548965454102, 0.05842339324951172, 0.05799718475341797, 0.05806041717529297, 0.057786880493164064, 0.05802598571777344, 0.05802188873291016, 0.05797884750366211, 0.05802384185791016, 0.05796486282348633, 0.057984832763671876, 0.058001407623291014, 0.058076351165771485, 0.05796851348876953, 0.05839763259887695, 0.05820137786865234, 0.05828003311157227, 0.05855705642700195, 0.0582902717590332, 0.05828303909301758, 0.058184288024902345, 0.058386848449707034, 0.05818966293334961, 0.058285953521728516, 0.05816361618041992, 0.058119937896728514, 0.058052223205566404, 0.05816972732543945, 0.05845196914672852, 0.058916862487792966, 0.05815929412841797, 0.05833504104614258, 0.058400032043457034, 0.05854844665527344, 0.05841542434692383, 0.05842963027954102, 0.05844521713256836, 0.05847715377807617, 0.058343425750732425, 0.05825235366821289, 0.058385345458984376, 0.058375457763671874, 0.05823766326904297, 0.05859123229980469, 0.05842943954467773, 0.06165760040283203, 0.05901926422119141, 0.05797628784179688, 0.057740833282470705, 0.05768499374389648, 0.057631935119628906, 0.05777635192871094, 0.057844318389892575, 0.05765439987182617, 0.05771353530883789, 0.05787551879882812, 0.05794911956787109, 0.05778988647460938, 0.057635391235351566, 0.05779251098632812, 0.05804854583740234, 0.058268798828125, 0.05844678497314453, 0.05824911880493164, 0.058060798645019535, 0.05790924835205078, 0.057857185363769534, 0.05771731185913086, 0.057743648529052734, 0.05776176071166992, 0.05784579086303711, 0.05814700698852539, 0.05791084671020508, 0.05798937606811523, 0.05800755310058594, 0.057915233612060545, 0.05803206253051758, 0.058183456420898436, 0.0583623046875, 0.05857024002075195, 0.05865727996826172, 0.05826150512695313, 0.05819913482666016, 0.058188705444335936, 0.058191871643066405, 0.05809561538696289, 0.05825107192993164, 0.05810195159912109, 0.05813398361206055, 0.05816128158569336, 0.05863862228393555, 0.0581448974609375, 0.05803113555908203, 0.05799417495727539, 0.05817971038818359, 
0.058480545043945314, 0.05858083343505859, 0.05881174468994141, 0.05870998382568359, 0.058953857421875, 0.058290912628173826, 0.05819801712036133, 0.05841100692749023, 0.05837209701538086, 0.05823897552490234, 0.058357982635498046, 0.05840070343017578, 0.05833916854858399, 0.06146656036376953, 0.05897689437866211, 0.05806694412231445, 0.05793344116210938, 0.05790156936645508, 0.057880447387695315, 0.05782284927368164, 0.057710975646972654, 0.05769625473022461, 0.05794377517700195, 0.05803036880493164, 0.05779235076904297, 0.057893024444580075, 0.05796422576904297, 0.05791379165649414, 0.05787839889526367, 0.0580423698425293, 0.05851340866088867, 0.05823833465576172, 0.05798086547851562, 0.05786495971679687, 0.057939903259277344, 0.057953472137451174, 0.05807596969604492, 0.057890655517578125, 0.05794972610473633, 0.05808780670166016, 0.057995521545410156, 0.05799321746826172, 0.05809721755981445, 0.05807558441162109, 0.05800860977172852, 0.05804540634155273, 0.058353023529052736, 0.05957900619506836, 0.058922721862792966, 0.05843356704711914, 0.058372318267822264, 0.05828406524658203, 0.058294273376464846, 0.05822067260742188, 0.058423168182373045, 0.05816134262084961, 0.0582031021118164, 0.05802275085449219, 0.05811366271972656, 0.05840934371948242, 0.058287361145019534, 0.058327392578125, 0.05847046279907227, 0.0585272331237793, 0.05861891174316406, 0.058858463287353516, 0.05869657516479492, 0.05857686233520508, 0.05832089614868164, 0.05841030502319336, 0.05831955337524414, 0.05824012756347656, 0.05850815963745117, 0.05858015823364258, 0.05853084945678711, 0.05831248092651367, 0.06185087966918945, 0.05904569625854492, 0.05799580764770508, 0.05780115127563477, 0.05783555221557617, 0.05787865447998047, 0.057888641357421874, 0.05790105438232422, 0.057665534973144535, 0.05787798309326172, 0.05777385711669922, 0.05783148956298828, 0.05865542221069336, 0.05798083114624023, 0.05780284881591797, 0.057951904296875, 0.05830595016479492, 0.058912704467773434, 0.05862707138061524, 0.05808246231079101, 0.05822671890258789, 0.0578870735168457, 0.05800598526000977, 0.058011905670166015, 0.05805849456787109, 0.05798092651367188, 0.058062110900878906, 0.05802671813964844, 0.0579317741394043, 0.057888736724853514, 0.05804035186767578, 0.05799935913085937, 0.0582369270324707, 0.058504768371582035, 0.05880201721191406, 0.058650337219238284, 0.05866175842285156, 0.0585011215209961, 0.05823823928833008, 0.05825404739379883, 0.058080734252929686, 0.05811049652099609, 0.058068416595458985, 0.05834169769287109, 0.05840908813476563, 0.05831897735595703, 0.05826355361938477, 0.058369407653808596, 0.05838502502441406, 0.05859449768066406, 0.05870675277709961, 0.05885737609863281, 0.058687454223632814, 0.058579071044921875, 0.05866889572143555, 0.05865654373168945, 0.05839091110229492, 0.058498783111572264, 0.058288448333740236, 0.058234848022460935, 0.0592097282409668, 0.05863116836547851, 0.058595680236816404, 0.06166793441772461, 0.059229598999023435, 0.05820905685424805, 0.058025054931640625, 0.05783795166015625, 0.05795801544189453, 0.057971614837646485, 0.05809673690795898, 0.05787289428710937, 0.057966529846191404, 0.058038558959960934, 0.058005599975585936, 0.05794569778442383, 0.05803891372680664, 0.058046337127685546, 0.05823078536987305, 0.05875500869750976, 0.0588936653137207, 0.05862614440917969, 0.058468673706054686, 0.058207775115966795, 0.058098464965820315, 0.05799116897583008, 0.05799731063842774, 0.058072929382324216, 0.05810515213012695, 0.05821295928955078, 0.058517696380615235, 
0.05804652786254883, 0.058173439025878904, 0.05814803314208984, 0.05814969635009765, 0.05838569641113281, 0.05866934585571289, 0.05889683151245117, 0.05884467315673828, 0.05898291015625, 0.05861580657958984, 0.05827913665771484, 0.05830073547363281, 0.05828384017944336, 0.05819881439208984, 0.05824908828735351, 0.058224639892578124, 0.058327041625976565, 0.05828403091430664, 0.058265598297119144, 0.05857279968261719, 0.058241024017333984, 0.05871820831298828, 0.05850931167602539, 0.05901724624633789, 0.05894553756713867, 0.05882262420654297, 0.05869772720336914, 0.05854412841796875, 0.058422721862792966, 0.05828054428100586, 0.05827171325683594, 0.05846137619018555, 0.058391361236572265, 0.058619903564453124, 0.05849087905883789]",tokens/s,17.18010480479645,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,894.963712,3900.571648,0.0,3498.049536,3295.335424,s,1,7.8541064453125,7.8541064453125,0.0,7.8541064453125,7.8541064453125,7.8541064453125,7.8541064453125,[7.8541064453125],,kWh,6.05320767918632e-06,6.51058117165495e-07,2.6094465319925764e-06,9.313712328344392e-06,,MB,1337.11872,4045.275136,0.0,3630.170112,3408.337408,s,10,0.37896022796630857,0.03789602279663086,0.0008168233137309175,0.037674560546875,0.03811797828674316,0.03919809398651123,0.040062186546325686,"[0.0402782096862793, 0.037379329681396484, 0.0377694091796875, 0.03731167984008789, 0.03783615875244141, 0.037866847991943356, 0.03754848098754883, 0.0375797119140625, 0.037512447357177736, 0.03787795257568359]",tokens/s,6755.326314157686,kWh,1.335161112785314e-06,1.4712494443575218e-07,8.817904060913021e-07,2.3640764633123686e-06,tokens/kWh,108287529.60101458,MB,1369.657344,4049.46944,0.0,3632.267264,3408.339968,s,10,14.821669433593751,1.4821669433593752,0.006779634447235293,1.482548095703125,1.4889016235351562,1.4932907165527345,1.496801990966797,"[1.4831070556640624, 1.480173095703125, 1.4821318359375, 1.483393310546875, 1.47314892578125, 1.477865234375, 1.473279541015625, 1.48296435546875, 1.4976798095703125, 1.48792626953125]",tokens/s,42.5053333447099,kWh,4.341839803471418e-05,4.7887773309054904e-06,2.78795659363081e-05,7.608674130192776e-05,tokens/kWh,828002.3420901035,,s,630,14.819124269485476,0.023522419475373767,0.0003892109116823496,0.023453408241271973,0.02376732177734375,0.023918926811218263,0.024893698139190675,"[0.023903743743896484, 0.02344976043701172, 0.02347235107421875, 0.023469728469848634, 0.023251392364501952, 0.023299423217773438, 0.02327222442626953, 0.023517440795898438, 0.023502592086791993, 0.02351923179626465, 0.02356412887573242, 0.023422208786010743, 0.023578784942626954, 0.023509759902954102, 0.02370310401916504, 0.023581247329711914, 0.0236561279296875, 0.02374060821533203, 0.023700992584228517, 0.023435775756835937, 0.023622751235961914, 0.02347100830078125, 0.02359609603881836, 0.02358899116516113, 0.023591808319091797, 0.023457088470458985, 0.023930496215820312, 
0.023421951293945312, 0.023654399871826173, 0.02353561592102051, 0.02367283248901367, 0.023410688400268553, 0.023605472564697267, 0.0233768310546875, 0.023562496185302734, 0.023548511505126952, 0.02346188735961914, 0.02349193572998047, 0.023530336380004884, 0.023564384460449218, 0.02336067199707031, 0.023220735549926756, 0.02348464012145996, 0.023258975982666016, 0.023453535079956053, 0.023455680847167967, 0.02341481590270996, 0.023273664474487303, 0.023277183532714844, 0.023096960067749025, 0.023068607330322264, 0.023167903900146485, 0.023248191833496093, 0.02321878433227539, 0.023240480422973633, 0.023374048233032227, 0.023410688400268553, 0.023291072845458984, 0.023403327941894533, 0.027625471115112304, 0.023654176712036134, 0.023758176803588868, 0.02344403266906738, 0.02384873580932617, 0.023451648712158202, 0.023351295471191406, 0.02334227180480957, 0.02324336051940918, 0.0233023681640625, 0.023361663818359375, 0.023432512283325196, 0.023355424880981444, 0.023290399551391602, 0.023392223358154298, 0.02341891288757324, 0.02372403144836426, 0.0234881591796875, 0.023390432357788087, 0.023545087814331053, 0.02347257614135742, 0.02333945655822754, 0.023355583190917968, 0.02345871925354004, 0.02340752029418945, 0.02337343978881836, 0.02353104019165039, 0.023522144317626954, 0.024250368118286132, 0.023521280288696288, 0.02350284767150879, 0.02490777587890625, 0.023393888473510743, 0.023703968048095703, 0.023267200469970703, 0.023246496200561524, 0.023335391998291016, 0.02318659210205078, 0.023444320678710936, 0.023318239212036133, 0.023473663330078123, 0.023266080856323243, 0.023453376770019532, 0.023342880249023437, 0.02361369514465332, 0.02391004753112793, 0.023394943237304688, 0.02329804801940918, 0.02341391944885254, 0.02332758331298828, 0.023642303466796875, 0.023447359085083008, 0.023556095123291015, 0.023555072784423828, 0.023610368728637695, 0.02343744087219238, 0.0234965763092041, 0.02339232063293457, 0.02341801643371582, 0.023630048751831053, 0.02347270393371582, 0.023444543838500975, 0.023517919540405274, 0.023990400314331056, 0.02338006401062012, 0.023443552017211915, 0.023500640869140624, 0.023940704345703126, 0.023663328170776366, 0.023658496856689453, 0.023521600723266603, 0.023642080307006836, 0.023517183303833008, 0.023529472351074218, 0.023695104598999022, 0.02343065643310547, 0.023769855499267578, 0.023514335632324218, 0.023367551803588866, 0.023509759902954102, 0.02338422393798828, 0.023289567947387697, 0.02346121597290039, 0.023384096145629883, 0.02343414306640625, 0.023402271270751954, 0.02353583908081055, 0.023557567596435548, 0.023433792114257813, 0.023222272872924804, 0.023523136138916014, 0.023814592361450195, 0.02356608009338379, 0.023412736892700195, 0.02351103973388672, 0.02349465560913086, 0.023425024032592775, 0.023418367385864256, 0.023447488784790037, 0.023336671829223634, 0.02385932731628418, 0.023279584884643555, 0.0234169921875, 0.02343996810913086, 0.02336089515686035, 0.02334783935546875, 0.023393760681152342, 0.023337055206298828, 0.024766944885253907, 0.024924127578735352, 0.023497760772705076, 0.023355871200561523, 0.023453760147094726, 0.023369184494018556, 0.02336662483215332, 0.02348646354675293, 0.023838720321655273, 0.023412736892700195, 0.023454912185668947, 0.023532096862792968, 0.0236997127532959, 0.02342092704772949, 0.02350694465637207, 0.02333616065979004, 0.023436159133911134, 0.023383295059204102, 0.023394975662231445, 0.023191551208496093, 0.023467456817626953, 0.023333440780639647, 0.023771167755126953, 0.02341654396057129, 
0.023412992477416992, 0.02333395195007324, 0.023742464065551756, 0.023391168594360353, 0.023480319976806642, 0.023349248886108398, 0.02347577667236328, 0.02333945655822754, 0.023345151901245118, 0.023838720321655273, 0.023455360412597655, 0.02343142318725586, 0.02361356735229492, 0.023767040252685546, 0.023852031707763673, 0.023544479370117187, 0.023351648330688476, 0.023277664184570314, 0.023292896270751953, 0.023446048736572266, 0.023367679595947266, 0.023455680847167967, 0.02345008087158203, 0.02330828857421875, 0.023216127395629883, 0.0232891845703125, 0.023389984130859375, 0.023274560928344727, 0.023316287994384767, 0.023197439193725584, 0.02323276710510254, 0.023438560485839845, 0.023523647308349608, 0.023407136917114258, 0.023418880462646483, 0.02391753578186035, 0.024607711791992188, 0.024672256469726563, 0.02353152084350586, 0.02359516716003418, 0.02344313621520996, 0.023384319305419923, 0.023738271713256837, 0.02364326477050781, 0.023493696212768554, 0.023545663833618165, 0.023404224395751953, 0.023468576431274413, 0.023400224685668946, 0.023386112213134767, 0.02335971260070801, 0.024258432388305665, 0.02508902359008789, 0.0234935359954834, 0.023480319976806642, 0.023366655349731445, 0.023556800842285158, 0.023398719787597656, 0.023455968856811525, 0.02361280059814453, 0.023624288558959962, 0.023961599349975587, 0.023506240844726564, 0.023423679351806642, 0.023431167602539063, 0.023510751724243165, 0.02344169616699219, 0.023418880462646483, 0.02351708793640137, 0.02336367988586426, 0.023362783432006835, 0.023448352813720704, 0.02305638313293457, 0.02305433654785156, 0.023330495834350585, 0.023361087799072266, 0.023249664306640626, 0.02327372741699219, 0.02345465660095215, 0.023369888305664062, 0.024021663665771485, 0.023369312286376953, 0.02332099151611328, 0.02328371238708496, 0.023500799179077148, 0.023326719284057617, 0.02332499122619629, 0.023456607818603516, 0.02357948875427246, 0.023354816436767577, 0.023369823455810547, 0.02320969581604004, 0.023708415985107423, 0.02366979217529297, 0.023487455368041994, 0.023287391662597655, 0.02343772888183594, 0.0233123836517334, 0.023238271713256837, 0.023166847229003907, 0.023329280853271486, 0.02345155143737793, 0.02351443290710449, 0.023232704162597657, 0.02337603187561035, 0.02326300811767578, 0.023325504302978514, 0.023268224716186524, 0.02336457633972168, 0.023310400009155272, 0.023148288726806642, 0.023215839385986328, 0.023339008331298827, 0.02336329650878906, 0.023683839797973633, 0.023332160949707033, 0.02347897529602051, 0.023214080810546874, 0.023345279693603515, 0.023191423416137696, 0.023341215133666993, 0.023215967178344725, 0.023183231353759767, 0.02345382308959961, 0.023772640228271483, 0.023534175872802734, 0.02342316818237305, 0.023453439712524414, 0.023387487411499024, 0.023470624923706055, 0.02344153594970703, 0.02341894340515137, 0.023534879684448243, 0.02356483268737793, 0.023548032760620115, 0.023595199584960938, 0.023521215438842773, 0.023435136795043946, 0.023472127914428712, 0.02368716812133789, 0.023427072525024413, 0.023812095642089845, 0.023468032836914062, 0.023602367401123047, 0.023812192916870117, 0.023824607849121094, 0.02345212745666504, 0.023572095870971678, 0.023369695663452147, 0.023232095718383788, 0.023513919830322267, 0.0233503360748291, 0.02360767936706543, 0.023472896575927736, 0.023406431198120116, 0.023311904907226563, 0.02315497589111328, 0.023105087280273436, 0.023197439193725584, 0.023413471221923828, 0.023455360412597655, 0.023623807907104492, 0.023415199279785158, 0.023607519149780272, 
0.02345347213745117, 0.023386112213134767, 0.0234149112701416, 0.02331804847717285, 0.023486783981323242, 0.0232774715423584, 0.0233919677734375, 0.023377344131469725, 0.02346259117126465, 0.023391712188720704, 0.02347113609313965, 0.02345088005065918, 0.023715360641479492, 0.023446527481079102, 0.023438592910766602, 0.023347967147827147, 0.023566335678100587, 0.023338144302368163, 0.023243839263916016, 0.023193376541137695, 0.023268672943115236, 0.023310304641723633, 0.023409631729125975, 0.023642112731933593, 0.023346656799316405, 0.02328937530517578, 0.023340032577514647, 0.023402496337890624, 0.023147743225097658, 0.023343135833740234, 0.023200511932373047, 0.023142559051513672, 0.023428735733032228, 0.023488735198974608, 0.023365631103515624, 0.02333286476135254, 0.023404544830322265, 0.023385791778564452, 0.023461952209472656, 0.023234239578247072, 0.023130207061767577, 0.023339487075805663, 0.023326623916625978, 0.02355824089050293, 0.02388991928100586, 0.023425024032592775, 0.02348201560974121, 0.023296384811401366, 0.023392223358154298, 0.02333695983886719, 0.023391712188720704, 0.02341494369506836, 0.023458240509033203, 0.023588800430297853, 0.023439359664916993, 0.023300128936767577, 0.023305952072143556, 0.023920064926147462, 0.02324336051940918, 0.02334489631652832, 0.023439712524414062, 0.023214208602905274, 0.023398496627807616, 0.02316182327270508, 0.02341891288757324, 0.023376800537109374, 0.023287456512451173, 0.02345350456237793, 0.023281856536865233, 0.023478656768798827, 0.023648223876953124, 0.02349171257019043, 0.023456447601318358, 0.023253183364868164, 0.02333647918701172, 0.02326371192932129, 0.023365631103515624, 0.023355583190917968, 0.023353151321411133, 0.02329737663269043, 0.023378591537475586, 0.023495967864990235, 0.023337600708007812, 0.023115840911865235, 0.023342912673950195, 0.023473663330078123, 0.0238939208984375, 0.023412736892700195, 0.023399744033813476, 0.023419872283935547, 0.023256128311157226, 0.02331113624572754, 0.023418848037719726, 0.023273408889770506, 0.023396415710449217, 0.023558143615722657, 0.023330816268920897, 0.023365631103515624, 0.023369728088378908, 0.023242752075195314, 0.023369312286376953, 0.023531232833862305, 0.023499488830566406, 0.0236596794128418, 0.023658367156982423, 0.02378028869628906, 0.02365769577026367, 0.02365724754333496, 0.023762943267822266, 0.023678495407104493, 0.023663103103637697, 0.023637632369995117, 0.02344585609436035, 0.023487680435180663, 0.023374399185180663, 0.023279808044433595, 0.023301919937133788, 0.023304479598999023, 0.02331167984008789, 0.023530176162719726, 0.0234005126953125, 0.02352908706665039, 0.023609664916992186, 0.023424896240234375, 0.023545312881469726, 0.023470752716064452, 0.023521440505981445, 0.023512384414672852, 0.023708192825317383, 0.024020992279052734, 0.023533567428588868, 0.023576480865478516, 0.023433216094970705, 0.023434783935546873, 0.023646783828735352, 0.02370355224609375, 0.02376006317138672, 0.02384979248046875, 0.023635583877563475, 0.023510751724243165, 0.023774911880493164, 0.023655391693115233, 0.023665727615356444, 0.02358572769165039, 0.023559328079223632, 0.023526336669921873, 0.02362892723083496, 0.023597280502319337, 0.02363382339477539, 0.023878911972045898, 0.02485923194885254, 0.02371942329406738, 0.023860960006713866, 0.0234968318939209, 0.023497535705566407, 0.023422048568725585, 0.023523584365844726, 0.023408832550048827, 0.023525856018066407, 0.023848928451538087, 0.023705631256103515, 0.023535423278808594, 0.02357472038269043, 
0.024664064407348633, 0.023592607498168945, 0.02343734359741211, 0.02339263916015625, 0.023332128524780272, 0.023461568832397462, 0.02343343925476074, 0.023517055511474608, 0.023634815216064455, 0.023706783294677736, 0.02335318374633789, 0.024001535415649415, 0.023357440948486328, 0.02348236846923828, 0.02336528015136719, 0.023824991226196288, 0.023432479858398438, 0.023394399642944336, 0.023302528381347658, 0.023945215225219727, 0.023560192108154295, 0.02351206398010254, 0.023566783905029295, 0.023477888107299803, 0.02905353546142578, 0.025757471084594728, 0.023958303451538085, 0.024178464889526366, 0.024143871307373048, 0.02371174430847168, 0.02368921661376953, 0.023750656127929686, 0.023836544036865234, 0.024014976501464842, 0.023584768295288085, 0.023571552276611327, 0.023585695266723633, 0.023754047393798827, 0.023612064361572267, 0.023731903076171876, 0.02361993598937988, 0.0237108154296875, 0.023433408737182616, 0.023357248306274413, 0.02337596893310547, 0.023513151168823243, 0.02360425567626953, 0.023570144653320312, 0.02367679977416992, 0.024315071105957032, 0.02381292724609375, 0.02364361572265625, 0.023619552612304688, 0.023630399703979493, 0.02367500877380371, 0.02394655990600586, 0.02375699234008789, 0.023715999603271483, 0.023791648864746093, 0.023713504791259766, 0.023734752655029296, 0.02372198486328125, 0.0236461124420166, 0.02475222396850586, 0.02557241630554199, 0.024033567428588868, 0.02400480079650879, 0.02380614471435547, 0.023770496368408202, 0.023634815216064455, 0.023726112365722658, 0.023688575744628907, 0.023687807083129883, 0.023554048538208007, 0.02387763214111328, 0.02351923179626465, 0.023525375366210938, 0.023440671920776368, 0.023370431900024413, 0.02341379165649414, 0.02354911994934082, 0.02345097541809082, 0.02368355178833008, 0.02350105667114258, 0.023482336044311523, 0.02347395133972168, 0.023463935852050782, 0.023709440231323244, 0.02351910400390625, 0.02343891143798828, 0.02343744087219238, 0.023530080795288087, 0.023368831634521484, 0.023509920120239256, 0.02343948745727539, 0.023443391799926758, 0.023343103408813477, 0.023373823165893554, 0.02371379280090332, 0.02361871910095215, 0.023558528900146484, 0.023794143676757813, 0.02332806396484375, 0.023417087554931642, 0.02330668830871582, 0.023274911880493163, 0.023112287521362306, 0.02302505683898926, 0.023247360229492187, 0.023378015518188477, 0.02305593681335449, 0.023040447235107422]",tokens/s,42.51263357695521,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,888.979456,6266.159104,0.0,5863.636992,5744.700416,s,1,7.25092529296875,7.25092529296875,0.0,7.25092529296875,7.25092529296875,7.25092529296875,7.25092529296875,[7.25092529296875],,kWh,6.294323670855798e-06,6.87043933741528e-07,2.6930577100015007e-06,9.674425314598826e-06,,MB,1332.506624,6498.942976,0.0,6081.7408,6021.145088,s,10,0.8091246948242188,0.08091246948242188,0.002618087769830412,0.08134588623046875,0.08378506469726563,0.08408506011962891,0.08432505645751953,"[0.07684159851074218, 0.08153298950195312, 0.08115878295898438, 0.08073308563232422, 0.08272006225585937, 0.08371839904785157, 0.07578079986572266, 0.08438505554199219, 0.082093505859375, 0.08016041564941406]",tokens/s,3163.912826262405,kWh,2.4553806159313834e-06,2.7078195557447e-07,1.634826237831778e-06,4.360988809337631e-06,tokens/kWh,58702283.172995016,MB,1370.853376,6561.857536,0.0,6144.65536,6123.871232,s,10,21.125793701171872,2.1125793701171878,0.0032662190046748333,2.1113309326171876,2.11757216796875,2.1181353271484373,2.1185858544921876,"[2.110138427734375, 2.1083095703125, 2.113880126953125, 2.114443115234375, 2.11071875, 2.110334228515625, 2.118698486328125, 2.117447021484375, 2.111943115234375, 2.109880859375]",tokens/s,29.82136476912832,kWh,6.146231652281827e-05,6.7790816571769484e-06,4.065448537136869e-05,0.00010889588355136387,tokens/kWh,578534.2654415788,,s,630,21.122932327270494,0.03352846401154049,0.0005355371642702573,0.03343292808532715,0.033896537780761724,0.03413008117675781,0.03617329013824463,"[0.03480656051635742, 0.03414998245239258, 0.033589569091796875, 0.033099777221679685, 0.03310908889770508, 0.033266590118408206, 0.03319776153564453, 0.033161537170410156, 0.03311820983886719, 0.03305846405029297, 0.033193695068359376, 0.03301039886474609, 0.032989086151123045, 0.033099777221679685, 0.033102943420410154, 0.03312118530273438, 0.03309910583496094, 0.03320080184936523, 0.03320217514038086, 0.033140735626220705, 0.033140735626220705, 0.033185791015625, 0.03340288162231445, 0.03334348678588867, 0.03336601638793945, 0.03335488128662109, 0.03317235183715821, 0.03323897552490234, 0.03369171142578125, 0.03328409576416016, 0.03450579071044922, 0.03344070434570313, 0.03361382293701172, 0.03340697479248047, 0.03369311904907227, 0.0333051528930664, 0.033608768463134764, 0.03331689453125, 0.03333801651000977, 0.03352374267578125, 0.03345328140258789, 0.03366195297241211, 0.033570816040039066, 0.03341926574707031, 0.03344796752929687, 0.03359331130981445, 0.033562625885009766, 0.03345199966430664, 0.033895999908447265, 0.033898975372314455, 0.033529857635498046, 0.03360358428955078, 0.033605121612548826, 0.03370444869995117, 0.03377097702026367, 0.03380188751220703, 0.033562816619873044, 0.03456051254272461, 0.033628128051757813, 0.03359356689453125, 0.033882110595703126, 0.03387801742553711, 0.03410927963256836, 0.036194206237792965, 0.03438598251342773, 0.0336258544921875, 0.033468639373779294, 0.03330278396606445, 0.03319801712036133, 0.033302558898925784, 0.03332902526855469, 0.033183937072753904, 0.033091552734375, 0.03307708740234375, 0.03317161560058594, 0.03323868942260742, 0.033184097290039065, 0.033083393096923826, 0.03310931015014648, 0.03315167999267578, 0.03334467315673828, 0.03324323272705078, 0.03318451309204101, 0.03324911880493164, 0.03333523178100586, 0.03345635223388672, 0.03319398498535156, 0.03321155166625977, 0.033248191833496095, 0.03348041534423828, 0.03312243270874023, 0.033290302276611325, 0.03357196807861328, 
0.03343244934082031, 0.03357900619506836, 0.033393760681152344, 0.033508480072021486, 0.03328579330444336, 0.033331329345703126, 0.033466079711914065, 0.03331510543823242, 0.0335618896484375, 0.033329055786132815, 0.0334692497253418, 0.03338854217529297, 0.03321763229370117, 0.03352873611450195, 0.03335372924804687, 0.03357503890991211, 0.03339865493774414, 0.0334986572265625, 0.03339740753173828, 0.03325523376464844, 0.033535999298095705, 0.03338649749755859, 0.03352931213378906, 0.03337270355224609, 0.03407462310791016, 0.03413008117675781, 0.03367510223388672, 0.03351551818847656, 0.03362611389160156, 0.03389235305786133, 0.03352681732177734, 0.0336209602355957, 0.03384524917602539, 0.036122081756591796, 0.03457417678833008, 0.033778369903564455, 0.03321036911010742, 0.03315407943725586, 0.03311510467529297, 0.03311526489257813, 0.03307199859619141, 0.03305472183227539, 0.0332042236328125, 0.03306208038330078, 0.033102657318115236, 0.03407036972045899, 0.03299958419799805, 0.033009407043457034, 0.03306851196289062, 0.033118431091308596, 0.03327648162841797, 0.03334291076660156, 0.03311193466186523, 0.03311385726928711, 0.033145790100097654, 0.03320377731323242, 0.0332210578918457, 0.03332198333740234, 0.033811454772949216, 0.03322060775756836, 0.03317926406860352, 0.033294719696044923, 0.03344179153442383, 0.03324313735961914, 0.033421535491943356, 0.03344095993041992, 0.033419872283935545, 0.03363430404663086, 0.033518913269042966, 0.03343990325927734, 0.03338896179199219, 0.03352572631835937, 0.033374366760253904, 0.0336517105102539, 0.033442817687988284, 0.03368310546875, 0.03352988815307617, 0.03337068939208984, 0.03342115020751953, 0.03711580657958984, 0.03353955078125, 0.03348336029052734, 0.03349292755126953, 0.033675262451171875, 0.033382144927978516, 0.03345398330688477, 0.03351542282104492, 0.0335973129272461, 0.03348332977294922, 0.033758785247802736, 0.033711616516113284, 0.035138465881347655, 0.03352783966064453, 0.03382067108154297, 0.03395379257202148, 0.0339128303527832, 0.03652204895019531, 0.03474431991577148, 0.03381167984008789, 0.033557281494140626, 0.033277950286865234, 0.033240222930908205, 0.03322761535644531, 0.03321023941040039, 0.03311820983886719, 0.0332022705078125, 0.03301583862304688, 0.03303833770751953, 0.03305440139770508, 0.03318179321289062, 0.033281791687011716, 0.03319036865234375, 0.03311820983886719, 0.033116161346435545, 0.03303977584838867, 0.03311299133300781, 0.03315475082397461, 0.03336816024780274, 0.033216064453125, 0.03316156768798828, 0.03316902542114258, 0.03315536117553711, 0.033208415985107424, 0.03331686401367188, 0.033326656341552734, 0.033837505340576175, 0.03364863967895508, 0.033454078674316406, 0.033492992401123044, 0.03343766403198242, 0.03341315078735352, 0.03475769424438477, 0.03397872161865234, 0.03368790435791016, 0.033378559112548827, 0.033705440521240235, 0.033384830474853515, 0.033531967163085936, 0.033382495880126956, 0.03363020706176758, 0.03340259170532227, 0.033583393096923826, 0.034375679016113284, 0.03385785675048828, 0.0338408317565918, 0.03328102493286133, 0.03368796920776367, 0.03341779327392578, 0.033714206695556644, 0.03351715087890625, 0.03354256057739258, 0.03374691009521484, 0.033568801879882815, 0.033691646575927735, 0.03403571319580078, 0.033898494720458985, 0.033982078552246095, 0.03407519912719727, 0.03402323150634766, 0.0363397102355957, 0.034574337005615234, 0.0339447021484375, 0.03334028625488281, 0.033314815521240236, 0.03307110214233398, 0.032977153778076175, 0.033205249786376956, 
0.03328281784057617, 0.03325747299194336, 0.03323836898803711, 0.033129119873046876, 0.03306195068359375, 0.03309859085083008, 0.033041694641113284, 0.03304735946655273, 0.03304272079467773, 0.03321129608154297, 0.03319043350219727, 0.033194271087646485, 0.0331673583984375, 0.033070335388183596, 0.033178367614746095, 0.03315660858154297, 0.033221118927001955, 0.033306625366210936, 0.03318505477905274, 0.033237728118896484, 0.03341455841064453, 0.03337263870239258, 0.03350540924072266, 0.033467617034912106, 0.03332966232299805, 0.03359059143066406, 0.03391791915893555, 0.03323817443847656, 0.03330953598022461, 0.03357206344604492, 0.03319375991821289, 0.03365555191040039, 0.03331711959838867, 0.03338444900512695, 0.03339468765258789, 0.033291519165039064, 0.03369551849365234, 0.03382780838012695, 0.03357196807861328, 0.03376438522338867, 0.03383280181884766, 0.03344179153442383, 0.033770496368408204, 0.03380121612548828, 0.033748897552490234, 0.0336343994140625, 0.033789119720458984, 0.033997631072998045, 0.03384476852416992, 0.03363190460205078, 0.033583934783935544, 0.0338897590637207, 0.03391542434692383, 0.03403731155395508, 0.03362572860717773, 0.03626393508911133, 0.034592159271240236, 0.03387257766723633, 0.03326556777954102, 0.0331096305847168, 0.03306953430175781, 0.03308502578735351, 0.03300086212158203, 0.03297612762451172, 0.033005504608154296, 0.03301065444946289, 0.033264480590820315, 0.033060222625732424, 0.033204864501953125, 0.03329228973388672, 0.03315024185180664, 0.03317628860473633, 0.03320956802368164, 0.03311811065673828, 0.03331923294067383, 0.033307201385498045, 0.0331097297668457, 0.03317132949829102, 0.033425857543945316, 0.033334239959716794, 0.033291263580322264, 0.03355231857299805, 0.033443264007568356, 0.03401587295532227, 0.034082527160644534, 0.03368179321289062, 0.033861534118652344, 0.03356467056274414, 0.03353299331665039, 0.03332790374755859, 0.03362783813476562, 0.03338399887084961, 0.03362294387817383, 0.033554431915283206, 0.03367110443115234, 0.03350444793701172, 0.03339763259887695, 0.03346022415161133, 0.033468414306640625, 0.033445888519287106, 0.03349734497070313, 0.033336158752441405, 0.03358755111694336, 0.033280513763427735, 0.03347257614135742, 0.03342335891723633, 0.03377926254272461, 0.03333363342285156, 0.03354867172241211, 0.033382080078125, 0.033685504913330076, 0.033404064178466794, 0.033804927825927734, 0.033718494415283205, 0.0336769905090332, 0.03370166397094727, 0.033856033325195316, 0.033705440521240235, 0.03472412872314453, 0.03476271820068359, 0.033890304565429685, 0.03364067077636719, 0.033183521270751956, 0.03303219223022461, 0.0331778564453125, 0.03305158233642578, 0.033102657318115236, 0.033681312561035154, 0.03953881454467773, 0.0331550407409668, 0.03316121673583984, 0.03310182571411133, 0.033091583251953126, 0.03304652786254883, 0.033078529357910155, 0.03316403198242188, 0.03328409576416016, 0.033259521484375, 0.03317670440673828, 0.03331366348266602, 0.033476383209228515, 0.03317782211303711, 0.033097728729248044, 0.03316454315185547, 0.03354195022583008, 0.03441862487792969, 0.03327020645141601, 0.03346899032592773, 0.03333257675170898, 0.03366758346557617, 0.03342502212524414, 0.0335590705871582, 0.03327590560913086, 0.03334521484375, 0.03338588714599609, 0.033958431243896484, 0.0332988166809082, 0.03358857727050781, 0.033350399017333984, 0.03369884872436523, 0.033966880798339844, 0.03329759979248047, 0.03368044662475586, 0.033433406829833985, 0.033714206695556644, 0.03343155288696289, 0.03372032165527344, 
0.03408003234863281, 0.03362480163574219, 0.03357603073120117, 0.03375811386108398, 0.03347148895263672, 0.03349401473999023, 0.0336231689453125, 0.03360646438598633, 0.03380390548706055, 0.03433212661743164, 0.03377593612670898, 0.033880191802978514, 0.034129886627197265, 0.03386377716064453, 0.036593406677246094, 0.03473862457275391, 0.033887199401855465, 0.03358585739135742, 0.03328217697143555, 0.03314108657836914, 0.03326441574096679, 0.033882495880126956, 0.03339481735229492, 0.033391006469726564, 0.03329801559448242, 0.0335101432800293, 0.033291999816894534, 0.03339632034301758, 0.03364899063110351, 0.03337324905395508, 0.0334161262512207, 0.03323046493530273, 0.033409183502197265, 0.033443359375, 0.03380294418334961, 0.033277889251708985, 0.03329999923706055, 0.03331865692138672, 0.03331356811523437, 0.03318937683105469, 0.0333645133972168, 0.033412769317626954, 0.03351779174804687, 0.03473823928833008, 0.03380815887451172, 0.033544097900390625, 0.03354249572753906, 0.033828704833984376, 0.03413008117675781, 0.03378790283203125, 0.03349711990356445, 0.03366083145141602, 0.03338691329956055, 0.033516288757324215, 0.03330319976806641, 0.03366934585571289, 0.033511425018310545, 0.03352105712890625, 0.03338636779785156, 0.03351603317260742, 0.03324953460693359, 0.03350246429443359, 0.033557247161865235, 0.03386703872680664, 0.03362879943847656, 0.03345935821533203, 0.03375436782836914, 0.03335945510864258, 0.033664737701416016, 0.033427326202392575, 0.03371062469482422, 0.03362403106689453, 0.0334951057434082, 0.03378156661987305, 0.033536128997802735, 0.033708030700683594, 0.03383500671386719, 0.0361102409362793, 0.034503231048583986, 0.03392895889282226, 0.03321062469482422, 0.03309996795654297, 0.03323257446289062, 0.03368563079833985, 0.03326537704467773, 0.033293697357177736, 0.033161758422851566, 0.0331343994140625, 0.03309795379638672, 0.03308281707763672, 0.03336431884765625, 0.033315486907958984, 0.03312809753417969, 0.033482593536376955, 0.03460940933227539, 0.03317804718017578, 0.033365726470947266, 0.033224319458007814, 0.03316329574584961, 0.03311996841430664, 0.03314547348022461, 0.033412288665771485, 0.03324335861206055, 0.03311990356445312, 0.033220672607421876, 0.03363484954833985, 0.03353596878051758, 0.033657215118408204, 0.0336097297668457, 0.033510848999023436, 0.033712448120117186, 0.033599742889404295, 0.03355171203613281, 0.03357763290405273, 0.03347455978393555, 0.03366912078857422, 0.03339984130859375, 0.033625057220458984, 0.03332710266113281, 0.03363343811035156, 0.03341398239135742, 0.033734432220458986, 0.033312801361083985, 0.03355852890014648, 0.03337817764282226, 0.03353782272338867, 0.033382942199707034, 0.03348867034912109, 0.033464031219482424, 0.0334463996887207, 0.03353968048095703, 0.03350476837158203, 0.033530784606933595, 0.03358512115478516, 0.0335579833984375, 0.033898719787597655, 0.03386608123779297, 0.03359494400024414, 0.033685951232910155, 0.03378156661987305, 0.03605855941772461, 0.03459043121337891, 0.033643360137939456, 0.03318783950805664, 0.03315097427368164, 0.033046817779541014, 0.03308857727050781, 0.03308921432495117, 0.03319232177734375, 0.0332022705078125, 0.03312486267089844, 0.03307680130004883, 0.03304288101196289, 0.033067008972167966, 0.03305244827270508, 0.03303868865966797, 0.033048446655273435, 0.0331036491394043, 0.03320649719238281, 0.033140735626220705, 0.0331960334777832, 0.03313167953491211, 0.033130401611328124, 0.033248191833496095, 0.033470558166503905, 0.03322256088256836, 0.033185791015625, 
0.03339878463745117, 0.033384254455566406, 0.033538238525390625, 0.033605567932128905, 0.03372784042358398, 0.03338108825683594, 0.0335994873046875, 0.033347583770751955, 0.0335093765258789, 0.033321086883544924, 0.033303966522216795, 0.03357651138305664, 0.033278465270996094, 0.033667518615722654, 0.03342348861694336, 0.03375497436523438, 0.03340233612060547, 0.033499774932861326, 0.03335113525390625, 0.03365523147583008, 0.03341516876220703, 0.03360124969482422, 0.033549697875976565, 0.03353811264038086, 0.03363052749633789, 0.03358534240722656, 0.033810142517089845, 0.03385408020019531, 0.033611358642578124, 0.03359747314453125, 0.033978752136230465, 0.034095359802246095, 0.03400611114501953, 0.034009281158447265, 0.03389632034301758, 0.033931873321533204]",tokens/s,29.825404458009174,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 169049 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ 
[Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 270, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 83692 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 905.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 71679 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,737.046528,804.192256,0.0,408.94464,387.119104,s,1,7.05956787109375,7.05956787109375,0.0,7.05956787109375,7.05956787109375,7.05956787109375,7.05956787109375,[7.05956787109375],,kWh,6.266552395828739e-06,6.837814345044297e-07,2.0188905040033345e-06,8.969224334336502e-06,,MB,1100.791808,829.35808,0.0,421.527552,354.083328,s,18,0.19807555103302,0.011004197279612223,0.00019689022549573127,0.010912367820739746,0.011292044830322265,0.011378513336181641,0.011422403717041014,"[0.010825087547302246, 0.010987551689147949, 0.011236991882324219, 0.01096713638305664, 0.010945504188537597, 0.010912384033203125, 0.011231648445129394, 0.010849023818969726, 0.010844736099243165, 0.010888416290283203, 0.010819135665893554, 0.01087177562713623, 0.010912351608276367, 0.011368831634521485, 0.010868127822875977, 0.011433376312255859, 0.011259136199951171, 0.010854335784912109]",tokens/s,23263.850464976505,kWh,3.2461778655451273e-07,3.579943586903562e-08,2.1631923126807694e-07,5.767364536916254e-07,tokens/kWh,443876918.75790524,MB,1135.0016,844.038144,0.0,436.207616,354.085888,s,18,10.186482849121093,0.5659157138400608,0.003745610212349325,0.5648192138671875,0.5710327026367187,0.5727525726318359,0.5730854754638672,"[0.5703270874023437, 0.5690594482421875, 0.573168701171875, 0.5664511108398438, 0.5726791381835937, 0.5663496704101563, 0.563966796875, 0.56943896484375, 0.5603001708984375, 0.5606165161132812, 0.562454833984375, 0.565671630859375, 0.5635009765625, 0.5637919921875, 0.5637879638671875, 0.56255029296875, 0.5684378662109375, 0.5639296875]",tokens/s,111.3239983610087,kWh,1.634629071205645e-05,1.8027229825864778e-06,7.3478997830654334e-06,2.5496913477708366e-05,tokens/kWh,2470887.3117163815,,s,1134,10.178151805877684,0.008975442509592316,0.00019099574927967507,0.008945232391357421,0.009071491050720214,0.009170578908920288,0.009812120027542118,"[0.009137824058532714, 0.009097567558288574, 0.00898252773284912, 0.009009023666381837, 0.009011327743530273, 0.008945088386535644, 0.009015071868896484, 0.009052927970886231, 0.009056480407714843, 0.009009152412414552, 0.009011008262634278, 0.009056511878967286, 0.009074591636657714, 0.009189375877380371, 0.009548831939697265, 0.009132831573486329, 0.009123871803283692, 0.00915017604827881, 0.009049632072448731, 0.009048831939697266, 0.009012543678283691, 0.009050815582275391, 0.00902348804473877, 0.009003007888793945, 0.009025216102600098, 0.009080415725708007, 0.009007840156555176, 0.009000960350036622, 0.008997952461242676, 0.009025823593139649, 0.009034303665161133, 0.009019583702087402, 0.009010975837707519, 0.009037856101989747, 0.009035679817199707, 0.009195712089538574, 0.00897875213623047, 
0.00897811222076416, 0.009000448226928711, 0.008964544296264648, 0.008997023582458496, 0.008992447853088378, 0.008976832389831543, 0.008973631858825683, 0.009083359718322755, 0.009020511627197265, 0.008991776466369629, 0.009471360206604005, 0.009014880180358887, 0.008997632026672362, 0.008988479614257812, 0.008968544006347657, 0.009017696380615234, 0.008998335838317871, 0.009035008430480957, 0.009151712417602539, 0.009032671928405761, 0.008999711990356445, 0.008994751930236817, 0.008976608276367188, 0.008974176406860351, 0.009005056381225587, 0.00902143955230713, 0.008769311904907227, 0.009494751930236816, 0.009004159927368164, 0.009003904342651367, 0.009001184463500976, 0.009057056427001954, 0.009067520141601563, 0.008968192100524902, 0.009027392387390136, 0.00905401611328125, 0.008992159843444824, 0.009010144233703613, 0.008992511749267578, 0.0089967679977417, 0.009047648429870606, 0.008999679565429687, 0.00902348804473877, 0.009119744300842286, 0.00898252773284912, 0.009002528190612792, 0.00897276782989502, 0.009006208419799804, 0.008973183631896973, 0.008964096069335938, 0.008923487663269043, 0.009096927642822265, 0.009033087730407716, 0.009003583908081054, 0.009103455543518067, 0.008970047950744629, 0.008980575561523438, 0.009026592254638671, 0.008998944282531738, 0.00913094425201416, 0.009051775932312012, 0.008988991737365722, 0.008951871871948243, 0.00904371166229248, 0.008993247985839843, 0.00904576015472412, 0.009056351661682128, 0.009014528274536132, 0.00898464012145996, 0.009095040321350098, 0.008970944404602051, 0.00900921630859375, 0.009030752182006836, 0.00897321605682373, 0.00898579216003418, 0.009022239685058595, 0.00903593635559082, 0.009080448150634766, 0.008999168395996094, 0.008972288131713867, 0.009151583671569824, 0.008969120025634766, 0.008976479530334473, 0.008951295852661132, 0.009032095909118652, 0.008974047660827636, 0.008980768203735352, 0.00932044792175293, 0.009140352249145507, 0.0087706241607666, 0.009126272201538086, 0.009007072448730469, 0.00904428768157959, 0.009025919914245605, 0.00902143955230713, 0.009004735946655273, 0.009082240104675293, 0.008997856140136718, 0.009000384330749511, 0.009013536453247071, 0.008972288131713867, 0.009103487968444825, 0.008988544464111329, 0.0090316801071167, 0.009035776138305664, 0.009076031684875488, 0.009068767547607421, 0.009346752166748047, 0.008968992233276367, 0.008945664405822755, 0.008984448432922364, 0.009828479766845703, 0.011249855995178222, 0.009828255653381348, 0.00908620834350586, 0.009038144111633301, 0.009036416053771973, 0.008990431785583496, 0.009518495559692383, 0.008997471809387207, 0.00897433567047119, 0.008980480194091797, 0.008976127624511719, 0.008984224319458008, 0.00896236801147461, 0.009088768005371093, 0.009089568138122558, 0.008971263885498047, 0.008946687698364257, 0.00899401569366455, 0.009017248153686524, 0.00898374366760254, 0.008918911933898925, 0.008994624137878418, 0.008944671630859375, 0.008948736190795899, 0.008976320266723632, 0.008967583656311034, 0.009449600219726562, 0.009001312255859375, 0.010072511672973633, 0.008954719543457031, 0.00895680046081543, 0.008951711654663085, 0.008962143898010254, 0.008945856094360351, 0.008910047531127929, 0.008933728218078613, 0.008943776130676269, 0.00888764762878418, 0.008884991645812988, 0.008945664405822755, 0.008706048011779785, 0.009000960350036622, 0.008965279579162598, 0.008974847793579101, 0.008954208374023437, 0.00894976043701172, 0.00901734447479248, 0.009000736236572266, 0.008962271690368652, 0.008951807975769043, 0.008937472343444825, 
0.00896777629852295, 0.008984992027282715, 0.0090665922164917, 0.009185183525085449, 0.0090316801071167, 0.008988672256469727, 0.008967647552490235, 0.008953696250915527, 0.008970399856567383, 0.008978976249694823, 0.008965344429016113, 0.008989472389221192, 0.008951935768127441, 0.00894553565979004, 0.008922240257263184, 0.008962944030761718, 0.008976479530334473, 0.00898185634613037, 0.008960576057434081, 0.008990847587585449, 0.008982399940490722, 0.008973471641540528, 0.008950719833374024, 0.008976287841796875, 0.008943615913391113, 0.008915295600891113, 0.008933024406433105, 0.00898185634613037, 0.008942079544067384, 0.00893331241607666, 0.00893280029296875, 0.00891481590270996, 0.008938015937805176, 0.009398655891418457, 0.009093119621276855, 0.009000960350036622, 0.00908886432647705, 0.009031840324401855, 0.009029664039611817, 0.00901961612701416, 0.009029343605041503, 0.00900710391998291, 0.009029919624328614, 0.009041119575500488, 0.008948543548583984, 0.008942496299743653, 0.008981311798095703, 0.009000960350036622, 0.00898252773284912, 0.008974559783935547, 0.008929023742675781, 0.008986368179321289, 0.008785216331481934, 0.009026176452636718, 0.009002335548400879, 0.00898464012145996, 0.009005727767944336, 0.008969887733459473, 0.009035648345947266, 0.00897596836090088, 0.009030719757080079, 0.00893836784362793, 0.008986656188964843, 0.008960927963256836, 0.008961343765258789, 0.008983103752136231, 0.008978816032409668, 0.009004544258117676, 0.009025279998779296, 0.008970111846923827, 0.009185919761657715, 0.009011199951171875, 0.009117695808410644, 0.009033727645874023, 0.009165120124816895, 0.009333951950073242, 0.009162879943847657, 0.010742207527160645, 0.008967904090881348, 0.009015520095825196, 0.008978431701660156, 0.009088768005371093, 0.00914409637451172, 0.008944095611572266, 0.008947039604187012, 0.009011872291564941, 0.008927231788635253, 0.009019392013549805, 0.008947711944580078, 0.00959705638885498, 0.008957823753356934, 0.008956000328063965, 0.00896985626220703, 0.008967616081237794, 0.008928095817565918, 0.00893337631225586, 0.009177087783813476, 0.00942080020904541, 0.008964096069335938, 0.008973504066467285, 0.008978528022766113, 0.009897024154663085, 0.01069660758972168, 0.00896291160583496, 0.008958175659179687, 0.008913311958312988, 0.008945376396179198, 0.008962080001831055, 0.008947936058044434, 0.008923456192016602, 0.009050111770629882, 0.008951711654663085, 0.008939040184020997, 0.008958271980285645, 0.008921343803405762, 0.00875443172454834, 0.008990880012512206, 0.008987296104431152, 0.009158592224121093, 0.00908841609954834, 0.008999039649963379, 0.008962528228759765, 0.008971936225891113, 0.008991071701049805, 0.008939647674560547, 0.009008959770202636, 0.008978431701660156, 0.00899283218383789, 0.00901529598236084, 0.008965696334838867, 0.008952223777770996, 0.00904924774169922, 0.008999808311462403, 0.008920991897583008, 0.008984607696533203, 0.008973823547363282, 0.008921664237976074, 0.008966015815734864, 0.008990847587585449, 0.009000384330749511, 0.009159232139587402, 0.00917039966583252, 0.009271840095520019, 0.009023679733276366, 0.009033184051513671, 0.009029631614685058, 0.008994943618774414, 0.009064224243164063, 0.009004896163940429, 0.009021280288696289, 0.009006879806518554, 0.00898259162902832, 0.008967071533203126, 0.009000960350036622, 0.008934975624084473, 0.009033696174621582, 0.00897276782989502, 0.00899443244934082, 0.00897267246246338, 0.008947168350219727, 0.008981023788452148, 0.009142144203186035, 0.009027711868286133, 
0.009003007888793945, 0.009070079803466797, 0.008939552307128906, 0.008937536239624023, 0.008933792114257813, 0.008874112129211426, 0.0088472318649292, 0.00887168025970459, 0.00885756778717041, 0.008816927909851073, 0.008893600463867187, 0.008858016014099121, 0.008866239547729492, 0.008845312118530273, 0.00897862434387207, 0.009006815910339356, 0.008960864067077638, 0.00903987216949463, 0.00890880012512207, 0.00890880012512207, 0.008888128280639649, 0.008837311744689942, 0.008914752006530762, 0.008958144187927246, 0.008960000038146973, 0.00888371181488037, 0.008897024154663086, 0.008880127906799316, 0.009155712127685546, 0.00903052806854248, 0.008970111846923827, 0.008937600135803223, 0.008921088218688965, 0.008887519836425781, 0.008849632263183594, 0.008978848457336425, 0.008933247566223145, 0.008890175819396972, 0.008888928413391114, 0.008902560234069825, 0.008902624130249023, 0.00889241600036621, 0.008887871742248536, 0.00893075180053711, 0.008920063972473144, 0.008976384162902832, 0.008914496421813965, 0.008962559700012206, 0.008916864395141602, 0.00895302391052246, 0.008913791656494141, 0.008887295722961425, 0.008957056045532227, 0.008968064308166504, 0.008885855674743653, 0.008903072357177735, 0.00892518424987793, 0.008921088218688965, 0.008968416213989258, 0.008981887817382812, 0.009764863967895507, 0.009005536079406738, 0.008982463836669922, 0.00890675163269043, 0.008900735855102539, 0.008920960426330567, 0.008970239639282226, 0.008943615913391113, 0.008904735565185546, 0.0089169282913208, 0.008929280281066895, 0.00890713596343994, 0.009020352363586425, 0.008877056121826172, 0.008953568458557128, 0.008898112297058105, 0.008870335578918457, 0.008886272430419923, 0.009011967658996582, 0.009527423858642577, 0.010597472190856933, 0.00901734447479248, 0.009564031600952148, 0.00898960018157959, 0.008937472343444825, 0.009236479759216308, 0.008867839813232421, 0.008927231788635253, 0.008934528350830077, 0.008893183708190918, 0.008869983673095704, 0.008902688026428223, 0.008881664276123047, 0.008915455818176269, 0.008934752464294433, 0.008876704216003417, 0.008883487701416016, 0.008926239967346192, 0.008890048027038574, 0.008812543869018554, 0.008889439582824708, 0.008922016143798828, 0.008900768280029297, 0.00902284812927246, 0.009218527793884277, 0.008939519882202148, 0.00893507194519043, 0.00905465602874756, 0.008983551979064941, 0.008872544288635254, 0.008955264091491699, 0.008931488037109374, 0.008887295722961425, 0.008875807762145997, 0.008892031669616699, 0.008971936225891113, 0.009998687744140625, 0.010410112380981445, 0.008892671585083008, 0.00894115161895752, 0.008933216094970704, 0.008880991935729981, 0.008809503555297851, 0.00890505599975586, 0.008877887725830078, 0.008937631607055664, 0.009109919548034667, 0.009170911788940429, 0.009135552406311035, 0.009003487586975098, 0.008915040016174316, 0.008939871788024903, 0.008928192138671876, 0.008884127616882325, 0.008897343635559081, 0.008877087593078613, 0.008876031875610351, 0.009171744346618653, 0.009044159889221191, 0.008893664360046388, 0.008873791694641113, 0.008613887786865235, 0.00901244831085205, 0.008910688400268554, 0.008889280319213867, 0.008882176399230958, 0.008865344047546386, 0.008845760345458984, 0.008863743782043456, 0.008790335655212402, 0.008894399642944336, 0.008988191604614258, 0.008881695747375488, 0.00887177562713623, 0.008928064346313477, 0.00894979190826416, 0.009023679733276366, 0.008904512405395509, 0.008896736145019532, 0.008971936225891113, 0.008865440368652344, 0.008864224433898926, 
0.008863455772399902, 0.008865056037902832, 0.008890591621398925, 0.008915743827819823, 0.008851455688476563, 0.008894463539123536, 0.008861696243286133, 0.008939359664916992, 0.008925344467163086, 0.008896415710449218, 0.008954015731811524, 0.008912832260131835, 0.008880352020263672, 0.008883999824523927, 0.008887935638427734, 0.008909184455871582, 0.00889628791809082, 0.008868032455444336, 0.008837151527404784, 0.008847776412963868, 0.008887904167175293, 0.008857600212097168, 0.008817760467529297, 0.008866432189941407, 0.008881855964660644, 0.008893024444580079, 0.008822303771972656, 0.008804224014282226, 0.00885750389099121, 0.00887059211730957, 0.008857760429382324, 0.008854784011840821, 0.008821215629577637, 0.008871392250061035, 0.008911520004272462, 0.008843263626098634, 0.008863903999328614, 0.009018912315368652, 0.008825311660766602, 0.009039711952209473, 0.009011072158813476, 0.008870016098022461, 0.008655551910400391, 0.008943807601928712, 0.009016384124755859, 0.008961119651794434, 0.00889020824432373, 0.008921279907226562, 0.008935232162475586, 0.008964096069335938, 0.008851455688476563, 0.008873727798461914, 0.008916447639465332, 0.008842047691345214, 0.00887395191192627, 0.00892518424987793, 0.008870176315307617, 0.00890828800201416, 0.00890447998046875, 0.008918784141540527, 0.008917695999145507, 0.008870016098022461, 0.008871071815490723, 0.008870623588562011, 0.008803808212280273, 0.008863840103149414, 0.00891539192199707, 0.008870176315307617, 0.008849120140075684, 0.008910176277160644, 0.008909472465515137, 0.008946975708007813, 0.008862431526184083, 0.008910847663879394, 0.008881888389587402, 0.008933216094970704, 0.008911456108093262, 0.00889241600036621, 0.008841055870056153, 0.008877344131469726, 0.008862015724182128, 0.008817055702209472, 0.008837120056152344, 0.008857695579528809, 0.008853407859802246, 0.008871935844421386, 0.00899071979522705, 0.008895584106445312, 0.008932607650756836, 0.008904224395751953, 0.008887968063354491, 0.008850048065185547, 0.008893280029296876, 0.0089303035736084, 0.008912896156311035, 0.008828736305236817, 0.008876480102539063, 0.008830240249633789, 0.008900992393493653, 0.009200960159301759, 0.008878879547119141, 0.00885372829437256, 0.008891231536865234, 0.008850367546081542, 0.008857664108276367, 0.008593952178955078, 0.008912960052490234, 0.008914336204528809, 0.008884511947631835, 0.00891321563720703, 0.008880127906799316, 0.008840543746948242, 0.008895392417907716, 0.008975296020507813, 0.00886457633972168, 0.008879360198974609, 0.008915840148925781, 0.008854623794555663, 0.008888447761535644, 0.008868736267089844, 0.008899935722351074, 0.00892563247680664, 0.00885536003112793, 0.008855744361877442, 0.008848671913146972, 0.008874496459960938, 0.008904159545898438, 0.008903424263000488, 0.008954943656921387, 0.008942048072814941, 0.008923199653625489, 0.00888259220123291, 0.008871520042419433, 0.008923583984375, 0.008875455856323242, 0.008937824249267578, 0.00887827205657959, 0.008898655891418457, 0.008873824119567872, 0.008931391716003418, 0.008912608146667481, 0.008882080078125, 0.008919424057006835, 0.008906720161437988, 0.008916416168212891, 0.008938079833984374, 0.009110976219177246, 0.008884703636169434, 0.008951904296875, 0.008867839813232421, 0.009244671821594238, 0.009105024337768554, 0.008950143814086915, 0.00891926383972168, 0.009200608253479004, 0.00897878360748291, 0.008982975959777833, 0.00885587215423584, 0.008951519966125488, 0.008898303985595704, 0.008964192390441895, 0.008887807846069335, 0.008917792320251465, 
0.008879615783691406, 0.008987168312072753, 0.008916671752929688, 0.009005215644836426, 0.008991071701049805, 0.008660991668701172, 0.008982208251953125, 0.00905247974395752, 0.008912896156311035, 0.008900927543640138, 0.008951199531555177, 0.00891932773590088, 0.008934847831726074, 0.008946592330932618, 0.00893507194519043, 0.008929280281066895, 0.008894463539123536, 0.008898591995239258, 0.008879872322082519, 0.009021663665771484, 0.008986559867858887, 0.009072095870971679, 0.008870847702026367, 0.008940320014953613, 0.008896832466125488, 0.008882623672485351, 0.008933024406433105, 0.008882656097412109, 0.00894547176361084, 0.0088721923828125, 0.008957887649536133, 0.00931174373626709, 0.008978943824768066, 0.008910976409912109, 0.00893939208984375, 0.00892751979827881, 0.00926796817779541, 0.009779359817504883, 0.009499456405639648, 0.009337183952331542, 0.00898739242553711, 0.0089934720993042, 0.008973952293395995, 0.008955743789672852, 0.008946528434753418, 0.008875103950500488, 0.008930432319641112, 0.008897631645202637, 0.00886025619506836, 0.008954879760742187, 0.008954463958740234, 0.008958368301391602, 0.008867839813232421, 0.008910847663879394, 0.008943615913391113, 0.009089152336120605, 0.00913599967956543, 0.00899891185760498, 0.008937472343444825, 0.0089169921875, 0.008888319969177246, 0.008931327819824218, 0.008923135757446288, 0.008894335746765137, 0.008899999618530273, 0.00883187198638916, 0.008865856170654296, 0.008875136375427245, 0.008702752113342285, 0.009015232086181641, 0.008942655563354492, 0.008967103958129883, 0.008927552223205567, 0.00901088047027588, 0.008879551887512207, 0.00890937614440918, 0.008877568244934082, 0.008948415756225585, 0.008918975830078125, 0.008908672332763672, 0.00892518424987793, 0.008926560401916504, 0.008905376434326172, 0.008888319969177246, 0.008920512199401855, 0.008886240005493163, 0.008903583526611329, 0.008860960006713867, 0.008895296096801758, 0.008916576385498047, 0.008983903884887696, 0.00915715217590332, 0.008957823753356934, 0.008972479820251464, 0.00899897575378418, 0.008980480194091797, 0.008881312370300293, 0.009073504447937012, 0.00926540756225586, 0.008988415718078614, 0.009005056381225587, 0.008964096069335938, 0.008959967613220215, 0.008938624382019042, 0.008940223693847657, 0.008904000282287598, 0.008949664115905762, 0.0089303035736084, 0.008893856048583984, 0.008921088218688965, 0.008923744201660156, 0.008893440246582032, 0.008874591827392577, 0.009052895545959472, 0.008894144058227539, 0.008923135757446288, 0.0088853759765625, 0.00894809627532959, 0.008890239715576172, 0.008945504188537597, 0.009025856018066407, 0.008859935760498048, 0.008933568000793457, 0.008946911811828613, 0.008891519546508788, 0.008926495552062989, 0.008922816276550293, 0.008927935600280762, 0.00887929630279541, 0.008893024444580079, 0.00888371181488037, 0.008677696228027344, 0.009243807792663573, 0.009261919975280761, 0.009233823776245117, 0.008949376106262208, 0.009182175636291505, 0.008947232246398925, 0.009021696090698242, 0.008952159881591797, 0.008926176071166992, 0.009024319648742675, 0.009607263565063476, 0.009040096282958984, 0.008916768074035644, 0.008856896400451661, 0.008910592079162599, 0.009089983940124512, 0.008924927711486817, 0.00889680004119873, 0.008931296348571777, 0.008935423851013183, 0.008884223937988281, 0.008861632347106933, 0.008851519584655762, 0.008857600212097168, 0.008900959968566895, 0.008877728462219238, 0.008855263710021972, 0.00892137622833252, 0.00896828842163086, 0.009012864112854005, 0.0089901123046875, 
0.00901414394378662, 0.008975584030151368, 0.008956704139709473, 0.00889241600036621, 0.008953791618347168, 0.008878144264221191, 0.008941727638244629, 0.00889020824432373, 0.008905759811401367, 0.008969183921813964, 0.008934816360473634, 0.008964703559875489, 0.008918304443359375, 0.008935968399047851, 0.008923328399658204, 0.008893471717834472, 0.00885654354095459, 0.008951359748840333, 0.008855135917663574, 0.008889023780822753, 0.008849056243896484, 0.008833760261535645, 0.008855648040771484, 0.008879743576049804, 0.008839391708374023, 0.008855392456054687, 0.008802111625671388, 0.008835264205932617, 0.008874176025390625, 0.008844703674316405, 0.00886240005493164, 0.008654848098754882, 0.008904704093933105, 0.008857600212097168, 0.008908479690551759, 0.008855968475341798, 0.009045439720153809, 0.008837183952331544, 0.008886688232421875, 0.008870240211486816, 0.008867487907409669, 0.008876031875610351, 0.008877568244934082, 0.008885791778564453, 0.008889632225036621, 0.009145952224731446, 0.009100864410400391, 0.00895849609375, 0.008951680183410644, 0.008949888229370118, 0.008971839904785157, 0.008976832389831543, 0.008951359748840333, 0.008934880256652832, 0.008987615585327149, 0.008943615913391113, 0.008972288131713867, 0.008902655601501466, 0.008965472221374511, 0.009087615966796875, 0.008990752220153808, 0.008942720413208007, 0.00890345573425293, 0.009000288009643555, 0.008995903968811034, 0.008918560028076172, 0.008921567916870116, 0.008883904457092286, 0.008912320137023927, 0.008898528099060058, 0.008861824035644532, 0.008899264335632324, 0.008894368171691895, 0.008910400390625, 0.008853183746337891, 0.008902527809143067, 0.008889439582824708, 0.008984224319458008, 0.00899071979522705, 0.008861696243286133, 0.008943488121032715, 0.009140352249145507, 0.009281439781188965, 0.008859744071960449, 0.00883619213104248, 0.008895392417907716, 0.008873311996459961, 0.008854016304016114, 0.00914243221282959, 0.00891881561279297, 0.009480192184448242, 0.00888649559020996, 0.008845312118530273, 0.008931551933288574, 0.008653663635253906, 0.008993856430053711, 0.009318719863891602, 0.00901734447479248, 0.008932000160217285, 0.008916768074035644, 0.008904800415039063, 0.008928352355957032, 0.008895392417907716, 0.00886796760559082, 0.00889408016204834, 0.00894092845916748, 0.008914912223815918, 0.008922016143798828, 0.008896672248840332, 0.008914079666137695, 0.008946623802185059, 0.009038975715637207, 0.008854047775268555, 0.008953951835632324, 0.008939519882202148, 0.00900710391998291, 0.008919039726257324, 0.00894761562347412, 0.009020928382873536, 0.008893024444580079, 0.008897695541381836, 0.008866047859191895, 0.008956512451171876, 0.008896512031555176, 0.008896608352661133, 0.008878080368041993, 0.008919072151184083, 0.008946623802185059, 0.008887231826782227, 0.008843647956848144, 0.008887455940246583, 0.008895135879516602, 0.008898367881774902, 0.008935423851013183, 0.00890060806274414, 0.00894713592529297, 0.008972127914428711, 0.00894819164276123, 0.008915264129638672, 0.00891487979888916, 0.008912320137023927, 0.008911423683166504, 0.00890880012512207, 0.008895520210266114, 0.008903583526611329, 0.008941632270812988, 0.008899871826171874, 0.008886752128601074, 0.008877951622009278, 0.009083264350891114, 0.008898816108703612, 0.008826656341552734, 0.008880096435546875, 0.008889375686645508, 0.008906815528869629, 0.008925151824951173, 0.00888044834136963, 0.008795007705688477, 0.008994720458984374, 0.008986144065856934, 0.008937952041625976, 0.008996479988098145, 
0.008992704391479492, 0.00893177604675293, 0.008969280242919922, 0.008973247528076172, 0.008919039726257324, 0.009000767707824707, 0.00890454387664795, 0.008946016311645507, 0.008954143524169922, 0.008999967575073242, 0.008999615669250489, 0.008962143898010254, 0.008976287841796875, 0.008996864318847657, 0.008955424308776855, 0.009003487586975098, 0.00898470401763916, 0.008962271690368652, 0.008916319847106934, 0.008885951995849609, 0.008923775672912597, 0.0089169921875, 0.00890988826751709, 0.008919551849365234, 0.008908224105834962, 0.00909823989868164, 0.009174176216125488, 0.00900972843170166, 0.008935359954833985, 0.009021792411804199, 0.009041440010070801, 0.009005215644836426, 0.008977888107299804, 0.008967167854309082, 0.008914719581604004, 0.008906463623046874, 0.011077887535095214, 0.01050767993927002, 0.009095871925354004, 0.008998432159423829, 0.009001152038574219, 0.009000639915466308, 0.009007072448730469, 0.009011967658996582, 0.008905856132507325, 0.008966912269592285, 0.008970432281494141, 0.008959263801574706, 0.00892182445526123, 0.008931136131286621, 0.008917280197143555, 0.008926943778991699, 0.008873215675354004, 0.008895551681518555, 0.008826560020446777, 0.008836735725402832, 0.008888575553894043, 0.008872063636779785, 0.008608927726745605, 0.008928095817565918, 0.008939616203308106, 0.008893600463867187, 0.008931679725646973, 0.008914560317993163, 0.008863615989685058, 0.008928159713745117, 0.008906559944152832, 0.008829119682312012, 0.008929120063781739, 0.008972448348999023, 0.008923135757446288, 0.008939616203308106, 0.008962176322937011, 0.008949343681335448, 0.008931520462036133, 0.008904704093933105, 0.008867487907409669, 0.008892191886901855, 0.008954239845275878, 0.008945631980895995, 0.009013471603393554, 0.00924079990386963, 0.009044960021972656, 0.009075136184692382, 0.009023872375488282, 0.009025216102600098, 0.009023615837097168, 0.008994400024414062, 0.008991328239440918, 0.008929632186889648, 0.009197216033935546, 0.008946016311645507, 0.009021087646484375, 0.008965279579162598, 0.008894911766052246, 0.009099072456359863, 0.008882783889770507, 0.008843168258666993, 0.008895711898803711, 0.008893312454223633, 0.008922975540161132, 0.008878496170043946, 0.008840255737304688, 0.008901311874389648, 0.008878080368041993, 0.009174528121948243, 0.009058943748474121, 0.008927295684814453, 0.00887715244293213, 0.008926943778991699, 0.008909824371337891, 0.0089169921875, 0.00892080020904541, 0.008907039642333985, 0.008970047950744629, 0.009015487670898438, 0.00889260768890381, 0.008906815528869629, 0.008901663780212402, 0.008934111595153808, 0.00890287971496582]",tokens/s,111.41511952544634,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1066.164224,2079.260672,0.0,1684.013056,1657.439232,s,1,7.20651171875,7.20651171875,0.0,7.20651171875,7.20651171875,7.20651171875,7.20651171875,[7.20651171875],,kWh,3.902005829168804e-06,4.2313658090350484e-07,1.08944531600208e-06,5.414587726074389e-06,,MB,1465.778176,2121.203712,0.0,1713.373184,1302.298112,s,10,0.2045635223388672,0.020456352233886716,9.982592927681153e-05,0.020436384201049804,0.020562000274658203,0.020579784393310548,0.020594011688232425,"[0.02054652786254883, 0.020274528503417967, 0.020597568511962892, 0.02044051170349121, 0.020333696365356445, 0.02040687942504883, 0.020550016403198243, 0.020558048248291015, 0.0204322566986084, 0.02042348861694336]",tokens/s,12514.450136223524,kWh,6.115423075575541e-07,6.744228202231944e-08,4.052339364602495e-07,1.0842185260401231e-06,tokens/kWh,236114762.70838627,MB,1498.3168,2167.341056,0.0,1759.510528,1302.300672,s,10,10.629517578125,1.0629517578125,0.006288410895526865,1.0607664184570313,1.0732203857421874,1.074286279296875,1.075138994140625,"[1.0546771240234376, 1.05889404296875, 1.0729835205078124, 1.0753521728515625, 1.0589505615234376, 1.05885888671875, 1.066343994140625, 1.05998388671875, 1.0615489501953126, 1.0619244384765625]",tokens/s,59.26891746211583,kWh,3.077133158369081e-05,3.3935785457500333e-06,1.673241866253999e-05,5.089732879198083e-05,tokens/kWh,1237785.9800360685,,s,630,10.627107385635394,0.016868424421643456,0.00040986439613106514,0.01679119968414307,0.01704718360900879,0.01721332950592041,0.01900769989013672,"[0.01665843200683594, 0.01665843200683594, 0.016521215438842773, 0.01657478332519531, 0.016576192855834962, 0.016639999389648438, 0.016680959701538087, 0.016615423202514648, 0.016570144653320313, 0.016547775268554686, 0.016611616134643556, 0.01687721633911133, 0.016636255264282227, 0.01659222412109375, 0.016642719268798827, 0.016620832443237303, 0.016692256927490233, 0.016608383178710936, 0.016656959533691406, 0.01665417671203613, 0.016699552536010742, 0.01661952018737793, 0.01658470344543457, 0.01661337661743164, 0.016656415939331055, 0.016823520660400392, 0.01691929626464844, 0.016719871520996094, 0.016637855529785157, 0.01686128044128418, 0.01677107238769531, 0.016785408020019533, 0.016709632873535156, 0.016602304458618163, 0.01662406349182129, 0.01672835159301758, 0.016683103561401368, 0.016672767639160157, 0.01656012725830078, 0.0165928955078125, 0.01664793586730957, 0.017186912536621093, 0.01675507164001465, 0.01666640090942383, 0.01685417556762695, 0.016728927612304687, 0.0168222713470459, 0.01663542366027832, 0.016701919555664062, 0.016535295486450195, 0.01656563186645508, 0.01680678367614746, 0.016769023895263673, 0.016614463806152342, 0.019633087158203125, 0.01718262481689453, 0.016664991378784178, 0.016662208557128907, 0.01678451156616211, 0.016681856155395507, 0.016777215957641603, 0.016618656158447265, 0.01665011215209961, 0.016661888122558595, 0.01660108757019043, 0.01672256088256836, 0.01658880043029785, 0.016580160140991212, 0.016563711166381837, 0.016647104263305665, 0.016604448318481447, 0.016658880233764647, 0.016658527374267578, 0.016732608795166016, 0.016657472610473633, 0.01674825668334961, 0.016662687301635743, 0.016660287857055665, 0.016670816421508788, 0.016659360885620117, 0.01680179214477539, 0.016722015380859375, 0.016606271743774412, 0.016587615966796875, 0.016800928115844726, 0.016880767822265625, 0.017581024169921876, 0.01807846450805664, 0.017460832595825194, 0.016946720123291015, 0.01682111930847168, 
0.01681203269958496, 0.016605440139770507, 0.01666806411743164, 0.016666112899780275, 0.016698047637939452, 0.016691360473632812, 0.01655513572692871, 0.016620416641235352, 0.018130048751831055, 0.01666736030578613, 0.016629856109619142, 0.01672198486328125, 0.016586751937866212, 0.016752639770507814, 0.017182144165039062, 0.016682655334472656, 0.016650751113891603, 0.01692207908630371, 0.016742591857910157, 0.016763647079467772, 0.016777151107788085, 0.016703296661376953, 0.016651935577392578, 0.016863840103149414, 0.01675491142272949, 0.016938783645629882, 0.016781312942504883, 0.016985279083251953, 0.01686809539794922, 0.016810239791870116, 0.016829568862915038, 0.016884128570556642, 0.016855327606201172, 0.01680384063720703, 0.01700864028930664, 0.016789024353027343, 0.016935327529907226, 0.016791616439819336, 0.016758687973022462, 0.016646240234375, 0.016774656295776368, 0.016712255477905273, 0.016670656204223634, 0.016556032180786134, 0.01660848045349121, 0.016707391738891603, 0.016712671279907228, 0.01659699249267578, 0.016613471984863282, 0.01661942481994629, 0.01658880043029785, 0.01682636833190918, 0.01706188774108887, 0.016717824935913086, 0.016812288284301757, 0.016685983657836915, 0.016661376953125, 0.016721887588500975, 0.01715123176574707, 0.017185535430908203, 0.016848096847534178, 0.016904991149902345, 0.0168407039642334, 0.016821247100830078, 0.016696319580078126, 0.017006591796875, 0.01700249671936035, 0.016957439422607423, 0.016793600082397463, 0.016908287048339844, 0.016664480209350584, 0.016773216247558592, 0.017092607498168946, 0.021839967727661135, 0.017072032928466797, 0.01712460708618164, 0.020431583404541015, 0.018933792114257813, 0.017072128295898437, 0.016891904830932617, 0.016786880493164062, 0.016894048690795898, 0.016875999450683594, 0.017046655654907227, 0.01685593605041504, 0.016848384857177736, 0.01697990417480469, 0.01680851173400879, 0.01780672073364258, 0.0170250244140625, 0.016953311920166015, 0.016801599502563477, 0.016819040298461915, 0.01677712059020996, 0.01682009506225586, 0.016908512115478516, 0.017313791275024415, 0.016842975616455078, 0.01675913619995117, 0.017279392242431642, 0.01679363250732422, 0.016916032791137695, 0.01683888053894043, 0.017410560607910155, 0.01682316780090332, 0.016827232360839845, 0.01693187141418457, 0.016777503967285157, 0.016781503677368165, 0.016845312118530274, 0.017086463928222655, 0.01691628837585449, 0.017076416015625, 0.01694246482849121, 0.016940736770629884, 0.01687238311767578, 0.01733843231201172, 0.01693631935119629, 0.017021503448486328, 0.016883712768554687, 0.01680793571472168, 0.016942975997924804, 0.017090688705444335, 0.017305599212646485, 0.01723494338989258, 0.01704652786254883, 0.017084415435791016, 0.01689727973937988, 0.016767744064331055, 0.018114559173583983, 0.01698147201538086, 0.016859136581420898, 0.020581184387207033, 0.017000160217285155, 0.016838239669799804, 0.016867744445800782, 0.016807872772216795, 0.017558847427368164, 0.019430143356323242, 0.01700454330444336, 0.017092096328735353, 0.01696614456176758, 0.01685264015197754, 0.01687366485595703, 0.016856895446777344, 0.016826719284057618, 0.016953472137451173, 0.01706585693359375, 0.017606367111206056, 0.01693110466003418, 0.0168035831451416, 0.016799104690551757, 0.016827264785766603, 0.01689219284057617, 0.01677510452270508, 0.0168341121673584, 0.016752864837646483, 0.016763935089111327, 0.01681884765625, 0.016722240447998048, 0.016876928329467772, 0.01676697540283203, 0.016748544692993163, 0.016867551803588867, 
0.016664352416992188, 0.016713727951049806, 0.01679952049255371, 0.016843040466308593, 0.016741472244262694, 0.016914527893066408, 0.016818944931030273, 0.01681510353088379, 0.01676310348510742, 0.016748863220214842, 0.016734687805175782, 0.017934335708618163, 0.016680959701538087, 0.016676864624023437, 0.016672063827514648, 0.016681663513183592, 0.0167956485748291, 0.01675200080871582, 0.016749183654785157, 0.0167587833404541, 0.016871103286743162, 0.01705401611328125, 0.016913631439208984, 0.017064512252807616, 0.017062335968017577, 0.016860960006713867, 0.016807424545288087, 0.016747007369995116, 0.01683865547180176, 0.0168222713470459, 0.016776832580566406, 0.016797088623046876, 0.01673520088195801, 0.01678163146972656, 0.016686784744262696, 0.016744447708129884, 0.016728063583374024, 0.01670297622680664, 0.016687616348266602, 0.017059839248657227, 0.016957439422607423, 0.01679088020324707, 0.016756704330444336, 0.016863935470581053, 0.016762208938598634, 0.016718496322631837, 0.01672719955444336, 0.016794815063476562, 0.016663455963134767, 0.016786176681518553, 0.016725791931152343, 0.016783584594726564, 0.01679155158996582, 0.016812000274658203, 0.016734079360961916, 0.01674051284790039, 0.01674355125427246, 0.016774015426635744, 0.016668672561645507, 0.016732160568237304, 0.01670515251159668, 0.01674278450012207, 0.016801504135131835, 0.017024351119995118, 0.01688435173034668, 0.016761152267456055, 0.016891359329223633, 0.016757280349731445, 0.016924671173095703, 0.016736255645751954, 0.01671977615356445, 0.016769119262695312, 0.016737823486328126, 0.01668921661376953, 0.0166297607421875, 0.016774848937988283, 0.016734687805175782, 0.016710016250610353, 0.016795520782470704, 0.016773120880126953, 0.016852991104125976, 0.01797292709350586, 0.01683692741394043, 0.016760831832885743, 0.0168222713470459, 0.01679155158996582, 0.01680384063720703, 0.01680588722229004, 0.01683046340942383, 0.016861183166503906, 0.01684787178039551, 0.016755712509155272, 0.01677516746520996, 0.016742303848266603, 0.016744543075561523, 0.016735519409179687, 0.016720447540283203, 0.016660640716552735, 0.016723455429077147, 0.016908031463623047, 0.01682441520690918, 0.016751264572143553, 0.016687103271484375, 0.016738304138183592, 0.016703744888305665, 0.01716592025756836, 0.016797855377197267, 0.01676288032531738, 0.016696928024291992, 0.01669571113586426, 0.016780799865722656, 0.016779359817504884, 0.016757152557373048, 0.016754623413085937, 0.016805952072143554, 0.016723968505859374, 0.01704694366455078, 0.016796607971191407, 0.016746143341064453, 0.016736255645751954, 0.01677516746520996, 0.016772607803344726, 0.016801408767700195, 0.01692857551574707, 0.0168221435546875, 0.016729984283447265, 0.01682681655883789, 0.016717824935913086, 0.016893951416015626, 0.01678950309753418, 0.016756736755371093, 0.016748544692993163, 0.016693248748779296, 0.01712115287780762, 0.018301055908203124, 0.016889856338500975, 0.016972063064575195, 0.01692393684387207, 0.016882015228271485, 0.016988256454467773, 0.017327840805053712, 0.016838783264160155, 0.01678758430480957, 0.01673539161682129, 0.01671993637084961, 0.01672684860229492, 0.0167873592376709, 0.016750688552856444, 0.01680335998535156, 0.01702140808105469, 0.016848255157470703, 0.01688230323791504, 0.016788768768310546, 0.016754751205444337, 0.016844959259033204, 0.016845312118530274, 0.01675984001159668, 0.016726079940795897, 0.016656320571899415, 0.016780256271362304, 0.016672767639160157, 0.01683046340942383, 0.01679302406311035, 0.01681772804260254, 
0.016753664016723634, 0.01675468826293945, 0.016734399795532227, 0.01672697639465332, 0.01693084716796875, 0.016837472915649413, 0.016738239288330077, 0.01673222351074219, 0.019037887573242186, 0.019138879776000976, 0.017323200225830077, 0.016909120559692382, 0.01685910415649414, 0.01676700782775879, 0.016732160568237304, 0.0167359676361084, 0.016781600952148437, 0.016856351852416993, 0.016861919403076173, 0.016701440811157226, 0.016871423721313478, 0.016831552505493164, 0.016726207733154298, 0.01677267265319824, 0.016757728576660157, 0.01681407928466797, 0.01707827186584473, 0.016875520706176757, 0.01677926445007324, 0.016781183242797853, 0.016742528915405272, 0.016747520446777343, 0.016749088287353515, 0.016843231201171874, 0.01695267105102539, 0.016800416946411132, 0.01670710372924805, 0.016727584838867188, 0.01672492790222168, 0.016738304138183592, 0.01680384063720703, 0.016756736755371093, 0.0167587833404541, 0.016726015090942382, 0.016731136322021483, 0.017054975509643553, 0.01690387153625488, 0.016793664932250978, 0.016780479431152344, 0.016810272216796877, 0.016790048599243164, 0.01681407928466797, 0.016737855911254883, 0.01671331214904785, 0.016819295883178712, 0.016946943283081054, 0.016774591445922853, 0.01677574348449707, 0.016736255645751954, 0.01678873634338379, 0.016757503509521484, 0.017344127655029296, 0.016799583435058593, 0.016771007537841796, 0.016869983673095702, 0.016797151565551758, 0.01683919906616211, 0.016770591735839845, 0.01690889549255371, 0.016869247436523436, 0.01680931282043457, 0.016818815231323243, 0.01676291275024414, 0.016869375228881836, 0.016867328643798828, 0.01681407928466797, 0.016807199478149414, 0.016714399337768554, 0.017149248123168946, 0.016843103408813478, 0.016843328475952147, 0.016811872482299806, 0.01682636833190918, 0.01685055923461914, 0.016892288208007814, 0.01677663993835449, 0.016749120712280272, 0.016664447784423827, 0.01675276756286621, 0.01672742462158203, 0.016775808334350585, 0.016772192001342775, 0.017090591430664062, 0.016978815078735353, 0.016720991134643554, 0.016748575210571288, 0.01670172882080078, 0.016800031661987305, 0.016755008697509767, 0.016884960174560548, 0.016746944427490234, 0.016769247055053713, 0.016754816055297852, 0.01680303955078125, 0.016735008239746094, 0.01695929527282715, 0.017031360626220703, 0.016852991104125976, 0.016737472534179686, 0.01676576042175293, 0.0168341121673584, 0.016904640197753906, 0.017299455642700197, 0.016787424087524414, 0.016754720687866213, 0.0167475528717041, 0.016769439697265624, 0.01697350311279297, 0.016783519744873045, 0.016788192749023437, 0.01675468826293945, 0.016793600082397463, 0.01685830307006836, 0.01691935920715332, 0.01672563171386719, 0.016808319091796874, 0.01676838493347168, 0.016771743774414063, 0.016877119064331054, 0.01694761657714844, 0.01681407928466797, 0.01704934310913086, 0.016877824783325196, 0.016817279815673828, 0.01685183906555176, 0.017075519561767578, 0.01739641571044922, 0.016778335571289063, 0.01696656036376953, 0.016990207672119142, 0.016936063766479492, 0.01684979248046875, 0.016781312942504883, 0.016924671173095703, 0.016856191635131836, 0.01685513687133789, 0.016744672775268556, 0.01682899284362793, 0.016793952941894532, 0.017362943649291994, 0.016879615783691407, 0.016850240707397462, 0.0167740478515625, 0.01680771255493164, 0.016746400833129883, 0.016742719650268554, 0.016735872268676757, 0.016844959259033204, 0.01680588722229004, 0.016783071517944337, 0.016715423583984375, 0.01685183906555176, 0.01676406478881836, 0.016787296295166017, 
0.016726112365722655, 0.017075872421264647, 0.01700147247314453, 0.016958879470825194, 0.01673423957824707, 0.016710208892822265, 0.01683785629272461, 0.016816928863525392, 0.016777568817138672, 0.016715423583984375, 0.01678982353210449, 0.016846015930175783, 0.016793888092041017, 0.016709856033325195, 0.01680179214477539, 0.01678892707824707, 0.01681056022644043, 0.017095680236816405, 0.01686809539794922, 0.016824384689331055, 0.016801599502563477, 0.016817920684814452, 0.016794240951538086, 0.01686083221435547, 0.01678780746459961, 0.01673535919189453, 0.016735103607177733, 0.016774911880493164, 0.01692288017272949, 0.016885408401489256, 0.01673456001281738, 0.016769023895263673, 0.01696758460998535, 0.016990304946899414, 0.016850944519042968, 0.016836383819580077, 0.016963808059692383, 0.01690153694152832, 0.016791519165039064, 0.0168239688873291, 0.01679203224182129, 0.016979488372802734, 0.0168253116607666, 0.016911775588989257, 0.016775775909423828, 0.017111040115356444, 0.01761065673828125]",tokens/s,59.28235945480034,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.634048,13880.918016,0.0,13478.395904,13476.849152,s,1,7.35041064453125,7.35041064453125,0.0,7.35041064453125,7.35041064453125,7.35041064453125,7.35041064453125,[7.35041064453125],,kWh,8.439093891668866e-06,9.236513171774608e-07,5.1227818759977395e-06,1.4485527084844066e-05,,MB,1311.862784,14117.896192,0.0,13702.791168,13671.637504,s,10,2.127459121704102,0.2127459121704102,0.00519433418086913,0.214504753112793,0.216806494140625,0.21770822448730467,0.21842960876464843,"[0.20121987915039063, 0.21380003356933594, 0.21627299499511718, 0.21650291442871095, 0.21660610961914062, 0.21860995483398438, 0.2151478729248047, 0.20882572937011717, 0.20661199951171874, 0.21386163330078126]",tokens/s,1203.313367520515,kWh,6.191126560330278e-06,6.823516867686455e-07,4.090581976166618e-06,1.0964060223265541e-05,tokens/kWh,23349014.396762665,MB,1367.769088,14119.993344,0.0,13702.791168,13671.640064,s,10,37.44088891601562,3.744088891601563,0.007610170858894072,3.7435976562500004,3.75319150390625,3.75492119140625,3.75630494140625,"[3.736145263671875, 3.734241455078125, 3.74118798828125, 3.74171337890625, 3.74886767578125, 3.75280712890625, 3.75665087890625, 3.75016748046875, 3.74548193359375, 3.733625732421875]",tokens/s,16.826523574618246,kWh,0.00010880863433841748,1.2002286469289821e-05,7.226475688583382e-05,0.00019307567769354106,tokens/kWh,326296.9253952153,,s,630,37.43761271286017,0.05942478208390492,0.0005483191210658585,0.05936655998229981,0.05980356597900391,0.06000085124969482,0.06286157764434815,"[0.062053726196289065, 0.05935804748535156, 0.05880627059936523, 0.058896190643310545, 0.058714305877685544, 0.058782943725585936, 0.05884543991088867, 0.05893379211425781, 0.058832447052001954, 0.05885116958618164, 0.05887855911254883, 0.05920153427124023, 0.05889190292358398, 0.05875120162963867, 0.05884329605102539, 0.059033599853515625, 0.05957017517089844, 0.05949030303955078, 0.05939567947387695, 0.059095455169677735, 0.05899468612670898, 0.058867233276367184, 0.058796222686767576, 0.05887958526611328, 0.05884179306030273, 0.05907660675048828, 0.059057857513427736, 0.05916067123413086, 0.05914751815795898, 0.059526111602783205, 0.059385665893554686, 0.059380992889404294, 0.059640766143798825, 0.059797088623046876, 0.05989321517944336, 0.05954246520996094, 
0.05933055877685547, 0.059272350311279295, 0.05927113723754883, 0.059168960571289064, 0.05920147323608398, 0.05910809707641602, 0.059189247131347655, 0.059399681091308595, 0.05944527816772461, 0.05943913650512695, 0.05948255920410156, 0.05943091201782227, 0.05963529586791992, 0.05961974334716797, 0.05962860870361328, 0.05966739273071289, 0.05953945541381836, 0.059545600891113284, 0.05935222244262695, 0.059310943603515624, 0.059461631774902345, 0.059518081665039066, 0.05938191986083984, 0.05959552001953125, 0.05954761505126953, 0.0594513931274414, 0.05961715316772461, 0.06287769699096679, 0.05943888092041016, 0.05890204620361328, 0.058778305053710934, 0.05869772720336914, 0.05890867233276367, 0.058877216339111325, 0.058880096435546876, 0.05903219223022461, 0.059172863006591796, 0.05897830581665039, 0.059143646240234375, 0.05899318313598633, 0.05896806335449219, 0.0590371208190918, 0.05902579116821289, 0.059668704986572264, 0.059703262329101565, 0.05935923385620117, 0.05921791839599609, 0.05905168151855469, 0.05904624176025391, 0.059224063873291016, 0.058982398986816405, 0.05892214584350586, 0.05884310531616211, 0.05896486282348633, 0.05913779067993164, 0.05919126510620117, 0.05915017700195312, 0.05916511917114258, 0.059122913360595705, 0.05962406539916992, 0.05964313507080078, 0.05971014404296875, 0.059472095489501955, 0.059262977600097654, 0.059246593475341794, 0.05906156921386719, 0.05904441452026367, 0.05911360168457031, 0.05932032012939453, 0.059004257202148434, 0.059019935607910155, 0.059122974395751954, 0.05911561584472656, 0.05917923355102539, 0.059359649658203124, 0.05945334243774414, 0.0595948486328125, 0.05954764938354492, 0.059650047302246094, 0.05960022354125977, 0.0595643196105957, 0.05927471923828125, 0.059176895141601564, 0.05924143981933594, 0.05919075012207031, 0.059146785736083986, 0.05931417465209961, 0.05930099105834961, 0.05947049713134766, 0.05958793640136719, 0.06348534393310547, 0.060346912384033204, 0.05932038497924805, 0.05920476913452148, 0.05878665542602539, 0.058825790405273436, 0.05881676864624023, 0.05874480056762695, 0.05868515014648437, 0.05876633453369141, 0.05878579330444336, 0.058772510528564456, 0.05879072189331055, 0.058977920532226565, 0.05899318313598633, 0.059152416229248043, 0.05960214233398437, 0.05945625686645508, 0.05920336151123047, 0.05917241668701172, 0.05889279937744141, 0.05918044662475586, 0.05906713485717773, 0.05890457534790039, 0.05894496154785156, 0.05896822357177734, 0.05895996856689453, 0.05901526260375976, 0.05875446319580078, 0.058877918243408205, 0.05893616104125977, 0.05907865524291992, 0.05947555160522461, 0.05982249450683594, 0.059675838470458986, 0.059749183654785154, 0.05959065628051758, 0.05950668716430664, 0.05941571044921875, 0.05943350219726563, 0.05945171356201172, 0.05955379104614258, 0.05947596740722656, 0.05943296051025391, 0.0593851203918457, 0.059498207092285156, 0.05944947052001953, 0.05954444885253906, 0.05955350494384765, 0.05979734420776367, 0.05959267044067383, 0.05985446548461914, 0.05979833602905273, 0.059762718200683594, 0.05955379104614258, 0.059611137390136716, 0.05960704040527344, 0.05961884689331055, 0.059601375579833984, 0.05952102279663086, 0.059627521514892576, 0.05971116638183594, 0.05966579055786133, 0.06282211303710937, 0.059854400634765624, 0.05920569610595703, 0.05912547302246094, 0.0590868148803711, 0.059175872802734376, 0.05903936004638672, 0.05902579116821289, 0.059172863006591796, 0.05908889770507812, 0.059264030456542965, 0.05907900619506836, 0.05905062484741211, 0.05906227111816406, 
0.059082752227783204, 0.05942844772338867, 0.05979318237304688, 0.06003366470336914, 0.05972377777099609, 0.05963161468505859, 0.05948163223266602, 0.05948668670654297, 0.059324417114257816, 0.059240447998046876, 0.05922537612915039, 0.05936406326293946, 0.05943862533569336, 0.059359264373779294, 0.05923884963989258, 0.059215873718261716, 0.05918851089477539, 0.05929804611206055, 0.05946822357177734, 0.05971478271484375, 0.059418529510498044, 0.05937376022338867, 0.05930451202392578, 0.059351200103759764, 0.05922611236572266, 0.059090206146240234, 0.059146976470947264, 0.05916057586669922, 0.05919948959350586, 0.05915961456298828, 0.059152320861816404, 0.05913625717163086, 0.059112190246582035, 0.05916672134399414, 0.059328510284423826, 0.05957632064819336, 0.05966233444213867, 0.05952511978149414, 0.059487903594970706, 0.05957052612304688, 0.05942272186279297, 0.059338623046875, 0.05929177474975586, 0.05944105529785156, 0.05943091201782227, 0.05957231903076172, 0.05934080123901367, 0.05940838241577148, 0.05926911926269531, 0.0630722541809082, 0.05967871856689453, 0.05885504150390625, 0.05877737426757813, 0.05896809768676758, 0.058986465454101564, 0.05888985443115234, 0.05888713455200195, 0.05890252685546875, 0.059032958984375, 0.059114112854003906, 0.05873600006103516, 0.058757278442382814, 0.05883878326416016, 0.05895199966430664, 0.05933097457885742, 0.05979248046875, 0.05984143829345703, 0.05954927825927735, 0.05934121704101562, 0.05991219329833984, 0.05938534545898438, 0.059343360900878904, 0.059308032989501956, 0.05968076705932617, 0.05944643020629883, 0.059417438507080075, 0.05936265563964844, 0.05921449661254883, 0.059310016632080076, 0.05939820861816406, 0.05945753479003906, 0.05980979156494141, 0.05990371322631836, 0.05979724884033203, 0.05964239883422852, 0.0594595832824707, 0.05948185729980469, 0.05932672119140625, 0.05941657638549805, 0.0594714241027832, 0.05963983917236328, 0.05964432144165039, 0.05975449752807617, 0.05959190368652344, 0.05956467056274414, 0.05943910217285156, 0.0594760627746582, 0.05967059326171875, 0.06001414489746094, 0.05984707260131836, 0.05982006454467773, 0.05978281784057617, 0.05964220809936523, 0.05963350296020508, 0.059616737365722657, 0.059607200622558594, 0.05979804611206055, 0.059469024658203126, 0.0596624641418457, 0.05965856170654297, 0.059674144744873044, 0.05963654327392578, 0.06296361541748047, 0.06002534484863281, 0.05927091217041015, 0.05902524948120117, 0.05901667022705078, 0.05917177581787109, 0.05913600158691406, 0.059084800720214846, 0.0590805778503418, 0.05915865707397461, 0.059156478881835936, 0.05931340789794922, 0.05913993453979492, 0.059253662109375, 0.05949996948242187, 0.05979808044433594, 0.05987942504882812, 0.05985279846191406, 0.05961075210571289, 0.059480159759521485, 0.05931651306152344, 0.05940611267089844, 0.05933692932128906, 0.059308032989501956, 0.05935491180419922, 0.05940860748291016, 0.05936742401123047, 0.059364513397216795, 0.05923712158203125, 0.059504737854003904, 0.05942476654052734, 0.059617088317871096, 0.059811038970947264, 0.05979644775390625, 0.05979097747802734, 0.05971596908569336, 0.05958860778808594, 0.0594200325012207, 0.05955228805541992, 0.05935318374633789, 0.05946515274047852, 0.059496990203857424, 0.059383838653564454, 0.05947779083251953, 0.059496673583984375, 0.05949235153198242, 0.05949849700927735, 0.0596060791015625, 0.05999280166625977, 0.06005168151855469, 0.06004121780395508, 0.06005263900756836, 0.05997449493408203, 0.059813343048095706, 0.059507007598876956, 0.05953968048095703, 
0.05953510284423828, 0.05953059387207031, 0.05948448181152344, 0.05948476791381836, 0.05961523056030273, 0.05969100952148437, 0.059631103515625, 0.06307843017578126, 0.06039958572387695, 0.05948403167724609, 0.0592856330871582, 0.05907455825805664, 0.0591196174621582, 0.059099136352539064, 0.059098400115966794, 0.0592283821105957, 0.059130367279052735, 0.059240447998046876, 0.05914940643310547, 0.05920655822753906, 0.059202816009521486, 0.05928217697143555, 0.059662174224853516, 0.06014089584350586, 0.06023779296875, 0.06019158554077148, 0.059692703247070315, 0.059404640197753905, 0.05940137481689453, 0.05924950408935547, 0.05917871856689453, 0.05927145767211914, 0.05930179214477539, 0.05934473419189453, 0.059336959838867186, 0.059344894409179685, 0.05942265701293945, 0.059516128540039064, 0.05962633514404297, 0.05986716842651367, 0.060006366729736325, 0.060063007354736325, 0.05998873519897461, 0.05992035293579102, 0.05978505706787109, 0.05945564651489258, 0.05929305648803711, 0.059425407409667966, 0.059438240051269534, 0.059312992095947266, 0.05945468902587891, 0.05929859161376953, 0.059478015899658204, 0.05952716827392578, 0.05943203353881836, 0.05965475082397461, 0.05998732757568359, 0.060187583923339845, 0.06010815811157227, 0.06012985610961914, 0.059985984802246095, 0.05999411010742187, 0.05989580917358398, 0.059719680786132816, 0.05968620681762695, 0.05953196716308594, 0.05955583953857422, 0.05955136108398437, 0.059627742767333985, 0.05952671813964844, 0.0635392951965332, 0.060434337615966796, 0.0594738883972168, 0.05931836700439453, 0.059291648864746097, 0.059271167755126954, 0.05908591842651367, 0.05913692855834961, 0.05907791900634766, 0.059170528411865236, 0.05911616134643555, 0.05930841445922851, 0.05917452621459961, 0.059246177673339846, 0.059234622955322266, 0.059363807678222656, 0.059805694580078124, 0.06017843246459961, 0.05987855911254883, 0.05971760177612305, 0.05970758438110352, 0.059512672424316404, 0.059401054382324216, 0.05934009552001953, 0.05926982498168945, 0.05928889465332031, 0.05932860946655273, 0.05931804656982422, 0.05925116729736328, 0.0592979850769043, 0.059340641021728514, 0.059365695953369144, 0.05964492797851562, 0.05971795272827148, 0.05967327880859375, 0.05967603302001953, 0.059533950805664065, 0.05948147201538086, 0.059375648498535154, 0.05940079879760742, 0.05915372848510742, 0.05914831924438477, 0.059120288848876955, 0.059033599853515625, 0.05899433517456055, 0.05902550506591797, 0.059205886840820315, 0.05920767974853516, 0.05950048065185547, 0.05970131301879883, 0.06003046417236328, 0.06007244873046875, 0.06010265731811523, 0.05976406478881836, 0.05990467071533203, 0.059842559814453126, 0.059727264404296876, 0.05980332946777344, 0.059388832092285154, 0.05930758285522461, 0.05928553771972656, 0.05944566345214844, 0.059370689392089844, 0.06245580673217774, 0.05978521728515625, 0.058916862487792966, 0.0587386245727539, 0.05874630355834961, 0.059071006774902346, 0.05902140808105469, 0.059099136352539064, 0.059229278564453126, 0.059187744140625, 0.05915276718139648, 0.059116798400878905, 0.05902191925048828, 0.059092639923095706, 0.059183551788330076, 0.059305599212646484, 0.05949030303955078, 0.05964847946166992, 0.0594820785522461, 0.059252288818359374, 0.05941004943847656, 0.05933343887329102, 0.059246593475341794, 0.059291488647460935, 0.05924879837036133, 0.05938979339599609, 0.05925289535522461, 0.05927923202514648, 0.0594310417175293, 0.05950054550170898, 0.059331966400146485, 0.05937740707397461, 0.05943580627441406, 0.05953545761108398, 
0.05963776016235352, 0.05964799880981445, 0.05973798370361328, 0.059377792358398435, 0.05936332702636719, 0.059355136871337894, 0.05952716827392578, 0.05943296051025391, 0.059385185241699216, 0.05948262405395508, 0.05931161499023437, 0.05940291213989258, 0.059578369140625, 0.059495712280273436, 0.05949923324584961, 0.059908096313476565, 0.059738014221191404, 0.059819488525390624, 0.059623359680175785, 0.059800254821777345, 0.059478015899658204, 0.05943500900268555, 0.05948604965209961, 0.059539615631103514, 0.05959065628051758, 0.05969100952148437, 0.05963980865478516, 0.05948332977294922, 0.05961795043945312, 0.06289328002929688, 0.05986764907836914, 0.05911996841430664, 0.058947582244873044, 0.05892095947265625, 0.05902310562133789, 0.05901465606689453, 0.058983169555664065, 0.05904905700683594, 0.0591121597290039, 0.059148414611816406, 0.05912112045288086, 0.05901923370361328, 0.05918377685546875, 0.059127777099609376, 0.059551742553710936, 0.059952926635742185, 0.060023006439208985, 0.0594411506652832, 0.05955728149414063, 0.05928992080688476, 0.059113761901855466, 0.059146240234375, 0.059219200134277346, 0.05908118438720703, 0.059187488555908205, 0.059252288818359374, 0.05930368041992187, 0.05931897735595703, 0.05941452789306641, 0.0594505615234375, 0.05953004837036133, 0.05961011123657227, 0.05978214263916016, 0.059598175048828125, 0.05930255889892578, 0.059163745880126954, 0.05908777618408203, 0.05882265472412109, 0.05879788970947265, 0.05881782531738281, 0.05879011154174805, 0.05882060623168945, 0.05887251281738281, 0.05885708618164062, 0.05895814514160156, 0.058988574981689454, 0.05909097671508789, 0.05932358551025391, 0.059484062194824217, 0.0593636474609375, 0.05932297515869141, 0.059387073516845704, 0.059329345703125, 0.059224063873291016, 0.05922611236572266, 0.05907580947875977, 0.05906716918945312, 0.058926334381103514, 0.05891468811035156, 0.05903023910522461, 0.05895926284790039, 0.059003646850585935]",tokens/s,16.827996080625898,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 1248, in __init__ self.transformer = FalconModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in __init__ self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 721, in __init__ self.self_attention = FALCON_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 366, in __init__ self.query_key_value = FalconLinear(self.hidden_size, qkv_out_dim, bias=config.bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 450.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 424.12 MiB is free. Process 202556 has 14.32 GiB memory in use. Of the allocated memory 14.20 GiB is allocated by PyTorch, and 6.16 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,738.668544,3450.79808,0.0,3055.550464,2937.680896,s,1,7.21254931640625,7.21254931640625,0.0,7.21254931640625,7.21254931640625,7.21254931640625,7.21254931640625,[7.21254931640625],,kWh,7.65544858750123e-06,8.368047426373106e-07,2.206668432001846e-06,1.0698921762140386e-05,,MB,1104.44544,3522.101248,0.0,3114.27072,2817.473024,s,10,0.44898410034179687,0.04489841003417969,0.0006442349365678308,0.04467651176452637,0.045460602951049804,0.045966013526916503,0.046370341987609864,"[0.0464714241027832, 0.04416009521484375, 0.04534828948974609, 0.04467161560058594, 0.044681407928466796, 0.044641857147216794, 0.04422576141357422, 0.04450051116943359, 0.045274112701416014, 0.04500902557373047]",tokens/s,5701.760926614453,kWh,1.4554342901533722e-06,1.6050690768969044e-07,9.612100005870587e-07,2.577151198430121e-06,tokens/kWh,99334490.01981068,MB,1137.303552,3522.101248,0.0,3114.27072,2877.80864,s,10,11.548456054687499,1.15484560546875,0.004692471931810019,1.1568258666992188,1.159875244140625,1.1607574462890626,1.1614632080078124,"[1.15967919921875, 1.1498546142578125, 1.1500992431640624, 1.1482830810546876, 1.158281494140625, 1.1577117919921875, 1.1616396484375, 1.157550537109375, 1.1561011962890626, 1.1492552490234376]",tokens/s,54.55274687946568,kWh,3.3499842479430185e-05,3.6945752013604073e-06,2.2181919625012554e-05,5.937633730580316e-05,tokens/kWh,1061028.7339809134,,s,630,11.545618431091308,0.018326378462049698,0.00035041837722673565,0.018253119468688964,0.01856883583068848,0.018941068649291994,0.02004849069595337,"[0.019441120147705077, 0.018897504806518556, 0.018577791213989257, 0.018319360733032225, 0.01818009567260742, 0.01811395263671875, 0.0181376953125, 0.01805516815185547, 0.018792287826538086, 0.01797881507873535, 0.018034912109375, 0.018000383377075196, 0.01800707244873047, 0.018150367736816407, 0.018104032516479494, 0.018001855850219725, 0.018032703399658203, 0.017952640533447264, 0.0186841926574707, 0.019297632217407226, 0.018570016860961915, 0.018333023071289062, 0.018301567077636718, 0.018230464935302733, 0.018131231307983397, 0.018145856857299806, 0.01803059196472168, 0.018101503372192383, 0.018078208923339844, 0.018112768173217775, 0.018370559692382812, 0.018124671936035158, 0.018210079193115233, 0.01824166488647461, 0.018274112701416014, 0.018315168380737306, 0.01822003173828125, 0.018127967834472656, 0.018076576232910157, 0.02145484733581543, 0.02059833526611328, 0.01829692840576172, 0.018153823852539063, 0.018128896713256838, 0.018214879989624025, 0.018157535552978516, 0.018324800491333008, 0.018264223098754882, 0.018227071762084962, 0.01828883171081543, 0.018386720657348633, 0.01861299133300781, 
0.018579200744628908, 0.018620512008666993, 0.01860009574890137, 0.018563072204589845, 0.018582847595214842, 0.01859654426574707, 0.018521247863769533, 0.018420576095581054, 0.01840127944946289, 0.018359935760498047, 0.018319744110107422, 0.02004256057739258, 0.019277215957641602, 0.0187807674407959, 0.018503679275512695, 0.01824358367919922, 0.018319360733032225, 0.01814240074157715, 0.0180948486328125, 0.018171968460083007, 0.018112512588500978, 0.01799692726135254, 0.017949567794799805, 0.01803379249572754, 0.01827315139770508, 0.01845043182373047, 0.018017696380615233, 0.018100223541259765, 0.01859849548339844, 0.0181942081451416, 0.0182458553314209, 0.018163711547851562, 0.01802614402770996, 0.017993471145629884, 0.018073728561401367, 0.018119136810302736, 0.018112512588500978, 0.018167808532714845, 0.018083839416503905, 0.01804287910461426, 0.01805891227722168, 0.01837910461425781, 0.018259967803955078, 0.01809769630432129, 0.018040895462036133, 0.018178464889526368, 0.01814303970336914, 0.018370752334594728, 0.018167327880859375, 0.018139392852783202, 0.018094303131103516, 0.018212160110473632, 0.018203136444091796, 0.018085376739501953, 0.018096832275390624, 0.018192127227783204, 0.01819878387451172, 0.018238880157470702, 0.018201183319091797, 0.01816966438293457, 0.018155712127685547, 0.018137088775634767, 0.01826742362976074, 0.018295520782470702, 0.018316287994384766, 0.0182609920501709, 0.018274303436279296, 0.018302047729492187, 0.018383743286132813, 0.01833782386779785, 0.01823744010925293, 0.01825382423400879, 0.018253568649291993, 0.018254079818725587, 0.020050912857055663, 0.01904665565490723, 0.01877577590942383, 0.01840947151184082, 0.01841289520263672, 0.018205343246459962, 0.018056224822998047, 0.01803539276123047, 0.018009824752807616, 0.0179836483001709, 0.017973535537719725, 0.01800124740600586, 0.018036735534667968, 0.01798838424682617, 0.018008064270019532, 0.01805232048034668, 0.018002431869506837, 0.01808332824707031, 0.018016223907470704, 0.018058048248291016, 0.018325504302978517, 0.018069503784179687, 0.01802649688720703, 0.018067455291748045, 0.018092031478881835, 0.018182144165039063, 0.018155519485473632, 0.0182108154296875, 0.0195665283203125, 0.018267263412475587, 0.01817900848388672, 0.01825564765930176, 0.018129119873046873, 0.01823744010925293, 0.01798534393310547, 0.01826416015625, 0.018155168533325196, 0.018157855987548828, 0.018122880935668946, 0.01809187126159668, 0.018155647277832032, 0.018132095336914063, 0.01811756706237793, 0.018147552490234375, 0.01820444869995117, 0.018202207565307618, 0.018227615356445313, 0.01823904037475586, 0.0181907844543457, 0.018153472900390624, 0.018343551635742188, 0.018272287368774415, 0.01823094367980957, 0.018311872482299804, 0.018300928115844727, 0.018350080490112306, 0.01846067237854004, 0.018495487213134765, 0.01833683204650879, 0.018367424011230468, 0.018282495498657226, 0.018328704833984376, 0.01823798370361328, 0.019194976806640625, 0.018704416275024414, 0.01843494415283203, 0.018249727249145507, 0.01810207939147949, 0.01808118438720703, 0.01797961616516113, 0.017997983932495118, 0.018024864196777343, 0.01824470329284668, 0.018082719802856445, 0.01804697608947754, 0.018231231689453124, 0.018101343154907225, 0.018097120285034178, 0.01817190361022949, 0.01803664016723633, 0.01806959915161133, 0.018118656158447266, 0.018094079971313477, 0.01803264045715332, 0.018084991455078126, 0.018082687377929688, 0.01823744010925293, 0.01827596855163574, 0.018207103729248046, 0.01818623924255371, 0.018226207733154295, 
0.018109407424926758, 0.018171104431152343, 0.018326303482055665, 0.018145280838012694, 0.01807548713684082, 0.018100383758544922, 0.018056928634643556, 0.018112127304077148, 0.01824835205078125, 0.018120704650878908, 0.01811155128479004, 0.018160575866699218, 0.018122047424316407, 0.018115264892578125, 0.01810963249206543, 0.01811689567565918, 0.018221311569213867, 0.01827577590942383, 0.018258623123168945, 0.018294464111328124, 0.01824611282348633, 0.018186176300048828, 0.018133056640625, 0.01846451187133789, 0.018353567123413086, 0.018408287048339845, 0.01844793510437012, 0.018360767364501953, 0.01836182403564453, 0.018527776718139648, 0.018380992889404296, 0.018408256530761717, 0.018350080490112306, 0.018410816192626953, 0.018313087463378907, 0.019470464706420897, 0.01901523208618164, 0.018704832077026366, 0.02062335968017578, 0.018351903915405275, 0.018208255767822267, 0.01820537567138672, 0.018087711334228516, 0.01817011260986328, 0.01832476806640625, 0.018268735885620117, 0.018169824600219726, 0.01845471954345703, 0.018206720352172853, 0.018241535186767577, 0.018317312240600587, 0.01820582389831543, 0.018191232681274414, 0.01820057678222656, 0.01825779151916504, 0.018161792755126954, 0.018228736877441407, 0.018332160949707032, 0.018159616470336915, 0.018210432052612305, 0.01832979202270508, 0.018228607177734377, 0.018266944885253905, 0.018124351501464842, 0.018182207107543945, 0.018248064041137695, 0.01827840042114258, 0.018147327423095702, 0.01826576042175293, 0.018175872802734375, 0.01826243209838867, 0.018264127731323243, 0.018159616470336915, 0.018232416152954102, 0.018289567947387696, 0.018331615447998047, 0.018345279693603514, 0.018403711318969725, 0.018319520950317383, 0.018521791458129884, 0.018456064224243163, 0.018375680923461913, 0.018341888427734376, 0.01837401580810547, 0.018310848236083983, 0.018323999404907226, 0.018416128158569335, 0.018479007720947266, 0.018542591094970702, 0.018486751556396484, 0.01855855941772461, 0.018458719253540038, 0.01855574417114258, 0.018458303451538087, 0.018567487716674803, 0.01834716796875, 0.01833660888671875, 0.018450048446655272, 0.019408895492553712, 0.01905254364013672, 0.018589696884155273, 0.01841766357421875, 0.018388063430786132, 0.01826883125305176, 0.018311424255371092, 0.018264064788818358, 0.018297855377197265, 0.018291711807250977, 0.018176000595092775, 0.018231296539306642, 0.018192384719848635, 0.018265792846679688, 0.018349632263183594, 0.018975488662719725, 0.01824563217163086, 0.0182959041595459, 0.018356672286987306, 0.01840176010131836, 0.018222272872924803, 0.01831808090209961, 0.018214975357055664, 0.018284543991088868, 0.01830076789855957, 0.018393184661865233, 0.018222368240356446, 0.01823209571838379, 0.01825584030151367, 0.01816988754272461, 0.018259967803955078, 0.018140512466430662, 0.018146976470947266, 0.01819340705871582, 0.018075424194335936, 0.018150848388671877, 0.01821776008605957, 0.018198528289794923, 0.018229248046875, 0.01823539161682129, 0.01824358367919922, 0.01840643119812012, 0.018381919860839844, 0.018252927780151366, 0.018303712844848632, 0.01835830307006836, 0.01846067237854004, 0.018499584197998048, 0.018276159286499023, 0.01839676856994629, 0.018418272018432616, 0.01855897521972656, 0.018593599319458008, 0.018501663208007814, 0.018515647888183592, 0.018444255828857423, 0.018643455505371095, 0.018642112731933592, 0.018447168350219728, 0.018384735107421876, 0.018410816192626953, 0.018413408279418945, 0.0184901123046875, 0.02029657554626465, 0.019533632278442382, 0.018962623596191407, 
0.018689504623413088, 0.018543039321899414, 0.018348127365112304, 0.018325504302978517, 0.018363967895507812, 0.018264511108398437, 0.01820159912109375, 0.0182794246673584, 0.018206016540527344, 0.018141887664794923, 0.018247007369995117, 0.01821558380126953, 0.01817724800109863, 0.018201375961303713, 0.018096128463745118, 0.018161184310913087, 0.01811020851135254, 0.018088672637939455, 0.018096063613891603, 0.018208831787109376, 0.01841971206665039, 0.01942639923095703, 0.018357152938842772, 0.018197919845581053, 0.0181847038269043, 0.018232608795166017, 0.018635583877563477, 0.01835759925842285, 0.018571680068969726, 0.018249504089355467, 0.018267871856689454, 0.018494207382202147, 0.018386528015136717, 0.018779808044433594, 0.018420480728149415, 0.01831260871887207, 0.018351871490478514, 0.01845737648010254, 0.018376319885253907, 0.018330047607421875, 0.018373695373535157, 0.01832441520690918, 0.01861631965637207, 0.018990816116333006, 0.018379039764404297, 0.018372608184814454, 0.018347583770751952, 0.018295232772827148, 0.01832111930847168, 0.018352415084838865, 0.01828236770629883, 0.01837273597717285, 0.01843404769897461, 0.01836636734008789, 0.018614368438720705, 0.01856870460510254, 0.01849395179748535, 0.01849475288391113, 0.018399967193603515, 0.018394975662231444, 0.02001456069946289, 0.019399200439453125, 0.018933536529541016, 0.018665439605712892, 0.018368543624877928, 0.01822537612915039, 0.018158815383911134, 0.018148128509521484, 0.01816192054748535, 0.018150400161743165, 0.018252159118652345, 0.018279903411865233, 0.018332351684570314, 0.018387168884277345, 0.01839891242980957, 0.01830940818786621, 0.018393119812011718, 0.018561023712158203, 0.01826201629638672, 0.01844223976135254, 0.01820876884460449, 0.018141183853149414, 0.018108287811279298, 0.018130943298339842, 0.019060863494873046, 0.018253311157226563, 0.018081663131713867, 0.01808857536315918, 0.018181888580322266, 0.018157184600830076, 0.018258432388305663, 0.018249120712280274, 0.018106592178344726, 0.019118080139160155, 0.01835372734069824, 0.018269119262695314, 0.018288639068603514, 0.01816294479370117, 0.018143999099731446, 0.0181341438293457, 0.01825267219543457, 0.018364416122436524, 0.01829033660888672, 0.018319711685180665, 0.01847279930114746, 0.01838038444519043, 0.018395263671875, 0.018411968231201174, 0.018206720352172853, 0.018206464767456056, 0.018265535354614258, 0.01829555130004883, 0.018421823501586915, 0.01836796760559082, 0.018405920028686525, 0.01834297561645508, 0.01830393600463867, 0.018485248565673826, 0.018291807174682616, 0.01829318428039551, 0.018301408767700197, 0.018431999206542968, 0.01842790412902832, 0.019533344268798828, 0.01897318458557129, 0.01865727996826172, 0.018380800247192384, 0.018247360229492186, 0.01813478469848633, 0.01809056091308594, 0.018039072036743164, 0.018177440643310547, 0.01807097625732422, 0.018834304809570313, 0.020147552490234377, 0.01827702331542969, 0.018348031997680665, 0.0184682559967041, 0.01815932846069336, 0.018144128799438476, 0.018235328674316407, 0.01825388717651367, 0.01824563217163086, 0.018151424407958985, 0.018040256500244142, 0.01806710433959961, 0.018103200912475585, 0.01822105598449707, 0.01827596855163574, 0.01814156723022461, 0.0181343994140625, 0.01813350486755371, 0.018144800186157228, 0.018174560546875, 0.018233343124389647, 0.018116607666015624, 0.018184192657470705, 0.018323392868041993, 0.018362432479858398, 0.01835212707519531, 0.018298879623413086, 0.018182144165039063, 0.018431999206542968, 0.01894723129272461, 
0.01914147186279297, 0.01836851119995117, 0.01830431938171387, 0.018262815475463868, 0.018249536514282228, 0.01823321533203125, 0.01825200080871582, 0.018153472900390624, 0.0182108154296875, 0.01823744010925293, 0.018288639068603514, 0.018298784255981446, 0.018393184661865233, 0.018378400802612306, 0.01832940864562988, 0.018415136337280272, 0.018397344589233398, 0.018363231658935546, 0.01826201629638672, 0.018290687561035156, 0.018284543991088868, 0.018293792724609376, 0.02017750358581543, 0.01919308853149414, 0.018897632598876953, 0.018549983978271484, 0.018272735595703124, 0.018253536224365235, 0.01811721611022949, 0.018184032440185547, 0.01820022392272949, 0.018203136444091796, 0.018100223541259765, 0.01820364761352539, 0.018240575790405274, 0.018229183197021485, 0.018308927536010742, 0.018093568801879883, 0.018043519973754883, 0.018061376571655272, 0.01800720024108887, 0.018025344848632812, 0.018081504821777342, 0.018102527618408203, 0.01818009567260742, 0.01814851188659668, 0.01812156867980957, 0.018233343124389647, 0.018110464096069336, 0.018069503784179687, 0.018046464920043945, 0.018092544555664062, 0.018114559173583983, 0.018149375915527344, 0.018077695846557617, 0.018104320526123048, 0.01807155227661133, 0.018167808532714845, 0.01809328079223633, 0.01814159965515137, 0.0180731201171875, 0.018094560623168946, 0.018055551528930663, 0.01819161605834961, 0.018098943710327147, 0.01811625671386719, 0.018104671478271484, 0.018173215866088867, 0.018162399291992187, 0.018185792922973634, 0.018135488510131834, 0.018181568145751954, 0.018158143997192382, 0.018241535186767577, 0.018251775741577148, 0.018284543991088868, 0.018279455184936524, 0.01827939224243164, 0.018284479141235353, 0.018391008377075194, 0.018400415420532227, 0.01830121612548828, 0.0182524471282959, 0.01845583915710449, 0.018370399475097655]",tokens/s,54.56615457717421,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,870.01088,15021.768704,0.0,14619.246592,14483.4816,s,1,7.67358154296875,7.67358154296875,0.0,7.67358154296875,7.67358154296875,7.67358154296875,7.67358154296875,[7.67358154296875],,kWh,8.496743091700409e-06,9.299370975366716e-07,5.0469484820098165e-06,1.4473628671246897e-05,,MB,1309.560832,15145.500672,0.0,14730.395648,14577.604608,s,10,2.0572863464355473,0.20572863464355468,0.005640822644832718,0.20692342376708983,0.2109012680053711,0.2116706199645996,0.21228610153198244,"[0.1924845733642578, 0.20853797912597657, 0.21073030090332032, 0.20530886840820312, 0.20300706481933595, 0.20878851318359376, 0.21058546447753906, 0.20443014526367187, 0.2009734649658203, 
0.21243997192382813]",tokens/s,1244.3576483339107,kWh,5.942886835666589e-06,6.549601546802756e-07,3.951358716640097e-06,1.0549205706986962e-05,tokens/kWh,24267229.88541647,MB,1365.590016,15162.277888,0.0,14745.075712,14577.607168,s,10,40.173794921875,4.0173794921875,0.0025171734521191825,4.016718383789062,4.021089379882812,4.021249890136719,4.021378298339844,"[4.01408056640625, 4.014662841796875, 4.01696240234375, 4.014686767578125, 4.016474365234375, 4.01627685546875, 4.021410400390625, 4.0210537109375, 4.019179931640625, 4.019007080078125]",tokens/s,15.681864290519371,kWh,0.00011753423446391721,1.296475796820141e-05,7.801595685715978e-05,0.00020851494928927843,tokens/kWh,302136.61041922896,,s,630,40.169983070373476,0.0637618778894818,0.0004805389926100402,0.06369876670837402,0.06414760971069336,0.06428606986999512,0.06667165161132813,"[0.06625526428222656, 0.06394070434570312, 0.06332124710083008, 0.06327939224243163, 0.06308524703979493, 0.06326844787597656, 0.06320579147338867, 0.06337503814697265, 0.06343004989624024, 0.06329964828491211, 0.06344918441772461, 0.06379753494262695, 0.06321200180053711, 0.06313779067993164, 0.06361884689331054, 0.06429827117919922, 0.06419347381591797, 0.06412902069091797, 0.06352188873291016, 0.06347459030151367, 0.06332006454467773, 0.06356991958618165, 0.06348390579223633, 0.06338748931884766, 0.06339599990844727, 0.06340812683105469, 0.06335692977905273, 0.06353100967407227, 0.06357331085205079, 0.0638713264465332, 0.06381139373779297, 0.06424188995361328, 0.06427782440185546, 0.06398006439208985, 0.0637701759338379, 0.06375414276123047, 0.06345830535888672, 0.06357353591918945, 0.06365436935424805, 0.0635676155090332, 0.06351804733276367, 0.06357686233520508, 0.06369087982177735, 0.06356512069702148, 0.06360134506225586, 0.06388889694213867, 0.06387766265869141, 0.06422083282470703, 0.06411500549316407, 0.06393062210083007, 0.0637765121459961, 0.06385993576049805, 0.06358095932006835, 0.06373785781860351, 0.06378499221801757, 0.06388246536254882, 0.06374614334106446, 0.06357376098632812, 0.06361312103271484, 0.06373187255859375, 0.06397292709350585, 0.06409113311767578, 0.0640880355834961, 0.06682585906982422, 0.06421052551269531, 0.06344988632202149, 0.06329260635375976, 0.06320790481567383, 0.06334499359130859, 0.06316393661499023, 0.06340611267089843, 0.06318044662475586, 0.0633229103088379, 0.06326271820068359, 0.06329059219360351, 0.0632696647644043, 0.06323548889160156, 0.06349801635742187, 0.064146240234375, 0.06428876495361328, 0.06396928024291992, 0.06363504028320313, 0.06349846267700196, 0.0633694076538086, 0.06349209594726563, 0.0635596809387207, 0.06358835220336914, 0.06334460830688476, 0.06338972854614258, 0.0633098258972168, 0.06347366333007813, 0.06370089721679688, 0.06383561706542969, 0.06388390350341797, 0.06409209442138672, 0.06384441757202149, 0.06370918273925781, 0.06380073547363281, 0.06393507385253906, 0.06372111892700195, 0.06371158218383789, 0.06354870223999023, 0.0635374412536621, 0.06342601776123047, 0.06369174575805664, 0.06360678482055664, 0.06374307250976563, 0.06371129608154297, 0.06392508697509766, 0.06387097549438477, 0.06438706970214844, 0.06407968139648437, 0.06397283172607422, 0.06381027221679687, 0.06379024124145508, 0.06369913482666016, 0.06391875076293946, 0.06380134582519531, 0.06374160003662109, 0.06358870315551758, 0.06364108657836914, 0.06366790390014648, 0.06389228820800781, 0.06386620712280273, 0.06398838424682617, 0.06411811065673828, 0.06720537567138672, 0.0642940444946289, 0.0633942413330078, 
0.06327132797241211, 0.06322175979614258, 0.06323916625976563, 0.06339276885986328, 0.0633620491027832, 0.06333132934570312, 0.06339993667602539, 0.06325452804565429, 0.06342351913452149, 0.06494409942626952, 0.06348688125610352, 0.06376867294311524, 0.06426214599609376, 0.06423551940917968, 0.06388531112670899, 0.0633177604675293, 0.06349203109741211, 0.06343711853027344, 0.06335488128662109, 0.06343065643310547, 0.06332553482055664, 0.06334531021118164, 0.06336921691894531, 0.0635104637145996, 0.06345734405517578, 0.06332777786254883, 0.06359088134765625, 0.06390921783447266, 0.06396790313720703, 0.0638809928894043, 0.06405142211914062, 0.06368460845947266, 0.06375423812866211, 0.06345833587646485, 0.06363030242919922, 0.06356787109375, 0.06374323272705078, 0.06370489501953125, 0.06360099029541015, 0.06355414581298828, 0.06374591827392578, 0.06354889678955078, 0.06385078430175781, 0.06406527709960938, 0.06406806182861328, 0.06417424011230469, 0.06397132873535157, 0.06381481552124023, 0.06380387115478516, 0.0637336654663086, 0.06385676956176758, 0.06395529556274414, 0.06412287902832031, 0.06361872100830078, 0.06364195251464844, 0.06364755249023438, 0.0639994888305664, 0.06384505462646485, 0.06400726318359375, 0.06424463653564454, 0.06671625518798828, 0.06417443084716797, 0.06349615859985351, 0.06346960067749023, 0.06322556686401368, 0.0633961296081543, 0.06321670532226563, 0.06351148986816406, 0.06330368041992188, 0.06353919982910156, 0.06337648010253906, 0.06337206268310547, 0.06342873764038086, 0.06345651245117187, 0.06350310516357421, 0.06402457427978515, 0.06396518325805664, 0.0640184326171875, 0.06375987243652344, 0.06360729598999024, 0.06339788818359375, 0.06346518325805664, 0.06342854309082031, 0.06360303878784179, 0.06335657501220703, 0.06350844955444336, 0.06331020736694336, 0.06335283279418945, 0.06352681732177734, 0.06357318496704102, 0.06413523101806641, 0.06396195220947265, 0.06367756652832031, 0.06379100799560547, 0.06388630294799805, 0.06367798233032226, 0.06352486419677734, 0.06380361557006836, 0.0637463035583496, 0.06350233459472657, 0.06342655944824219, 0.06346752166748047, 0.0636313591003418, 0.0637723503112793, 0.06353731155395508, 0.06372576141357422, 0.06356374359130859, 0.0639365119934082, 0.06404198455810547, 0.06404812622070312, 0.0640481948852539, 0.06400300598144532, 0.06361264038085937, 0.06372771072387695, 0.06409235382080078, 0.06394854354858398, 0.06367462539672851, 0.0636948471069336, 0.0636781120300293, 0.06395119857788086, 0.06380121612548828, 0.06402470397949218, 0.06408396911621093, 0.06663139343261719, 0.06405107116699219, 0.06342489624023437, 0.06344217681884766, 0.06327321624755859, 0.06334009552001953, 0.06339065551757812, 0.06339539337158204, 0.06335049438476563, 0.06337609481811524, 0.06340607833862305, 0.06344908905029296, 0.06328729629516601, 0.06337740707397461, 0.06382377624511719, 0.06391984176635743, 0.06392243194580079, 0.06387843322753907, 0.06355235290527343, 0.06366361618041992, 0.06349465560913085, 0.06341737747192383, 0.06324732971191406, 0.06365798568725586, 0.06344294357299805, 0.06348185729980468, 0.0634593276977539, 0.06346460723876954, 0.06338761520385743, 0.06362355041503906, 0.0639820785522461, 0.06407782745361328, 0.06426624298095703, 0.0638884162902832, 0.06355996704101563, 0.06374246215820313, 0.06376793670654297, 0.06402950286865235, 0.06365571212768555, 0.06375241470336913, 0.06349008178710938, 0.06386479949951172, 0.06352028656005859, 0.0638933448791504, 0.06401292419433594, 0.06396499252319336, 0.06387116622924804, 
0.06384368133544922, 0.06379948806762695, 0.06387516784667968, 0.06425437164306641, 0.06411670684814454, 0.06360063934326173, 0.06371260833740235, 0.06368848037719727, 0.06399270248413086, 0.06374809646606446, 0.06369839859008788, 0.06371353530883789, 0.06389814376831054, 0.06378470230102538, 0.06400204467773438, 0.0643741455078125, 0.06671977233886718, 0.0644136962890625, 0.06358531188964844, 0.0639161605834961, 0.06316320037841797, 0.06324745559692382, 0.06330873489379883, 0.0632910385131836, 0.06332451248168945, 0.06338281631469726, 0.0633658561706543, 0.06347161483764649, 0.06318483352661133, 0.0632912940979004, 0.06363564682006836, 0.0637929916381836, 0.06456742095947265, 0.0643828125, 0.06378307342529296, 0.0635055046081543, 0.06382479858398438, 0.06330153656005859, 0.06361468887329101, 0.06356364822387696, 0.0633000946044922, 0.06353004837036133, 0.06346233749389649, 0.06354534530639648, 0.06323574447631836, 0.06376051330566407, 0.06392649459838867, 0.0643604507446289, 0.06412230682373046, 0.06385091018676758, 0.06370640182495117, 0.06375308990478516, 0.06357132720947266, 0.06365033721923828, 0.06339184188842774, 0.06348287963867187, 0.06336000061035156, 0.06351190567016601, 0.06346614456176758, 0.06356486511230469, 0.06370195388793945, 0.06396108627319336, 0.06401952362060546, 0.06406352233886718, 0.06395177459716797, 0.06432669067382812, 0.06394553756713867, 0.063998046875, 0.0637535057067871, 0.06366902542114258, 0.06358988952636718, 0.06385868835449218, 0.0638259506225586, 0.06387532806396484, 0.06381180953979493, 0.06373068618774413, 0.06364262390136718, 0.06394265747070313, 0.0640348129272461, 0.06668809509277343, 0.06410342407226563, 0.06344294357299805, 0.06337740707397461, 0.06329548645019531, 0.06326176071166992, 0.06322476959228515, 0.06339788818359375, 0.0634815673828125, 0.06346358489990235, 0.06341439819335938, 0.0636313591003418, 0.06325190353393555, 0.06346809768676757, 0.06367232131958007, 0.06416690826416016, 0.06409523010253906, 0.06400409698486328, 0.0637599983215332, 0.06360268783569335, 0.06333865737915038, 0.06367868804931641, 0.06360249710083007, 0.06372364807128907, 0.063866943359375, 0.0635494728088379, 0.06353494262695313, 0.06357577514648438, 0.06361286544799805, 0.06394518280029297, 0.06422732543945313, 0.0643705291748047, 0.06427664184570313, 0.06387631988525391, 0.06393507385253906, 0.06396047973632812, 0.06365785598754883, 0.06384118270874023, 0.06359449768066407, 0.06367843246459962, 0.0638884162902832, 0.06391705703735352, 0.06381977462768555, 0.06370489501953125, 0.06359468841552735, 0.06388121414184571, 0.06394060897827149, 0.06466355133056641, 0.06409600067138672, 0.06394496154785156, 0.06385865783691407, 0.0637519989013672, 0.06378927993774414, 0.0639447021484375, 0.06377660751342773, 0.06397763061523437, 0.06377791976928711, 0.06382032012939454, 0.06382831954956054, 0.06416998291015626, 0.06379724884033203, 0.06425804901123047, 0.06417724609375, 0.06657609558105469, 0.064251708984375, 0.06356006240844726, 0.06348502349853516, 0.06324326324462891, 0.06335692977905273, 0.06340812683105469, 0.06341382217407227, 0.06338710403442382, 0.0636602897644043, 0.06340681457519531, 0.06342860794067383, 0.06337535858154297, 0.06344908905029296, 0.06369683074951171, 0.06437808227539063, 0.06391219329833984, 0.06380604934692383, 0.06369689559936523, 0.06361446380615235, 0.06366259384155273, 0.06366003036499024, 0.06347571182250976, 0.06363343811035156, 0.06364140701293945, 0.06369472122192382, 0.06360246276855469, 0.0639984016418457, 0.06368262481689453, 
0.0637540168762207, 0.06402649688720703, 0.06391843032836914, 0.06371030426025391, 0.0640252456665039, 0.06399110412597656, 0.06384940719604493, 0.06362243270874024, 0.06365667343139648, 0.06368588638305664, 0.06401100921630859, 0.0637050895690918, 0.06367232131958007, 0.0636129264831543, 0.06373513412475586, 0.06369142532348633, 0.0639447021484375, 0.06419865417480469, 0.06433558654785156, 0.06389913558959961, 0.06417282867431641, 0.0638546257019043, 0.06394879913330079, 0.0637787857055664, 0.06394828796386719, 0.06395241546630859, 0.06410134124755859, 0.06379724884033203, 0.06371027374267578, 0.06391852951049805, 0.06410089874267579, 0.06400201416015625, 0.0641712646484375, 0.06400383758544922, 0.06680732727050781, 0.0643075180053711, 0.063498046875, 0.0634125747680664, 0.06329254531860351, 0.06334710311889648, 0.06337728118896484, 0.06342499160766601, 0.06349606323242188, 0.06357427215576172, 0.06343270492553711, 0.06335488128662109, 0.06304950332641601, 0.06343452835083008, 0.06360518264770508, 0.06450518035888672, 0.06429942321777343, 0.06390195083618164, 0.06370918273925781, 0.06347366333007813, 0.06343209457397461, 0.06395676803588868, 0.06338774490356446, 0.06335152053833008, 0.06340403366088868, 0.06347126388549805, 0.06350841522216796, 0.06359446334838867, 0.0636932487487793, 0.06377881622314453, 0.06407107543945313, 0.06393916702270508, 0.06387231826782226, 0.06384710311889648, 0.06372342300415039, 0.06385990524291993, 0.06368963241577148, 0.06368255996704102, 0.06367027282714843, 0.06352860641479492, 0.06339171218872071, 0.06380278396606445, 0.06371424102783203, 0.06366361618041992, 0.0637988166809082, 0.06390886306762696, 0.06385459136962891, 0.06432972717285156, 0.06416492462158203, 0.06428562927246094, 0.06428643035888672, 0.06407315063476562, 0.06356166458129883, 0.06405622100830079, 0.06406269073486329, 0.0639695053100586, 0.06353363037109375, 0.06356329727172852, 0.06368508911132813, 0.0638210563659668, 0.06417052459716797, 0.06428079986572266, 0.06407497406005859, 0.06696959686279297, 0.06391136169433594, 0.0633779525756836, 0.06329523086547852, 0.06324623870849609, 0.06351907348632813, 0.06351871871948242, 0.06339174270629883, 0.06329286575317383, 0.06331763076782226, 0.06325670242309571, 0.06345606231689453, 0.0633364486694336, 0.06363750457763671, 0.06398675155639648, 0.06422576141357422, 0.06389603042602539, 0.0637594223022461, 0.0634766731262207, 0.06343254470825195, 0.0637933120727539, 0.06346956634521485, 0.06337923049926758, 0.06351692962646484, 0.0635186882019043, 0.06369193649291992, 0.06353561782836914, 0.06357427215576172, 0.06354307174682618, 0.06387289428710938, 0.06426611328125, 0.06449177551269532, 0.06397955322265625, 0.06396672058105468, 0.06383081436157227, 0.06380467224121093, 0.0636396484375, 0.06397132873535157, 0.06385520172119141, 0.06387273788452148, 0.0637402572631836, 0.0637583351135254, 0.06375423812866211, 0.06375628662109376, 0.06385001754760743, 0.06420220947265624, 0.0641599349975586, 0.06412313842773437, 0.06412297821044922, 0.0641041259765625, 0.06401055908203125, 0.06368304061889649, 0.06359171295166016, 0.06362163162231445, 0.06357632064819337, 0.0635568962097168, 0.06369708633422852, 0.06366054534912109, 0.06370713424682617, 0.06380134582519531, 0.06434188842773438, 0.06376256179809571, 0.06420403289794922]",tokens/s,15.683352390174202,, 
float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in <listcomp> [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 757, in __init__ self.block_sparse_moe = MixtralSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in __init__ self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in <listcomp> self.experts = 
nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 672, in __init__ self.w3 = nn.Linear(self.hidden_dim, self.ffn_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 100.12 MiB is free. Process 178953 has 14.64 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 49.54 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 
606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 374, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 116, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 98.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 123345 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 3.80 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1262, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in <listcomp> [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 797, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 402, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 80694 has 14.73 GiB memory in use. Of the allocated memory 12.27 GiB is allocated by PyTorch, and 2.34 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.378432,3354.329088,0.0,2959.081472,2942.567424,s,1,7.59777783203125,7.59777783203125,0.0,7.59777783203125,7.59777783203125,7.59777783203125,7.59777783203125,[7.59777783203125],,kWh,1.0297370037498391e-05,1.121980236473022e-06,4.496670263996749e-06,1.5916020537968165e-05,,MB,1163.268096,3547.267072,0.0,3139.436544,3105.830912,s,10,0.3488427925109863,0.03488427925109863,0.0011933682254219404,0.034781984329223636,0.03551321678161621,0.036841905021667475,0.037904855613708495,"[0.03817059326171875, 0.03403852844238281, 0.03426652908325195, 0.03401894378662109, 0.03521795272827148, 0.034831169128417966, 0.03476364898681641, 0.03480031967163086, 0.03368713760375976, 0.035047969818115234]",tokens/s,7338.54921173232,kWh,1.27505811024383e-06,1.406140795955646e-07,8.462379404541586e-07,2.2619101302935532e-06,tokens/kWh,113178678.75094402,MB,1196.326912,3589.210112,0.0,3181.379584,3162.0096,s,10,13.277565917968753,1.327756591796875,0.016408054098027577,1.3364856567382812,1.344683605957031,1.3461151794433592,1.3472604382324218,"[1.3368380126953125, 1.33793359375, 1.344365478515625, 1.33613330078125, 1.3231259765625, 1.30058984375, 1.3064454345703125, 1.3065584716796874, 1.338029052734375, 1.3475467529296874]",tokens/s,47.44845583085455,kWh,3.8813193528505906e-05,4.278620239660743e-06,2.502147719814582e-05,6.811329096631246e-05,tokens/kWh,924929.6151489523,,s,630,13.274697385787961,0.021070948231409464,0.00038994736390604915,0.021117759704589845,0.021469849586486817,0.021614044857025148,0.022120639095306396,"[0.021445440292358398, 0.02127846336364746, 0.02119500732421875, 0.023240703582763672, 0.021192703247070312, 0.021016576766967773, 0.02101158332824707, 0.02126265525817871, 0.02144927978515625, 0.021168127059936523, 0.021344255447387696, 0.02114905548095703, 0.02105926322937012, 0.020975711822509766, 0.020939552307128906, 0.021288671493530274, 0.02179475212097168, 0.021631391525268554, 0.021535808563232423, 0.02113801574707031, 0.020969823837280275, 0.021112255096435547, 0.02108448028564453, 0.021238016128540038, 0.021021984100341798, 0.021002559661865233, 0.021172191619873045, 0.020953439712524415, 0.020791391372680663, 0.020873376846313477, 0.02087299156188965, 0.020998144149780275, 0.021208831787109375, 0.021528736114501953, 0.0214399356842041, 0.021305215835571288, 0.02120585632324219, 0.02116399955749512, 0.021085664749145507, 0.02106985664367676, 0.021425920486450194, 0.021189088821411132, 0.021127328872680665, 0.020967456817626955, 0.020961408615112306, 0.02085273551940918, 0.020821535110473632, 0.020932863235473633, 0.020996192932128906, 0.021123327255249024, 0.021369728088378906, 0.021338560104370116, 
0.021809728622436523, 0.02177142333984375, 0.02173551940917969, 0.021191423416137695, 0.021202943801879884, 0.021178112030029297, 0.02120729637145996, 0.021104639053344726, 0.02106268882751465, 0.021034175872802735, 0.02096089553833008, 0.021179424285888673, 0.02136684799194336, 0.02144758415222168, 0.02149305534362793, 0.021473983764648437, 0.02106902313232422, 0.021183135986328126, 0.02105308723449707, 0.02142220878601074, 0.02127702331542969, 0.021182464599609374, 0.021155839920043946, 0.02141798400878906, 0.021146623611450196, 0.02104729652404785, 0.02111110305786133, 0.02112553596496582, 0.020848928451538087, 0.021029951095581055, 0.02156572723388672, 0.021389984130859376, 0.0213319034576416, 0.02118796730041504, 0.021289215087890626, 0.0210948486328125, 0.02120832061767578, 0.02116275215148926, 0.021296319961547853, 0.021080896377563475, 0.021213184356689452, 0.021012224197387696, 0.02101478385925293, 0.02110054397583008, 0.021182464599609374, 0.02101862335205078, 0.021123071670532227, 0.02170172882080078, 0.021394271850585938, 0.021262399673461913, 0.021137407302856445, 0.02130454444885254, 0.021207328796386718, 0.021219839096069337, 0.02138230323791504, 0.02120585632324219, 0.02138256072998047, 0.02109644889831543, 0.021119583129882814, 0.021368671417236328, 0.021295263290405275, 0.02111692810058594, 0.021106399536132813, 0.021463199615478514, 0.021354623794555664, 0.021303295135498047, 0.021209087371826172, 0.02117571258544922, 0.021078624725341798, 0.021149696350097655, 0.02117580795288086, 0.02182809638977051, 0.021103647232055663, 0.02117875289916992, 0.02110963249206543, 0.021383007049560546, 0.0211396484375, 0.021096416473388672, 0.021251136779785157, 0.021496768951416015, 0.021287967681884765, 0.021165023803710936, 0.021156991958618164, 0.021197696685791016, 0.021365983963012695, 0.021249984741210936, 0.021094655990600585, 0.021262304306030273, 0.021434944152832033, 0.02131155204772949, 0.021184511184692383, 0.021217279434204102, 0.02126643180847168, 0.02118828773498535, 0.021045568466186524, 0.021123071670532227, 0.021474496841430664, 0.02161337661743164, 0.02187264060974121, 0.021370880126953123, 0.021131263732910157, 0.021167903900146483, 0.021584096908569335, 0.021256223678588867, 0.02131350326538086, 0.021555200576782226, 0.021567487716674806, 0.021338111877441408, 0.021514240264892577, 0.02123366355895996, 0.021151744842529296, 0.021223232269287108, 0.021230783462524414, 0.02108435249328613, 0.02133024024963379, 0.0215314884185791, 0.021751455307006836, 0.02123513603210449, 0.022677120208740235, 0.022493280410766602, 0.02135536003112793, 0.02128691291809082, 0.021217279434204102, 0.021288864135742186, 0.021229663848876954, 0.021280960083007814, 0.021273504257202147, 0.021166175842285157, 0.021211936950683595, 0.02112719917297363, 0.02111859130859375, 0.021059968948364257, 0.020928512573242186, 0.021348352432250976, 0.02146099281311035, 0.02167193603515625, 0.021257247924804688, 0.021234464645385743, 0.021421087265014647, 0.02132476806640625, 0.021207040786743164, 0.02122137641906738, 0.021103679656982424, 0.021092735290527343, 0.021264959335327148, 0.021078016281127928, 0.020862464904785157, 0.021094112396240233, 0.020934656143188478, 0.020713663101196288, 0.02063327980041504, 0.021711807250976562, 0.021617759704589845, 0.021344383239746093, 0.021137311935424806, 0.02104591941833496, 0.0213505916595459, 0.021317632675170898, 0.021964351654052736, 0.02103107261657715, 0.02153923225402832, 0.021331199645996092, 0.021450719833374023, 0.02111964797973633, 
0.021041023254394532, 0.021153663635253905, 0.021084384918212892, 0.02099407958984375, 0.02088332748413086, 0.021297279357910155, 0.021346303939819337, 0.021220703125, 0.02121708869934082, 0.021093023300170897, 0.021053119659423827, 0.021240320205688477, 0.02106572723388672, 0.021048416137695314, 0.020918560028076173, 0.021196928024291992, 0.021336576461791993, 0.021102592468261717, 0.02110873603820801, 0.021157503128051758, 0.021155935287475586, 0.021007904052734373, 0.02088217544555664, 0.021341920852661133, 0.021491327285766602, 0.021435039520263672, 0.021211135864257814, 0.02112512016296387, 0.021186784744262697, 0.021044063568115234, 0.021622720718383788, 0.021212160110473634, 0.021292192459106445, 0.021223615646362305, 0.02123632049560547, 0.021653568267822266, 0.02138585662841797, 0.021348352432250976, 0.02147123146057129, 0.021354496002197267, 0.02145280075073242, 0.021336063385009766, 0.021436416625976562, 0.02143436813354492, 0.02126643180847168, 0.021192703247070312, 0.021247135162353516, 0.02115078353881836, 0.021067327499389648, 0.021122528076171876, 0.02129996871948242, 0.0211878719329834, 0.02115452766418457, 0.02101862335205078, 0.021102592468261717, 0.021164031982421876, 0.021204992294311522, 0.021137247085571288, 0.021241439819335937, 0.02109903907775879, 0.02092198371887207, 0.020760992050170898, 0.020862016677856445, 0.020826271057128906, 0.020798240661621094, 0.020998144149780275, 0.02087468719482422, 0.02091587257385254, 0.02099635124206543, 0.020820640563964845, 0.020769920349121094, 0.02072006416320801, 0.020707775115966796, 0.020707328796386718, 0.020762624740600585, 0.02097737693786621, 0.020985599517822265, 0.020823808670043947, 0.020812576293945312, 0.02088140869140625, 0.020736000061035157, 0.02105958366394043, 0.02088742446899414, 0.020717695236206056, 0.020724767684936522, 0.02059676742553711, 0.020926816940307617, 0.02075094413757324, 0.020752384185791017, 0.020864223480224608, 0.020947744369506836, 0.021319679260253906, 0.02101353645324707, 0.020753376007080077, 0.020569440841674804, 0.020750911712646484, 0.02082953643798828, 0.020912191390991212, 0.020945600509643555, 0.020919456481933593, 0.020802431106567383, 0.02071958351135254, 0.02072313690185547, 0.020562496185302735, 0.0205515193939209, 0.020530847549438475, 0.02050444793701172, 0.02077350425720215, 0.020727807998657227, 0.020592607498168946, 0.020575647354125978, 0.020572799682617188, 0.020580352783203124, 0.020647008895874022, 0.020542367935180664, 0.02045916748046875, 0.020492639541625977, 0.020590431213378908, 0.020573503494262697, 0.020677024841308594, 0.020574655532836914, 0.02047385597229004, 0.02051481628417969, 0.021423999786376952, 0.020484224319458007, 0.020592639923095703, 0.02043894386291504, 0.020627552032470704, 0.020736000061035157, 0.020653312683105468, 0.020526880264282225, 0.020646879196166992, 0.02092995262145996, 0.020655839920043946, 0.0206177921295166, 0.02049465560913086, 0.02050614356994629, 0.020902368545532228, 0.020700416564941405, 0.02054956817626953, 0.020578880310058594, 0.02075052833557129, 0.02061235237121582, 0.020673343658447266, 0.020553728103637696, 0.02060697555541992, 0.020692991256713866, 0.020727807998657227, 0.02063279914855957, 0.02056243133544922, 0.0205130558013916, 0.020463008880615235, 0.020629440307617188, 0.020705951690673827, 0.020516319274902342, 0.020511039733886717, 0.0204781436920166, 0.02058448028564453, 0.02067356872558594, 0.021414688110351562, 0.020586368560791015, 0.02069536018371582, 0.02114121627807617, 0.020877599716186523, 
0.020788415908813477, 0.020566783905029296, 0.020602943420410157, 0.020592639923095703, 0.020527103424072265, 0.02058995246887207, 0.020757055282592772, 0.020500383377075194, 0.021074079513549806, 0.022122272491455076, 0.02099404716491699, 0.020866847991943358, 0.0208470401763916, 0.02071673583984375, 0.020874048233032228, 0.020932607650756836, 0.020723072052001953, 0.020683391571044922, 0.020619264602661135, 0.021149280548095704, 0.02116383934020996, 0.020619871139526368, 0.020707328796386718, 0.020547584533691408, 0.020711423873901368, 0.020536447525024416, 0.02109324836730957, 0.020639167785644532, 0.02060076713562012, 0.020574047088623048, 0.020548223495483398, 0.02056947135925293, 0.02063567924499512, 0.020515424728393555, 0.020586496353149415, 0.02053036880493164, 0.020601823806762697, 0.020588544845581053, 0.02065932846069336, 0.020699520111083985, 0.020656576156616212, 0.020621055603027343, 0.02063759994506836, 0.02066912078857422, 0.020679967880249023, 0.02057164764404297, 0.02054390335083008, 0.020552223205566406, 0.020550975799560545, 0.020617919921875, 0.020574207305908202, 0.0205963191986084, 0.020729503631591796, 0.020886272430419923, 0.020844127655029295, 0.020903615951538085, 0.020976127624511717, 0.020999679565429686, 0.020793952941894532, 0.0205927677154541, 0.02053046417236328, 0.02078713607788086, 0.02079545593261719, 0.020638879776000978, 0.020562335968017577, 0.0204968318939209, 0.020493375778198243, 0.020574304580688478, 0.020490751266479493, 0.020548959732055665, 0.020580543518066406, 0.020521696090698243, 0.020641056060791016, 0.020594560623168945, 0.02058950424194336, 0.020614559173583985, 0.02038435173034668, 0.02065203285217285, 0.02049967956542969, 0.02053753662109375, 0.020516576766967772, 0.020607200622558594, 0.021666048049926757, 0.02067292785644531, 0.02049804878234863, 0.020607263565063476, 0.020562015533447265, 0.020594688415527345, 0.02305574417114258, 0.021284479141235352, 0.020751359939575196, 0.020789247512817383, 0.02068889617919922, 0.02061884880065918, 0.020610784530639647, 0.02059436798095703, 0.02063052749633789, 0.02068182373046875, 0.02062393569946289, 0.02066876792907715, 0.020573408126831054, 0.020601535797119142, 0.02051900863647461, 0.0206561279296875, 0.02062303924560547, 0.020816192626953126, 0.021222623825073242, 0.02064259147644043, 0.020923391342163086, 0.021956960678100587, 0.021058208465576173, 0.020938751220703124, 0.020699136734008788, 0.020631103515625, 0.020740543365478516, 0.020968767166137697, 0.02064886474609375, 0.020590368270874022, 0.020702943801879883, 0.020609312057495117, 0.020641792297363282, 0.020584447860717774, 0.020641792297363282, 0.02059878349304199, 0.020643775939941406, 0.02055379295349121, 0.020573919296264648, 0.022116640090942382, 0.02077440071105957, 0.020676704406738283, 0.02059679985046387, 0.020631807327270508, 0.020523103713989257, 0.020547903060913086, 0.020552543640136717, 0.020781919479370116, 0.021319360733032228, 0.02127440071105957, 0.02079497528076172, 0.020624095916748048, 0.020623199462890623, 0.02060326385498047, 0.020566015243530272, 0.020602848052978514, 0.020770847320556642, 0.02072719955444336, 0.020900447845458983, 0.021436416625976562, 0.021220800399780273, 0.021301824569702147, 0.021370880126953123, 0.021463359832763672, 0.022995840072631835, 0.02266828727722168, 0.021261632919311522, 0.021561952590942384, 0.021737375259399415, 0.021401248931884765, 0.02136899185180664, 0.021493919372558595, 0.021702688217163087, 0.021547040939331054, 0.02132905578613281, 0.02117510414123535, 
0.021421663284301756, 0.02144118309020996, 0.02173516845703125, 0.021614591598510743, 0.02132921600341797, 0.0214649600982666, 0.021336799621582032, 0.02150614356994629, 0.02145859146118164, 0.021423967361450195, 0.02159872055053711, 0.02167724800109863, 0.021338943481445313, 0.021211135864257814, 0.021292512893676757, 0.021432863235473634, 0.021393407821655275, 0.021398815155029297, 0.02126425552368164, 0.021445472717285155, 0.02123776054382324, 0.02138924789428711, 0.021485631942749023, 0.02138096046447754, 0.021364896774291993, 0.021419103622436524, 0.021246047973632814, 0.02134099197387695, 0.021809152603149414, 0.02146054458618164, 0.021373056411743165, 0.0212457275390625, 0.021385759353637696, 0.021511743545532227, 0.02127302360534668, 0.021307071685791015, 0.02141983985900879, 0.021469696044921875, 0.021266111373901365, 0.021385536193847657, 0.021433408737182618, 0.021534751892089844, 0.02166671943664551, 0.02140332794189453, 0.021264543533325197, 0.021381248474121095, 0.02139753532409668, 0.0212807674407959, 0.021340160369873046, 0.021300928115844726, 0.021461343765258788, 0.02139132881164551, 0.021485183715820314, 0.02143680000305176, 0.02128281593322754, 0.021356544494628905, 0.021147647857666017, 0.021593984603881837, 0.021333536148071288, 0.021547615051269533, 0.02127020835876465, 0.02130758476257324, 0.02129913520812988, 0.021469375610351563, 0.021209087371826172, 0.02138038444519043, 0.021467039108276367, 0.02131769561767578, 0.021439231872558594, 0.021608448028564452, 0.021415935516357423, 0.021550752639770507, 0.021209056854248048, 0.02158355140686035, 0.021309823989868165, 0.021288320541381835, 0.021318368911743164, 0.02134448051452637, 0.02135856056213379, 0.021313568115234376, 0.021553375244140624, 0.02132086372375488, 0.02134489631652832, 0.021372928619384765, 0.021188608169555666, 0.021279775619506835]",tokens/s,47.458708977764346,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.153152,3354.329088,0.0,2959.081472,2942.567424,s,1,7.5465380859375,7.5465380859375,0.0,7.5465380859375,7.5465380859375,7.5465380859375,7.5465380859375,[7.5465380859375],,kWh,1.0555499091666812e-05,1.1571333836274744e-06,4.908337259995621e-06,1.6620969735289905e-05,,MB,1184.321536,3547.267072,0.0,3139.436544,3105.830912,s,10,0.35119199752807617,0.03511919975280762,0.0013828462358450484,0.03462977600097656,0.03707234077453613,0.037395865821838375,0.037654685859680175,"[0.037719390869140626, 0.03700044631958008, 0.03378755187988281, 0.03475555038452149, 0.03391804885864258, 0.033634750366210935, 0.03446249771118164, 0.034675006866455076, 0.034584545135498045, 
0.03665420913696289]",tokens/s,7289.459947888875,kWh,1.2789389064228766e-06,1.4104498809024023e-07,8.523442044366632e-07,2.27232809894978e-06,tokens/kWh,112659787.16643849,MB,1217.888256,3589.210112,0.0,3181.379584,3162.0096,s,10,13.4394306640625,1.3439430664062502,0.00897286271096474,1.3480936889648438,1.3514184814453125,1.3525001953125,1.35336556640625,"[1.3511781005859376, 1.3503336181640626, 1.346485595703125, 1.35000439453125, 1.324740966796875, 1.3322947998046875, 1.3377889404296874, 1.3497017822265625, 1.3535819091796875, 1.343320556640625]",tokens/s,46.87698577028576,kWh,3.866066860732683e-05,4.263842917494485e-06,2.491350974156387e-05,6.783802126638519e-05,tokens/kWh,928682.7478739787,,s,630,13.436337451934827,0.021327519764975898,0.00045949658176490496,0.02128881549835205,0.02156562919616699,0.021789492511749265,0.02252170030593872,"[0.025484832763671875, 0.02147990417480469, 0.021353759765625, 0.021735328674316406, 0.021275455474853516, 0.02188467216491699, 0.021528831481933595, 0.021968896865844727, 0.02162892723083496, 0.021597440719604493, 0.020981536865234376, 0.02120137596130371, 0.02117683219909668, 0.020975360870361327, 0.021235456466674806, 0.021336576461791993, 0.021432319641113282, 0.021293279647827148, 0.021233728408813476, 0.021144575119018554, 0.02168191909790039, 0.022072288513183595, 0.021098495483398438, 0.021356544494628905, 0.021727231979370116, 0.021579776763916016, 0.021536415100097656, 0.021212959289550783, 0.021146175384521484, 0.021219551086425783, 0.02208745574951172, 0.02122956848144531, 0.02129715156555176, 0.021393760681152344, 0.02120035171508789, 0.021419872283935548, 0.02137481689453125, 0.021351167678833008, 0.02127027130126953, 0.02118560028076172, 0.02111788749694824, 0.021141504287719725, 0.02103500747680664, 0.021503904342651366, 0.022295808792114256, 0.02170128059387207, 0.02116559982299805, 0.021156511306762694, 0.021403200149536134, 0.021295520782470705, 0.02132988739013672, 0.02112828826904297, 0.021343008041381836, 0.021262432098388673, 0.02148467254638672, 0.021503040313720703, 0.02147292709350586, 0.02125644874572754, 0.021250175476074218, 0.02117955207824707, 0.02097545623779297, 0.02098636817932129, 0.021465311050415038, 0.021350112915039063, 0.021180767059326172, 0.021174272537231444, 0.022502527236938477, 0.021385408401489257, 0.021258943557739256, 0.021153791427612305, 0.021301504135131835, 0.021169919967651368, 0.021202943801879884, 0.021227519989013673, 0.021001247406005858, 0.021526655197143554, 0.021387935638427735, 0.02149836730957031, 0.021564960479736328, 0.021033119201660157, 0.02125004768371582, 0.021086208343505858, 0.02129305648803711, 0.021259584426879884, 0.021373632431030274, 0.02117148780822754, 0.021404287338256837, 0.021133407592773438, 0.021807104110717773, 0.027393056869506837, 0.021236703872680663, 0.02124185562133789, 0.021465087890625, 0.021356544494628905, 0.021358591079711914, 0.021346303939819337, 0.021223424911499023, 0.021172224044799806, 0.021239295959472656, 0.02128771209716797, 0.02181011199951172, 0.021377824783325197, 0.021215263366699218, 0.021365760803222656, 0.02114364814758301, 0.02119465637207031, 0.021420160293579103, 0.021414688110351562, 0.02130073547363281, 0.021100191116333007, 0.02103798484802246, 0.02104934310913086, 0.021387264251708983, 0.022521631240844726, 0.021280704498291017, 0.02119708824157715, 0.021329919815063478, 0.02126028823852539, 0.021352479934692383, 0.021065376281738282, 0.021118911743164062, 0.021660032272338866, 0.02143539237976074, 0.021240192413330077, 
0.021571647644042968, 0.021176607131958007, 0.021271551132202148, 0.02129180717468262, 0.02112735939025879, 0.021313535690307618, 0.021171680450439455, 0.021158432006835936, 0.021149696350097655, 0.0212108154296875, 0.021391679763793945, 0.021616159439086916, 0.021493728637695313, 0.021182975769042968, 0.021037376403808594, 0.021093503952026367, 0.02125881576538086, 0.021180416107177736, 0.021427711486816405, 0.021248512268066407, 0.021382783889770506, 0.0212392635345459, 0.02144963264465332, 0.021194911956787108, 0.021143392562866212, 0.021522432327270507, 0.02131702423095703, 0.021534303665161132, 0.021300575256347657, 0.021288192749023438, 0.021221792221069336, 0.021159040451049806, 0.02122617530822754, 0.021413408279418945, 0.022235008239746095, 0.02174457550048828, 0.021364223480224608, 0.021289312362670898, 0.021411968231201173, 0.022521728515625, 0.021401599884033205, 0.021296319961547853, 0.02138217544555664, 0.021684160232543947, 0.02135264015197754, 0.021427871704101563, 0.021486879348754883, 0.021301599502563478, 0.021461343765258788, 0.021286943435668945, 0.02123695945739746, 0.021474143981933595, 0.02146268844604492, 0.021434080123901366, 0.021563968658447265, 0.02125971221923828, 0.02124038314819336, 0.02115782356262207, 0.021493824005126953, 0.021478975296020508, 0.021491743087768553, 0.02124355125427246, 0.02125708770751953, 0.021444255828857423, 0.021243360519409178, 0.021372991561889647, 0.021696672439575196, 0.021475488662719727, 0.02132918357849121, 0.021269216537475585, 0.021438304901123046, 0.021353952407836913, 0.02124991989135742, 0.021574464797973633, 0.021442880630493166, 0.021492927551269532, 0.021164287567138673, 0.021096704483032226, 0.021342496871948242, 0.021960416793823243, 0.022095296859741213, 0.021350976943969726, 0.021279104232788087, 0.021372255325317384, 0.021215360641479494, 0.0212010555267334, 0.020927743911743166, 0.021086208343505858, 0.02243168067932129, 0.024996671676635742, 0.02149772834777832, 0.021477504730224608, 0.021243423461914063, 0.02118489646911621, 0.02129007911682129, 0.021154815673828126, 0.02124575996398926, 0.021246143341064453, 0.021217376708984374, 0.02122742462158203, 0.02125619125366211, 0.021130271911621094, 0.021259040832519532, 0.021081279754638672, 0.020943616867065428, 0.02140595245361328, 0.021392576217651366, 0.021216064453125, 0.021114879608154297, 0.021207136154174806, 0.021168031692504884, 0.021233152389526368, 0.021552928924560545, 0.0212891845703125, 0.021281055450439453, 0.021358367919921874, 0.021203392028808592, 0.021184511184692383, 0.021207040786743164, 0.021108896255493163, 0.021249151229858397, 0.021680864334106445, 0.021292160034179688, 0.021727615356445313, 0.023144128799438477, 0.02140652847290039, 0.02125555229187012, 0.021351039886474608, 0.02146268844604492, 0.021379648208618166, 0.02125619125366211, 0.02111724853515625, 0.021103424072265627, 0.02107436752319336, 0.021096895217895508, 0.021220544815063476, 0.021479583740234374, 0.02125686454772949, 0.02109644889831543, 0.021125152587890626, 0.021059551239013673, 0.021129215240478515, 0.021130239486694336, 0.021334943771362306, 0.02104528045654297, 0.021063039779663086, 0.020996799468994142, 0.020996095657348633, 0.021231103897094726, 0.020893440246582032, 0.020781280517578125, 0.021151519775390624, 0.021311616897583006, 0.020980352401733397, 0.020711423873901368, 0.0204902400970459, 0.02066815948486328, 0.02150601577758789, 0.021673824310302733, 0.0210948486328125, 0.02098771286010742, 0.02088159942626953, 0.02082745552062988, 0.021103071212768554, 
0.020856224060058593, 0.02084681510925293, 0.021043424606323243, 0.02115519905090332, 0.02082697677612305, 0.020680864334106444, 0.020709375381469726, 0.020779008865356444, 0.020967424392700194, 0.020954944610595702, 0.020994239807128907, 0.020934656143188478, 0.021114879608154297, 0.02090937614440918, 0.020779712677001953, 0.020747711181640625, 0.020931135177612303, 0.02091007995605469, 0.020915456771850586, 0.020951808929443358, 0.020773887634277344, 0.020715551376342775, 0.020848608016967772, 0.020935775756835938, 0.0212042236328125, 0.02102899169921875, 0.021209632873535156, 0.021141504287719725, 0.021004287719726563, 0.02110643196105957, 0.021180416107177736, 0.020915935516357422, 0.020816415786743165, 0.020875423431396485, 0.020854623794555664, 0.021098495483398438, 0.021014528274536134, 0.020785152435302736, 0.020653055191040038, 0.02087424087524414, 0.02104319953918457, 0.020917823791503906, 0.02381430435180664, 0.02199283218383789, 0.021167007446289063, 0.0211844482421875, 0.021076032638549805, 0.020888927459716797, 0.02103932762145996, 0.020947391510009766, 0.021123071670532227, 0.020875263214111327, 0.020959232330322267, 0.021251583099365236, 0.021089887619018553, 0.02106883239746094, 0.021215103149414063, 0.021171455383300782, 0.02113817596435547, 0.021303295135498047, 0.021056640625, 0.021151744842529296, 0.021128063201904298, 0.021137407302856445, 0.021041280746459962, 0.02108403205871582, 0.021168127059936523, 0.021198368072509764, 0.021061279296875, 0.021111007690429687, 0.02100822448730469, 0.021144128799438475, 0.02135264015197754, 0.021245599746704102, 0.02160470390319824, 0.021250207901000975, 0.02115990447998047, 0.021174079895019533, 0.021094463348388673, 0.020938880920410158, 0.020995967864990233, 0.021078079223632813, 0.021150976181030273, 0.021535423278808592, 0.021118431091308593, 0.021120960235595704, 0.02106368064880371, 0.02113539123535156, 0.021119552612304686, 0.02101043128967285, 0.021078655242919922, 0.021012224197387696, 0.021068063735961914, 0.020960607528686524, 0.021207391738891603, 0.02115001678466797, 0.02103500747680664, 0.021021728515625, 0.02104412841796875, 0.021184608459472655, 0.021085792541503907, 0.021053247451782227, 0.021000768661499022, 0.020944896697998046, 0.021090303421020508, 0.020946592330932618, 0.020748640060424806, 0.020672704696655272, 0.020896799087524415, 0.020832895278930664, 0.020897951126098633, 0.02079539108276367, 0.020709632873535156, 0.02071731185913086, 0.02093257522583008, 0.020831935882568358, 0.020732255935668947, 0.020905248641967772, 0.021353183746337892, 0.02127984046936035, 0.02163599967956543, 0.021307392120361326, 0.021403039932250977, 0.02133452796936035, 0.02126857566833496, 0.021310560226440428, 0.021337152481079102, 0.02130518341064453, 0.021485376358032226, 0.021440704345703124, 0.02203228759765625, 0.022298976898193358, 0.02212620735168457, 0.02140787124633789, 0.021329919815063478, 0.02137615966796875, 0.021537696838378906, 0.021384767532348633, 0.021494144439697264, 0.021445791244506837, 0.02131622314453125, 0.021353887557983398, 0.02131974411010742, 0.021448448181152345, 0.021441535949707033, 0.021452863693237303, 0.021415552139282226, 0.0213703670501709, 0.021306175231933594, 0.021349536895751954, 0.021469343185424806, 0.021350080490112305, 0.021399744033813478, 0.021516223907470704, 0.021407615661621093, 0.021469375610351563, 0.02136479949951172, 0.021417919158935546, 0.021399551391601563, 0.021366783142089844, 0.021419136047363282, 0.0212488956451416, 0.021303295135498047, 0.021518047332763673, 
0.021420032501220702, 0.021899744033813475, 0.02123347282409668, 0.021344255447387696, 0.021286720275878905, 0.021428415298461914, 0.02128895950317383, 0.02143619155883789, 0.021327232360839842, 0.02189731216430664, 0.021391424179077148, 0.021418527603149416, 0.021340320587158203, 0.02155673599243164, 0.02152931213378906, 0.021307167053222657, 0.021301248550415038, 0.02150809669494629, 0.021329439163208008, 0.021453279495239258, 0.021379072189331053, 0.021465087890625, 0.021364736557006835, 0.02141798400878906, 0.02140563201904297, 0.02131059265136719, 0.021433280944824218, 0.021427967071533202, 0.02122572708129883, 0.021431520462036134, 0.021362720489501955, 0.021348127365112303, 0.02151148796081543, 0.021347999572753906, 0.021342208862304687, 0.021361791610717773, 0.02144879913330078, 0.021672735214233397, 0.02126438331604004, 0.02143951988220215, 0.021262367248535155, 0.02124812889099121, 0.021885408401489257, 0.021545312881469728, 0.021303232192993165, 0.021235103607177733, 0.02154969596862793, 0.02138115119934082, 0.021767967224121092, 0.021264608383178712, 0.021370880126953123, 0.02147532844543457, 0.021577823638916017, 0.02126710319519043, 0.021437984466552734, 0.02164156723022461, 0.021338111877441408, 0.021344160079956053, 0.021549152374267577, 0.021660703659057617, 0.02158896064758301, 0.021397504806518555, 0.021362592697143554, 0.021389408111572264, 0.021489887237548827, 0.02146665573120117, 0.021364992141723632, 0.021227519989013673, 0.021381120681762695, 0.02125619125366211, 0.021521856307983398, 0.021424896240234376, 0.02160416030883789, 0.021456895828247072, 0.021522432327270507, 0.021553152084350585, 0.021831680297851562, 0.021384767532348633, 0.021338560104370116, 0.021425535202026367, 0.021424768447875976, 0.021420032501220702, 0.02146713638305664, 0.02143027114868164, 0.021283136367797852, 0.021311168670654298, 0.021376991271972658, 0.021125152587890626, 0.02144451141357422, 0.02154640007019043, 0.02155926322937012, 0.02156342315673828, 0.02134448051452637, 0.021342079162597657, 0.0216124153137207, 0.02437411117553711, 0.021701536178588866, 0.02130633544921875, 0.02133407974243164, 0.021476703643798827, 0.021541439056396484, 0.021331167221069335, 0.021224159240722656, 0.021302623748779295, 0.0213243522644043, 0.02122083282470703, 0.02181328010559082, 0.02165216064453125, 0.02149580764770508, 0.021630048751831055, 0.02128374481201172, 0.021303295135498047, 0.021321407318115236, 0.02129155158996582, 0.02130668830871582, 0.021362688064575194, 0.02120012855529785, 0.021385728836059572, 0.02110207939147949, 0.021240575790405274, 0.021257663726806642, 0.02102889633178711, 0.021306175231933594, 0.0222696647644043, 0.02151580810546875, 0.021314016342163088, 0.021379072189331053, 0.02126028823852539, 0.02109187126159668, 0.021354719161987303, 0.02109609603881836, 0.021299232482910158, 0.021162559509277343, 0.02127052879333496, 0.021125247955322266, 0.021180288314819336, 0.021178688049316406, 0.021083999633789062, 0.021163040161132813, 0.021089088439941405, 0.02146236801147461, 0.021338783264160156, 0.021349727630615236, 0.021275104522705077, 0.021110847473144533, 0.02122460746765137, 0.021175552368164062, 0.021288671493530274, 0.021370880126953123, 0.02125004768371582, 0.02225107192993164, 0.021290431976318358, 0.02128998374938965, 0.021207040786743164, 0.021319679260253906, 0.021325824737548828, 0.021034112930297853, 0.021149696350097655, 0.02134127998352051, 0.02156224060058594, 0.021418912887573242, 0.021336063385009766, 0.02123366355895996, 0.021237312316894533, 
0.021162784576416016, 0.021115776062011718, 0.021231712341308592, 0.02129318428039551, 0.02127145576477051, 0.02251897621154785, 0.021289440155029298, 0.021272319793701172, 0.02138751983642578, 0.021206783294677733, 0.021550432205200195, 0.0216278076171875, 0.02134988784790039, 0.02116441535949707]",tokens/s,46.88777743590243,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ 
self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 20971 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 718, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 176.12 MiB is free. Process 45474 has 14.57 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,1034.825728,10975.379456,0.0,10580.13184,10162.029568,s,1,7.04631689453125,7.04631689453125,0.0,7.04631689453125,7.04631689453125,7.04631689453125,7.04631689453125,[7.04631689453125],,kWh,6.928084420822718e-06,7.568928127316525e-07,3.626947345999887e-06,1.1311924579554257e-05,,MB,1421.123584,11097.014272,0.0,10689.183744,9358.065152,s,10,1.2058576965332033,0.12058576965332032,0.0057852438894190296,0.12277670288085937,0.12617377014160158,0.12655339584350586,0.12685709640502932,"[0.10859286499023438, 0.12693302154541017, 0.12593856048583985, 0.12311558532714843, 0.11274288177490234, 0.12307100677490235, 0.11689859008789062, 0.11999337768554688, 0.12608940887451173, 
0.12248239898681641]",tokens/s,2122.970237168039,kWh,3.523590479668807e-06,3.885736947396416e-07,2.3340480519758895e-06,6.246212226384339e-06,tokens/kWh,40984838.6064505,MB,1454.32576,11097.014272,0.0,10689.183744,9397.6704,s,10,31.172315673828127,3.1172315673828126,0.005840258230599865,3.116716918945312,3.1216128662109375,3.1267613891601562,3.130880207519531,"[3.117585693359375, 3.111726806640625, 3.111638671875, 3.12046875, 3.110628173828125, 3.119263427734375, 3.11584814453125, 3.11538671875, 3.117859375, 3.131909912109375]",tokens/s,20.21024060554282,kWh,9.067647783991333e-05,1.000147588033289e-05,6.018214084982452e-05,0.00016086009457007072,tokens/kWh,391644.6783671209,,s,630,31.16853237915038,0.049473860919286336,0.00038854894768234087,0.0494033432006836,0.049796352767944335,0.049995362854003904,0.05130727104187012,"[0.050460128784179686, 0.04980380630493164, 0.04919206237792969, 0.0491814079284668, 0.04902108764648438, 0.04915814590454102, 0.049065216064453125, 0.04898278427124023, 0.049199134826660156, 0.04906800079345703, 0.04930323028564453, 0.049395870208740235, 0.049067230224609376, 0.04914886474609375, 0.049059486389160155, 0.049500511169433596, 0.049356353759765624, 0.04949651336669922, 0.04918236923217773, 0.050678112030029296, 0.04948188781738281, 0.049555007934570315, 0.049341854095458985, 0.04965465545654297, 0.04933145523071289, 0.049207103729248046, 0.04929983901977539, 0.04924678421020508, 0.049141216278076175, 0.049293502807617184, 0.04922608184814453, 0.049220703125, 0.04938243103027344, 0.04938915252685547, 0.04924563217163086, 0.049423198699951175, 0.04964761734008789, 0.0498661117553711, 0.04938166427612305, 0.04931964874267578, 0.05000172805786133, 0.0502089614868164, 0.049652385711669925, 0.049543136596679686, 0.04940595245361328, 0.04939977645874023, 0.04940188980102539, 0.049280094146728515, 0.04941712188720703, 0.049495136260986325, 0.049683361053466796, 0.049598464965820314, 0.049772544860839846, 0.04962076950073242, 0.050151649475097655, 0.04963670349121094, 0.04978915023803711, 0.04954294586181641, 0.049879711151123045, 0.04974147033691406, 0.04962543869018555, 0.04969244766235351, 0.04970108795166016, 0.051871742248535156, 0.050220863342285156, 0.04935286331176758, 0.049258529663085936, 0.04914575958251953, 0.04926473617553711, 0.049200191497802734, 0.0492545280456543, 0.04923376083374023, 0.04916118240356445, 0.04910492706298828, 0.04961276626586914, 0.04967833709716797, 0.049240062713623044, 0.04917555236816406, 0.04916121673583984, 0.04900233459472656, 0.049167552947998044, 0.0493864631652832, 0.04923798370361328, 0.04946102523803711, 0.049565216064453126, 0.049259231567382815, 0.049162174224853514, 0.049188926696777345, 0.049209342956542966, 0.04911513519287109, 0.04942438507080078, 0.049235969543457034, 0.0493007698059082, 0.04923043060302734, 0.049200321197509764, 0.049197952270507814, 0.04942444610595703, 0.049121280670166016, 0.04918067169189453, 0.04917657470703125, 0.049291263580322264, 0.04926668930053711, 0.049240062713623044, 0.049237598419189454, 0.04945315170288086, 0.049699134826660156, 0.04950991821289062, 0.04923209762573242, 0.049481983184814456, 0.04921753692626953, 0.0493199348449707, 0.04914176177978516, 0.049212928771972655, 0.04925900650024414, 0.049245567321777345, 0.049344959259033205, 0.04955353546142578, 0.04959196853637695, 0.0494159049987793, 0.04930963134765625, 0.04932275390625, 0.04945305633544922, 0.04950764846801758, 0.04951520156860351, 0.04958003234863281, 0.050753280639648436, 0.05113177490234375, 
0.050012256622314455, 0.04933071899414063, 0.04909657669067383, 0.04908863830566406, 0.04917619323730469, 0.04918438339233398, 0.049150718688964846, 0.04933222579956055, 0.04908662414550781, 0.04926857757568359, 0.049014785766601565, 0.049152000427246094, 0.04923116683959961, 0.04904006576538086, 0.04916428756713867, 0.04906982421875, 0.04910617446899414, 0.04924262237548828, 0.049365505218505856, 0.04941955184936524, 0.04929404830932617, 0.049329185485839845, 0.04926784133911133, 0.049173599243164064, 0.04914662551879883, 0.049108097076416016, 0.049307937622070315, 0.04944569778442383, 0.04947903823852539, 0.049183135986328126, 0.049086463928222655, 0.04905984115600586, 0.04941209411621094, 0.04915139389038086, 0.04949660873413086, 0.04916844940185547, 0.04915609741210938, 0.04939980697631836, 0.04950630569458008, 0.04943462371826172, 0.049459201812744144, 0.04952678298950195, 0.04961280059814453, 0.049434593200683594, 0.04942851257324219, 0.04936880111694336, 0.0494431037902832, 0.04960617446899414, 0.04942076873779297, 0.049465343475341796, 0.04953395080566406, 0.049509376525878904, 0.049729534149169925, 0.04971724700927734, 0.04940390396118164, 0.04931923294067383, 0.04940047836303711, 0.049554622650146485, 0.04946211242675781, 0.049516159057617186, 0.04965801620483398, 0.05038463973999024, 0.051326526641845706, 0.04979609680175781, 0.04924588775634765, 0.04942038345336914, 0.049066047668457034, 0.04917270278930664, 0.049176513671875, 0.04911513519287109, 0.049168254852294924, 0.049145984649658206, 0.049176128387451175, 0.049242462158203125, 0.04915980911254883, 0.04930403137207031, 0.04931103897094727, 0.049342334747314455, 0.04958448028564453, 0.04954102325439453, 0.049281089782714844, 0.04965836715698242, 0.04938547134399414, 0.04953497695922852, 0.04945129776000977, 0.049479358673095705, 0.04980534362792969, 0.04986646270751953, 0.04959433746337891, 0.04951513671875, 0.04950803375244141, 0.04939974212646484, 0.049240127563476566, 0.04927078247070313, 0.04930508804321289, 0.04938598251342773, 0.0496517105102539, 0.04944486236572266, 0.04923392105102539, 0.04985020828247071, 0.049551422119140626, 0.04969071960449219, 0.04947353744506836, 0.0497806396484375, 0.049500255584716796, 0.04998758316040039, 0.04956774520874024, 0.04955091094970703, 0.050573760986328126, 0.049649024963378904, 0.04947622299194336, 0.049584095001220706, 0.04924623870849609, 0.04974095916748047, 0.04933718490600586, 0.04946236801147461, 0.049623966217041016, 0.04965785598754883, 0.04947763061523437, 0.04976435089111328, 0.0494815673828125, 0.049653854370117184, 0.04990278244018555, 0.0498205451965332, 0.04941209411621094, 0.051748382568359376, 0.05027068710327148, 0.04927449417114258, 0.04914806365966797, 0.04905801773071289, 0.049110912322998045, 0.04903033447265625, 0.04923078536987305, 0.049254016876220705, 0.04920956802368164, 0.049275039672851566, 0.049145503997802736, 0.04908886337280274, 0.04905779266357422, 0.0491069450378418, 0.04900454330444336, 0.049031169891357425, 0.04912332916259766, 0.04919839859008789, 0.04915679931640625, 0.04934041595458984, 0.04946239852905274, 0.04921379089355469, 0.049164798736572264, 0.04940803146362305, 0.04926816177368164, 0.04905017471313477, 0.04913484954833985, 0.049105567932128905, 0.049172542572021485, 0.04917660903930664, 0.04921123123168945, 0.049100959777832034, 0.04922278213500977, 0.04905868911743164, 0.049285118103027346, 0.04927056121826172, 0.04916633605957031, 0.04921571350097656, 0.04936272048950195, 0.04933222579956055, 0.04961507034301758, 
0.04945510482788086, 0.04987603378295898, 0.049511070251464846, 0.04947177505493164, 0.049498111724853515, 0.04925417709350586, 0.04935702514648437, 0.04951859283447266, 0.049436511993408205, 0.04968182373046875, 0.04961561584472656, 0.04945078277587891, 0.049574111938476564, 0.04947916793823242, 0.04960009765625, 0.04960255813598633, 0.0493917121887207, 0.04970927810668945, 0.04959020614624023, 0.04969036865234375, 0.049615776062011716, 0.051260128021240234, 0.05005219268798828, 0.04910787200927735, 0.0501739501953125, 0.049094432830810546, 0.049127647399902344, 0.04922345733642578, 0.04940108871459961, 0.04925040054321289, 0.04924095916748047, 0.049258495330810545, 0.04937862396240234, 0.049498817443847654, 0.049188865661621096, 0.04915785598754883, 0.049111328125, 0.04937286376953125, 0.04923337554931641, 0.04936486434936523, 0.04951897430419922, 0.04941382217407227, 0.049361824035644535, 0.049324031829833984, 0.04931379318237305, 0.049192958831787106, 0.0492786865234375, 0.049324321746826175, 0.0492564468383789, 0.04925235366821289, 0.04926668930053711, 0.04905574417114258, 0.04930559921264648, 0.049127422332763675, 0.04929724884033203, 0.049281185150146484, 0.049452064514160156, 0.04922617721557617, 0.04939193725585937, 0.04937750244140625, 0.04965776062011719, 0.04977993774414063, 0.049689247131347654, 0.04962736129760742, 0.04956159973144531, 0.04961075210571289, 0.04969043350219727, 0.04957408142089844, 0.04961248016357422, 0.049731201171875, 0.0496769905090332, 0.049565696716308595, 0.04950537490844727, 0.0497017936706543, 0.04966195297241211, 0.049551551818847656, 0.049514305114746096, 0.049559040069580076, 0.04970137786865234, 0.0510618896484375, 0.04994547271728516, 0.049838081359863284, 0.04979916763305664, 0.04973263931274414, 0.05154457473754883, 0.05004185485839844, 0.0499455680847168, 0.049858558654785154, 0.049202239990234375, 0.049363327026367185, 0.04911980819702148, 0.04913356781005859, 0.0492564468383789, 0.049375232696533204, 0.0490937614440918, 0.04926348876953125, 0.049212543487548825, 0.04910579299926758, 0.0492564468383789, 0.04936198425292969, 0.04915091323852539, 0.04916595077514648, 0.049017215728759764, 0.04930915069580078, 0.04924265670776367, 0.04973283386230469, 0.049339168548583986, 0.04939571380615235, 0.04947558212280274, 0.04934656143188477, 0.049235969543457034, 0.04951638412475586, 0.04934672164916992, 0.04937900924682617, 0.049969470977783204, 0.049798656463623046, 0.04927542495727539, 0.04926665496826172, 0.049225727081298826, 0.049642879486083986, 0.04918540954589844, 0.04941164779663086, 0.04932163238525391, 0.049576736450195315, 0.04944806289672852, 0.049417247772216795, 0.04938313674926758, 0.049500160217285157, 0.04935059356689453, 0.049411937713623046, 0.04929769515991211, 0.04938067245483398, 0.049351009368896484, 0.049379745483398435, 0.049441856384277345, 0.049344993591308596, 0.05028847885131836, 0.04973590469360351, 0.04945888137817383, 0.04949059295654297, 0.04938143920898438, 0.04968236923217773, 0.04941164779663086, 0.04960496139526367, 0.04947574234008789, 0.04941209411621094, 0.04936816024780273, 0.05148672103881836, 0.04995072174072265, 0.049196990966796875, 0.04914182281494141, 0.04938726425170899, 0.04929951858520508, 0.04919929504394531, 0.049210559844970705, 0.049164833068847655, 0.04939158248901367, 0.04911545562744141, 0.04928102493286133, 0.04923542404174805, 0.04922832107543945, 0.04924540710449219, 0.049294239044189454, 0.04909564971923828, 0.049275135040283205, 0.04920182418823242, 0.04946739196777344, 
0.04960665512084961, 0.049643199920654295, 0.049543487548828126, 0.04941625595092773, 0.049417377471923825, 0.049385791778564454, 0.04913401412963867, 0.04923600006103516, 0.049332000732421874, 0.0492606086730957, 0.04920131301879883, 0.049274398803710935, 0.04923235321044922, 0.0492308464050293, 0.04920409774780273, 0.04925766372680664, 0.04923897552490234, 0.049317760467529295, 0.04947350311279297, 0.04951830291748047, 0.04947398376464844, 0.049704959869384766, 0.05000806427001953, 0.049606304168701175, 0.04924777603149414, 0.049507137298583984, 0.049501792907714844, 0.04932767868041992, 0.04958089447021485, 0.049616222381591794, 0.04981987380981445, 0.04948854446411133, 0.049471263885498044, 0.049516544342041016, 0.04952217483520508, 0.04940812683105469, 0.04937356948852539, 0.04956076812744141, 0.049498943328857424, 0.04963273620605469, 0.04987516784667969, 0.04967388916015625, 0.04976873779296875, 0.05205401611328125, 0.05026406478881836, 0.04940185546875, 0.049436351776123044, 0.049508575439453126, 0.049401790618896484, 0.04928435134887695, 0.049170398712158205, 0.04925110244750976, 0.05035404968261719, 0.04985065460205078, 0.049186817169189455, 0.04924620819091797, 0.04968790435791016, 0.04928691101074219, 0.049436832427978514, 0.049257217407226564, 0.04926259231567383, 0.049188865661621096, 0.04949606323242187, 0.04938137435913086, 0.049565696716308595, 0.04939932632446289, 0.04929334259033203, 0.04952844619750976, 0.04924208068847656, 0.04934467315673828, 0.049373886108398435, 0.049153217315673826, 0.04925084686279297, 0.04940972900390625, 0.0492652473449707, 0.049344512939453126, 0.049620990753173826, 0.04926873779296875, 0.049282974243164065, 0.04915139389038086, 0.049490623474121094, 0.04931084823608398, 0.04978982543945312, 0.04963087844848633, 0.049610431671142576, 0.04945772933959961, 0.049549407958984375, 0.04954025650024414, 0.04956963348388672, 0.049296382904052735, 0.04934604644775391, 0.04923766326904297, 0.04933241653442383, 0.04927145767211914, 0.049686527252197264, 0.04948160171508789, 0.04940732955932617, 0.04949414443969727, 0.0495513916015625, 0.049422462463378905, 0.04953107070922851, 0.04937350463867188, 0.04952259063720703, 0.0497501106262207, 0.04951244735717773, 0.04943167877197266, 0.052706592559814455, 0.05042403030395508, 0.049576446533203124, 0.04954111862182617, 0.049301502227783206, 0.04938924789428711, 0.04929977416992187, 0.0493704948425293, 0.050090625762939454, 0.04949769592285156, 0.04938742446899414, 0.049621505737304686, 0.04942233657836914, 0.04932825469970703, 0.04940278244018555, 0.04941683197021484, 0.049359199523925784, 0.049469280242919925, 0.04953104019165039, 0.049777759552001956, 0.04985948944091797, 0.04983603286743164, 0.04970905685424805, 0.04964556884765625, 0.04956108856201172, 0.049562110900878906, 0.04950960159301758, 0.04962284851074219, 0.049695713043212894, 0.049635326385498044, 0.04947148895263672, 0.04940185546875, 0.04976844787597656, 0.049917953491210934, 0.04956905746459961, 0.04984707260131836, 0.050421630859375, 0.04969881439208984, 0.04953251266479492, 0.049506782531738285, 0.04972505569458008, 0.04975040054321289, 0.049632640838623045, 0.04988582229614258, 0.04954876708984375, 0.04960720062255859, 0.04940595245361328, 0.049442817687988284, 0.04952473449707031, 0.04968835067749024, 0.049737247467041015, 0.04959507369995117, 0.049522335052490235, 0.04991827011108398, 0.04943628692626953, 0.04945961761474609, 0.04975523376464844, 0.04991244888305664, 0.051052833557128904, 0.049673633575439455, 0.04988988876342773, 
0.049887233734130856, 0.04981350326538086]",tokens/s,20.212693762296833,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/xglm/modeling_xglm.py"", line 707, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.96 GiB. GPU 0 has a total capacity of 14.74 GiB of which 662.12 MiB is free. Process 133380 has 14.09 GiB memory in use. Of the allocated memory 13.97 GiB is allocated by PyTorch, and 6.66 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 270, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 68662 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 905.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 234.12 MiB is free. Process 166086 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", 
line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 362.12 MiB is free. Process 172185 has 14.38 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 1.78 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,887.402496,14333.902848,0.0,13931.380736,13915.964416,s,1,7.4868017578125,7.4868017578125,0.0,7.4868017578125,7.4868017578125,7.4868017578125,7.4868017578125,[7.4868017578125],,kWh,9.086246370846613e-06,9.949137140641125e-07,4.094169941987191e-06,1.4175330026897918e-05,,MB,1333.755904,14753.333248,0.0,14336.131072,14291.630592,s,10,1.883600311279297,0.18836003112792968,0.009202981699474425,0.19181752014160156,0.1949177734375,0.19536541137695312,0.19572352172851562,"[0.1740084228515625, 0.16709414672851564, 0.19219683837890625, 0.19020057678222657, 0.19053733825683594, 0.1929956512451172, 0.19449778747558594, 0.19581304931640625, 0.19143820190429686, 0.19481829833984374]",tokens/s,1359.0993719157482,kWh,5.503337347838371e-06,6.068887830490954e-07,3.6690718652965044e-06,9.779297996183972e-06,tokens/kWh,26177748.147146657,MB,1359.290368,14921.105408,0.0,14503.903232,14463.49568,s,10,40.678680664062504,4.06786806640625,0.003954821202652873,4.067390380859376,4.07419013671875,4.074587622070313,4.074905610351562,"[4.06336865234375, 4.062983642578125, 4.066162109375, 4.064328857421875, 4.065932373046875, 4.069024169921875, 4.06917529296875, 4.06861865234375, 4.074101806640625, 4.074985107421875]",tokens/s,15.487227946322562,kWh,0.00011889261990882712,1.3114187371212462e-05,7.891129975450108e-05,0.00021091810703454064,tokens/kWh,298694.1277150895,,s,630,40.675355663299534,0.06456405660841201,0.0003810027742030221,0.06454355239868165,0.06488264083862305,0.06500082359313965,0.06654322189331055,"[0.06655308532714843, 0.06457414245605468, 0.06409222412109375, 0.06403830718994141, 0.06417878723144531, 0.06408512115478515, 0.06408486175537109, 0.06398726272583008, 0.06402912139892578, 0.06405324554443359, 0.06414335632324218, 0.06436211395263672, 0.06408640289306641, 0.06413539123535156, 0.06444214630126953, 0.06457872009277343, 0.06461062622070313, 0.06453711700439453, 0.06439321899414062, 0.06425174713134765, 0.06428083038330078, 0.06421699523925781, 0.06425599670410156, 0.06429923248291015, 0.06428854370117187, 0.06430924987792969, 0.06430671691894531, 0.06486067199707031, 0.06448294067382812, 0.06446937561035156, 0.06460002899169921, 0.06451609802246094, 0.06476595306396485, 0.06456845092773437, 0.0651825942993164, 0.06451570892333984, 0.06457154846191407, 0.06454434967041016, 0.06443421173095704, 0.06451261138916016, 0.06438690948486328, 0.06444457244873047, 0.06440866851806641, 0.06438185882568359, 0.06471810913085937, 0.06463673400878907, 0.06458822631835938, 0.06447325134277344, 0.06463081359863282, 0.064610595703125, 0.06452019500732421, 0.0644853744506836, 0.06451404571533204, 
0.06450784301757813, 0.06466156768798828, 0.06459910583496094, 0.0646542739868164, 0.06477581024169922, 0.06461654663085938, 0.06462834930419922, 0.06477011108398438, 0.06481775665283203, 0.06499488067626953, 0.06732387542724609, 0.06502588653564453, 0.06420467376708984, 0.06426678466796874, 0.06408780670166016, 0.06406143951416016, 0.06397484970092773, 0.06410502624511719, 0.06402835083007813, 0.0640412826538086, 0.06401052856445312, 0.06426112365722657, 0.06424198150634766, 0.06417823791503906, 0.0642911376953125, 0.06454656219482421, 0.06463673400878907, 0.06448528289794922, 0.06434259033203125, 0.0641635513305664, 0.0643156509399414, 0.06422441864013671, 0.06423846435546875, 0.06425599670410156, 0.06422322845458985, 0.06431715393066406, 0.06429519653320312, 0.06438706970214844, 0.06437478637695312, 0.06442598724365234, 0.06457958221435547, 0.06454681396484375, 0.06462258911132812, 0.06461996459960938, 0.06453715515136718, 0.06443411254882812, 0.0643891830444336, 0.06457660675048828, 0.06445126342773437, 0.06445801544189453, 0.06468704223632812, 0.06438092803955078, 0.06439321899414062, 0.06444767761230469, 0.06460205078125, 0.06470336151123048, 0.06460211181640625, 0.06485798645019532, 0.06467596435546875, 0.0644993896484375, 0.06446511840820313, 0.06455510711669922, 0.06450994873046875, 0.06452812957763672, 0.06449587249755859, 0.06472499084472656, 0.0646182098388672, 0.06461468505859375, 0.06460521697998046, 0.06456137847900391, 0.06491007995605469, 0.06473318481445313, 0.0649318389892578, 0.06667491149902344, 0.0646436767578125, 0.06419251251220703, 0.0640953598022461, 0.06406559753417969, 0.06421139526367188, 0.06411097717285157, 0.06420601654052735, 0.06407660675048828, 0.06420022583007813, 0.06407830047607421, 0.0641593246459961, 0.06422978973388672, 0.06438047790527343, 0.06433344268798828, 0.0646313247680664, 0.06460368347167969, 0.06450867462158204, 0.06434815979003906, 0.06438249969482422, 0.06431922912597657, 0.06434684753417969, 0.06429901123046874, 0.06432358551025391, 0.06434754943847656, 0.06428937530517578, 0.06431129455566406, 0.06439936065673828, 0.06447309112548828, 0.06447305297851562, 0.06453763580322265, 0.064500732421875, 0.06467584228515624, 0.06478749084472656, 0.06445494079589843, 0.06466425323486329, 0.064468994140625, 0.06453862762451172, 0.06448915100097656, 0.06445024108886718, 0.06442002868652344, 0.06453024291992188, 0.06452611541748046, 0.06444528198242187, 0.06449152374267578, 0.06461440277099609, 0.0645684814453125, 0.06456610870361328, 0.064780029296875, 0.06459008026123046, 0.0648315200805664, 0.06493590545654297, 0.06471475219726562, 0.06463632202148438, 0.0647399673461914, 0.06461753845214843, 0.06467062377929687, 0.06513043212890625, 0.06558112335205078, 0.06481005096435546, 0.06482972717285156, 0.06479119873046875, 0.06475555419921875, 0.06649552154541015, 0.06456098937988282, 0.06411468505859375, 0.064110595703125, 0.06397091293334961, 0.06404547119140624, 0.06395315170288086, 0.06397721481323242, 0.06404096221923829, 0.06412057495117188, 0.06415744018554688, 0.06409820556640625, 0.06417059326171876, 0.06433177947998046, 0.06457548522949219, 0.06454799652099609, 0.06499523162841797, 0.06458668518066406, 0.06434745788574218, 0.06434272003173828, 0.06471453094482423, 0.06424976348876953, 0.06444473266601562, 0.0644136962890625, 0.064685791015625, 0.0649505615234375, 0.06445260620117188, 0.0643846435546875, 0.06439974212646485, 0.0644131851196289, 0.0645739517211914, 0.06478028869628906, 0.06468825531005859, 0.06454259490966797, 
0.06456729888916016, 0.06433382415771484, 0.06433792114257812, 0.06452223968505859, 0.06446099090576173, 0.06437206268310547, 0.06441007995605469, 0.06443587493896484, 0.06445091247558593, 0.06442598724365234, 0.06446201324462891, 0.06453536224365235, 0.06456934356689453, 0.06479779052734375, 0.06479337310791015, 0.064635009765625, 0.06469017791748047, 0.06451132965087891, 0.06458159637451172, 0.06456003570556641, 0.06457318115234376, 0.06468998718261719, 0.06459986877441407, 0.06463938903808594, 0.06463078308105469, 0.06453449249267579, 0.06485958099365234, 0.06487107086181641, 0.06490518188476563, 0.06669830322265625, 0.06477474975585938, 0.06422054290771484, 0.06423206329345703, 0.06410620880126953, 0.06409894561767578, 0.06399385452270508, 0.064, 0.06405734252929687, 0.06413516998291016, 0.064, 0.06424559783935548, 0.06434832000732423, 0.06410844421386719, 0.06454486083984375, 0.06464717102050781, 0.06465491485595704, 0.06454112243652343, 0.06437177276611328, 0.0642503662109375, 0.06433631896972657, 0.06429666900634766, 0.06428905487060547, 0.06444560241699218, 0.06433023834228516, 0.0644201889038086, 0.06436784362792969, 0.06435100555419922, 0.06446489715576172, 0.06446451568603516, 0.0645512924194336, 0.06483968353271484, 0.0646962890625, 0.06448108673095704, 0.06467769622802734, 0.06437315368652344, 0.06447923278808594, 0.06453817749023437, 0.064438720703125, 0.0645260467529297, 0.06450355529785157, 0.06445827484130859, 0.06447811126708984, 0.06460749053955078, 0.06458844757080077, 0.06461459350585938, 0.06476390075683594, 0.06485094451904297, 0.06467791748046875, 0.0645182113647461, 0.06483036804199219, 0.06456102752685547, 0.06464320373535157, 0.0646319351196289, 0.06471475219726562, 0.06463168334960938, 0.06465727996826172, 0.06478246307373046, 0.06501577758789062, 0.06478572845458984, 0.06490723419189454, 0.06493158721923828, 0.06503936004638672, 0.06663916778564453, 0.06465401458740234, 0.06429286193847657, 0.06414950561523437, 0.0640117416381836, 0.06418860626220703, 0.06404726409912109, 0.06406755065917968, 0.06470883178710937, 0.06416941070556641, 0.06402105712890625, 0.06413414764404297, 0.0642933120727539, 0.06431597137451171, 0.06463267517089843, 0.06491056060791016, 0.06470342254638672, 0.06459375762939454, 0.06450150299072266, 0.06422364807128907, 0.06418227386474609, 0.06424575805664062, 0.06444771575927734, 0.06452825927734375, 0.06431951904296875, 0.06426825714111328, 0.06435868835449218, 0.06434880065917968, 0.06439142608642578, 0.06453123474121093, 0.0645475845336914, 0.06469039916992188, 0.06463078308105469, 0.06460620880126954, 0.06455023956298828, 0.06449600219726563, 0.0645771484375, 0.06476866912841797, 0.06448242950439453, 0.06493801879882813, 0.06447395324707031, 0.06450176239013672, 0.0646075210571289, 0.06496924591064453, 0.06474323272705078, 0.06489247894287109, 0.06500003051757812, 0.06481327819824219, 0.06471475219726562, 0.06485935974121093, 0.06448786926269531, 0.06457599639892578, 0.06469798278808593, 0.06461411285400391, 0.06475827026367187, 0.06487449645996093, 0.06473503875732421, 0.06480915069580079, 0.06475775909423828, 0.0647720947265625, 0.06495231628417969, 0.06481455993652344, 0.06507756805419922, 0.06651907348632813, 0.06464729309082032, 0.06436006164550781, 0.06430060577392578, 0.06422908782958985, 0.06430976104736329, 0.0643486099243164, 0.06426419067382813, 0.06441574096679688, 0.06425299072265625, 0.06422214508056641, 0.06427852630615234, 0.06429081726074219, 0.06437625885009765, 0.06444297790527344, 0.06459369659423828, 
0.0646551971435547, 0.06462908935546875, 0.06447513580322266, 0.06452428436279296, 0.06439321899414062, 0.06436454772949218, 0.06435846710205079, 0.06433990478515625, 0.06430847930908203, 0.06439398193359375, 0.0644578857421875, 0.06440227508544921, 0.06455500793457031, 0.06460364532470703, 0.0647193603515625, 0.06485782623291016, 0.06473961639404296, 0.06462368011474609, 0.06456195068359374, 0.06494547271728515, 0.06452515411376954, 0.06456524658203125, 0.06450745391845703, 0.06448159790039062, 0.06460428619384766, 0.06445033264160156, 0.06449378967285156, 0.06450969696044923, 0.06453683471679687, 0.06457548522949219, 0.06473017883300781, 0.06455305480957031, 0.06463926696777343, 0.0647008285522461, 0.06471900939941407, 0.06472029113769531, 0.06464163208007813, 0.06464717102050781, 0.06458930969238282, 0.06482176208496093, 0.06457881927490235, 0.0645701141357422, 0.06472707366943359, 0.0656977310180664, 0.06478144073486328, 0.06479110717773437, 0.06494242858886719, 0.066891357421875, 0.06488262176513672, 0.06413565063476563, 0.06425395202636719, 0.06420233917236329, 0.0640516128540039, 0.06399552154541016, 0.0641413116455078, 0.06407615661621094, 0.06436675262451172, 0.064114013671875, 0.0639738883972168, 0.0641328353881836, 0.06413340759277343, 0.06433177947998046, 0.06487593841552734, 0.06455484771728516, 0.06453119659423828, 0.06413680267333985, 0.06413558197021485, 0.06412879943847656, 0.0641824951171875, 0.06427238464355468, 0.0641895980834961, 0.064314208984375, 0.06434774780273438, 0.0643094711303711, 0.06418361663818359, 0.06437065887451172, 0.06454345703125, 0.06456953430175781, 0.0650035171508789, 0.06477823638916015, 0.06452633666992187, 0.06458265686035156, 0.06448831939697265, 0.0645898208618164, 0.06488690948486328, 0.06475161743164062, 0.06449766540527344, 0.06477203369140624, 0.06477769470214843, 0.06463568115234375, 0.0645199966430664, 0.06481049346923828, 0.06476646423339844, 0.06485807800292968, 0.06499126434326172, 0.06474060821533204, 0.0648568344116211, 0.0647083511352539, 0.06475497436523438, 0.06481404876708985, 0.06453862762451172, 0.06465968322753907, 0.06454364776611328, 0.0646556167602539, 0.06488690948486328, 0.06480754852294922, 0.06515289306640625, 0.06537216186523437, 0.06500147247314453, 0.06522672271728516, 0.06681983947753906, 0.06485004425048828, 0.06432994842529297, 0.06428633880615234, 0.06421052551269531, 0.06426035308837891, 0.06422505950927734, 0.06428313446044921, 0.06435363006591797, 0.06454108428955078, 0.06456380462646484, 0.06439859008789063, 0.06434893035888672, 0.06450521850585937, 0.0647125473022461, 0.06478451538085937, 0.064755615234375, 0.0646490249633789, 0.06458268737792969, 0.06439926147460938, 0.06443622589111328, 0.06446694183349609, 0.0645223388671875, 0.06445046234130859, 0.06454271697998047, 0.06461440277099609, 0.0644455337524414, 0.06449858856201172, 0.06446489715576172, 0.06470169830322266, 0.06472489929199218, 0.06457405090332032, 0.06475376129150391, 0.06468351745605469, 0.06464169311523438, 0.06476390075683594, 0.06472640228271484, 0.06442867279052734, 0.06453209686279297, 0.06465984344482421, 0.06466150665283203, 0.06466969299316407, 0.06459187316894531, 0.06464704132080078, 0.06477967834472656, 0.0648465576171875, 0.06476112365722657, 0.06481199645996094, 0.06489881896972656, 0.06478221130371094, 0.0648828125, 0.0646123504638672, 0.06475750732421875, 0.06475596618652343, 0.06466150665283203, 0.06465888214111327, 0.06480057525634765, 0.06472748565673828, 0.06487420654296874, 0.06497138977050781, 0.06502191925048828, 
0.0650096664428711, 0.06507513427734375, 0.0663421401977539, 0.06480073547363281, 0.06415606689453125, 0.06419888305664062, 0.06413459014892578, 0.06425878143310547, 0.06411980438232422, 0.06416255950927735, 0.06421849822998046, 0.06449037170410156, 0.06438870239257813, 0.06419894409179687, 0.06424380493164063, 0.06426624298095703, 0.06456732940673827, 0.06463481903076172, 0.06470867156982422, 0.06468982696533203, 0.06464342498779296, 0.06440863800048828, 0.06420575714111328, 0.06433126068115234, 0.06481145477294922, 0.06449155426025391, 0.06437071990966797, 0.06439437103271485, 0.06468838500976562, 0.06454950714111328, 0.06439936065673828, 0.06467906951904297, 0.06466441345214843, 0.06470041656494141, 0.0649288330078125, 0.06465392303466796, 0.0646698226928711, 0.06464672088623047, 0.06483145904541016, 0.06460281372070313, 0.06496211242675781, 0.06473725128173828, 0.0646673583984375, 0.06465801239013672, 0.06458589172363281, 0.06472908782958985, 0.0645959701538086, 0.06485177612304688, 0.06509964752197266, 0.06495878601074219, 0.06495846557617188, 0.06490726470947265, 0.06495426940917969, 0.06477177429199218, 0.06477251434326171, 0.06478028869628906, 0.065240478515625, 0.06527584075927734, 0.06470928192138672, 0.0649150390625, 0.06476966094970703, 0.06490191650390625, 0.06548480224609375, 0.06508953857421874, 0.06502809906005859]",tokens/s,15.488493947415792,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 74704 has 14.69 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 1.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 272, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, 
in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 406.12 MiB is free. Process 77775 has 14.34 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 1.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 711, in __init__ self.transformer = CodeGenModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 472, in __init__ self.h = 
nn.ModuleList([CodeGenBlock(config, layer_idx=i) for i in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 472, in self.h = nn.ModuleList([CodeGenBlock(config, layer_idx=i) for i in range(config.n_layer)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 303, in __init__ self.mlp = CodeGenMLP(inner_dim, config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/codegen/modeling_codegen.py"", line 281, in __init__ self.fc_in = nn.Linear(embed_dim, intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 252.12 MiB is free. Process 91791 has 14.49 GiB memory in use. Of the allocated memory 14.38 GiB is allocated by PyTorch, and 2.15 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,789.676032,2266.89024,0.0,1864.368128,1714.486272,s,1,7.62420654296875,7.62420654296875,0.0,7.62420654296875,7.62420654296875,7.62420654296875,7.62420654296875,[7.62420654296875],,kWh,4.522231058293376e-06,4.915489961612077e-07,2.426668607988236e-06,7.44044866244282e-06,,MB,1294.839808,2283.667456,0.0,1868.562432,1692.384256,s,10,0.2889130573272705,0.02889130573272705,0.0002853172877550688,0.0288799524307251,0.029231020736694337,0.02932885398864746,0.02940712059020996,"[0.029426687240600585, 0.02892518424987793, 0.028834720611572266, 0.029209280014038087, 0.02883251190185547, 0.02842131233215332, 0.029014144897460938, 0.028508159637451173, 0.029002559661865233, 0.028738496780395507]",tokens/s,8860.797167433393,kWh,8.953136194018247e-07,9.873656262763903e-08,5.887973353864795e-07,1.5828475174159434e-06,tokens/kWh,161733835.49789393,MB,1341.681664,2432.565248,0.0,2017.460224,1714.458624,s,10,16.605258056640626,1.6605258056640626,0.004991555275441053,1.6601292724609376,1.6665480224609375,1.6671299682617187,1.6675955249023438,"[1.658575927734375, 1.6546309814453124, 1.6587332763671876, 1.662314208984375, 1.666418701171875, 1.6615252685546875, 1.6577652587890626, 1.6659742431640625, 1.6677119140625, 
1.6516082763671875]",tokens/s,37.93978978532382,kWh,4.8948415712265135e-05,5.3986435791595065e-06,2.4286833676214028e-05,7.863389296763868e-05,tokens/kWh,801181.2416043967,,s,630,16.597528404235835,0.02634528318132673,0.0003861712283290325,0.026290992736816406,0.026677369499206544,0.02684228277206421,0.027860382900238036,"[0.026074367523193358, 0.0261759033203125, 0.026428991317749024, 0.02620275115966797, 0.02638863945007324, 0.026277856826782228, 0.0264683837890625, 0.02655232048034668, 0.02636412811279297, 0.027002336502075196, 0.026495231628417968, 0.026462335586547852, 0.02641708755493164, 0.027213823318481444, 0.02705776023864746, 0.026721759796142577, 0.026472768783569335, 0.02627984046936035, 0.026309343338012697, 0.02633263969421387, 0.026195552825927733, 0.026229152679443358, 0.02599103927612305, 0.02595859146118164, 0.026638080596923828, 0.026146656036376954, 0.02633407974243164, 0.02615705680847168, 0.026798080444335938, 0.026731903076171876, 0.0267772159576416, 0.02701763153076172, 0.02667990493774414, 0.02616294479370117, 0.02615648078918457, 0.026344064712524415, 0.02636204719543457, 0.02614681625366211, 0.026404863357543946, 0.02603753662109375, 0.02626019287109375, 0.026025279998779297, 0.026004159927368164, 0.026755136489868166, 0.026184671401977538, 0.026081823348999025, 0.026118431091308594, 0.02607529640197754, 0.026382335662841795, 0.02609689521789551, 0.025912063598632813, 0.025950143814086914, 0.02584377670288086, 0.025948095321655273, 0.02580281639099121, 0.02629631996154785, 0.026044063568115235, 0.026520000457763673, 0.02598031997680664, 0.026151359558105467, 0.02618988800048828, 0.02611814308166504, 0.026085023880004884, 0.025810752868652344, 0.0260118408203125, 0.02611520004272461, 0.02601254463195801, 0.02635366439819336, 0.02613043212890625, 0.026246303558349608, 0.02610643196105957, 0.026102048873901367, 0.02600297546386719, 0.026539648056030273, 0.026606239318847657, 0.027357376098632813, 0.026451839447021484, 0.026601600646972656, 0.026849279403686522, 0.026195968627929687, 0.025898784637451173, 0.02594428825378418, 0.025872575759887696, 0.026140480041503905, 0.026089471817016603, 0.02609164810180664, 0.02601356887817383, 0.026496192932128907, 0.02634220886230469, 0.027720767974853514, 0.02709382438659668, 0.02610361671447754, 0.02615737533569336, 0.025916799545288086, 0.02580361557006836, 0.02600655937194824, 0.02636262321472168, 0.026390527725219725, 0.026066944122314452, 0.02641481590270996, 0.02625564765930176, 0.026176864624023438, 0.026129056930541992, 0.02684511947631836, 0.026060928344726564, 0.02647443199157715, 0.026415168762207033, 0.026372032165527345, 0.026236928939819337, 0.026078720092773438, 0.025975263595581055, 0.026727840423583983, 0.02617955207824707, 0.02611062431335449, 0.026281375885009766, 0.026243135452270507, 0.026388191223144532, 0.02626028823852539, 0.0260546875, 0.02613859176635742, 0.02588876724243164, 0.025839616775512695, 0.025894912719726562, 0.026310655593872072, 0.026203359603881836, 0.02592348861694336, 0.025686016082763673, 0.026089471817016603, 0.026185504913330077, 0.026232383728027345, 0.02611062431335449, 0.026078655242919923, 0.02645078468322754, 0.02628895950317383, 0.026171615600585937, 0.025975488662719728, 0.026021888732910156, 0.02591676712036133, 0.026020511627197266, 0.02608527946472168, 0.02657699203491211, 0.02617366409301758, 0.027166400909423828, 0.02716908836364746, 0.026838815689086915, 0.026164512634277343, 0.026165983200073243, 0.026208255767822267, 0.026261503219604493, 0.026335231781005858, 
0.025918495178222655, 0.02593827247619629, 0.026092159271240235, 0.026038272857666016, 0.026432512283325195, 0.026128448486328126, 0.026079551696777344, 0.03001772880554199, 0.026381856918334962, 0.026304704666137695, 0.026239711761474608, 0.026306560516357422, 0.02672435188293457, 0.026177536010742186, 0.026109952926635743, 0.026583040237426758, 0.026307872772216797, 0.02602262306213379, 0.026275840759277344, 0.02610585594177246, 0.026270751953125, 0.02615177536010742, 0.026050432205200195, 0.02611840057373047, 0.026242975234985352, 0.026645727157592773, 0.026474943161010744, 0.02619340705871582, 0.026431936264038086, 0.026288639068603514, 0.026419200897216798, 0.026277664184570313, 0.026407136917114257, 0.026224224090576172, 0.02619843292236328, 0.02633344078063965, 0.026299840927124025, 0.026249120712280274, 0.0260982723236084, 0.026695680618286134, 0.026677087783813478, 0.026353439331054686, 0.026359296798706053, 0.026546911239624025, 0.026599359512329102, 0.026435808181762697, 0.02634956741333008, 0.02641878318786621, 0.02677801513671875, 0.02660966491699219, 0.02661302375793457, 0.02640870475769043, 0.026279071807861328, 0.02653289604187012, 0.026589984893798827, 0.026169471740722657, 0.026187936782836915, 0.026171104431152344, 0.026929088592529297, 0.02633497619628906, 0.02639084815979004, 0.026453760147094725, 0.026355968475341798, 0.026216447830200194, 0.026341472625732422, 0.02639366340637207, 0.026395135879516602, 0.026752447128295897, 0.026440607070922852, 0.026503135681152343, 0.0262574405670166, 0.02630860710144043, 0.026294271469116212, 0.02668880081176758, 0.02620195198059082, 0.026237695693969727, 0.02657049560546875, 0.0262741756439209, 0.026173215866088867, 0.0259935359954834, 0.025928863525390623, 0.026143487930297853, 0.026135583877563477, 0.026332128524780275, 0.02623846435546875, 0.026634752273559572, 0.026441728591918946, 0.026345439910888672, 0.026260704040527345, 0.026151199340820313, 0.02618012809753418, 0.026259456634521484, 0.026162176132202147, 0.02628915214538574, 0.026738687515258788, 0.026316608428955078, 0.026590591430664064, 0.026251935958862306, 0.02628652763366699, 0.026281696319580078, 0.02609152030944824, 0.02617728042602539, 0.025737888336181642, 0.02627174377441406, 0.026639583587646485, 0.02632374382019043, 0.026054431915283203, 0.026056575775146484, 0.026351104736328124, 0.026293088912963867, 0.0265031681060791, 0.026158912658691406, 0.026252607345581054, 0.0262685432434082, 0.026317920684814453, 0.026284351348876953, 0.02637788772583008, 0.026352575302124023, 0.0266810245513916, 0.026368576049804686, 0.029374208450317383, 0.026447872161865234, 0.026402816772460938, 0.026617536544799803, 0.026354015350341795, 0.026992607116699218, 0.026631744384765624, 0.026318496704101562, 0.026542879104614257, 0.02637558364868164, 0.026372352600097657, 0.026528095245361327, 0.026562559127807618, 0.026975296020507813, 0.02670060729980469, 0.026540159225463867, 0.02661097526550293, 0.02636841583251953, 0.026450239181518554, 0.026482688903808595, 0.02657004737854004, 0.026642112731933593, 0.026589183807373046, 0.02655948829650879, 0.02636390495300293, 0.026291648864746095, 0.026204576492309572, 0.02619545555114746, 0.0264783992767334, 0.02585481643676758, 0.025970111846923827, 0.025939680099487303, 0.026145727157592773, 0.02686966323852539, 0.026737728118896485, 0.026319807052612304, 0.026136255264282225, 0.026130752563476564, 0.026107616424560547, 0.02635772705078125, 0.026517791748046873, 0.026658847808837892, 0.026564672470092775, 0.026341312408447264, 
0.02615113639831543, 0.025675775527954102, 0.026009599685668947, 0.0261693115234375, 0.026044384002685547, 0.026335296630859376, 0.026159008026123046, 0.02647065544128418, 0.026341087341308595, 0.026372224807739257, 0.02627302360534668, 0.026350336074829103, 0.026300416946411134, 0.026566144943237304, 0.026397119522094725, 0.02638051223754883, 0.02645590400695801, 0.026332895278930665, 0.026132768630981445, 0.026738176345825194, 0.026472959518432617, 0.026498271942138673, 0.026303264617919923, 0.026269439697265626, 0.02610406494140625, 0.026077184677124023, 0.026259456634521484, 0.026073087692260744, 0.02613158416748047, 0.026026592254638672, 0.026023775100708007, 0.026263999938964843, 0.026202112197875976, 0.02654742431640625, 0.026409311294555662, 0.026487232208251953, 0.026699167251586914, 0.026352224349975587, 0.02632499122619629, 0.026167295455932618, 0.02630860710144043, 0.02612358474731445, 0.026286783218383788, 0.026472448348999023, 0.02656787109375, 0.026239488601684572, 0.026882368087768553, 0.026994688034057617, 0.026129791259765625, 0.02609436798095703, 0.02609056091308594, 0.0260513916015625, 0.0260153923034668, 0.02648297691345215, 0.02627587127685547, 0.025923583984375, 0.026068992614746093, 0.026430751800537108, 0.02650192070007324, 0.026489887237548828, 0.02971494483947754, 0.026465791702270508, 0.02633526420593262, 0.02654300880432129, 0.02579692840576172, 0.02646883201599121, 0.02641084861755371, 0.02621660804748535, 0.025878559112548827, 0.026015680313110353, 0.026052736282348634, 0.026122112274169922, 0.027486015319824218, 0.02656889533996582, 0.026240320205688478, 0.02605945587158203, 0.0268121280670166, 0.0263110408782959, 0.026229759216308594, 0.0263156795501709, 0.02626790428161621, 0.02617955207824707, 0.025969919204711915, 0.026405536651611328, 0.026049535751342775, 0.026077856063842775, 0.02619808006286621, 0.026007808685302735, 0.026031040191650392, 0.026192863464355468, 0.02654412841796875, 0.02662396812438965, 0.02621036720275879, 0.026531808853149413, 0.02638252830505371, 0.02640070343017578, 0.026507104873657227, 0.026263168334960937, 0.026273536682128906, 0.02625916862487793, 0.026162303924560548, 0.026078399658203126, 0.026308767318725584, 0.026297952651977537, 0.02635683250427246, 0.026079008102416992, 0.02608332824707031, 0.02616048049926758, 0.026296991348266602, 0.026236703872680664, 0.026443328857421875, 0.026816192626953124, 0.026751968383789064, 0.02628927993774414, 0.026297216415405274, 0.026212352752685547, 0.026342496871948243, 0.02604470443725586, 0.02638502311706543, 0.026355712890625, 0.027465728759765624, 0.026514720916748048, 0.025965280532836914, 0.026195968627929687, 0.026241024017333983, 0.026219871520996092, 0.026077856063842775, 0.026088224411010743, 0.026466400146484374, 0.02611507225036621, 0.025963520050048827, 0.026119712829589845, 0.026575328826904297, 0.02634239959716797, 0.02627395248413086, 0.02658390426635742, 0.026285343170166016, 0.02670467185974121, 0.026353599548339844, 0.026384384155273437, 0.026332767486572265, 0.02642780876159668, 0.02634982490539551, 0.026732288360595702, 0.0261911678314209, 0.026015775680541992, 0.026191743850708007, 0.02674358367919922, 0.02799785614013672, 0.02674518394470215, 0.026557504653930666, 0.02670867156982422, 0.02624947166442871, 0.026207616806030273, 0.026141183853149414, 0.026368127822875977, 0.027971807479858397, 0.026459936141967774, 0.02651136016845703, 0.026691328048706053, 0.0270645751953125, 0.026364160537719727, 0.02641641616821289, 0.02639468765258789, 0.02654636764526367, 
0.027441471099853516, 0.026836671829223634, 0.02651087951660156, 0.02633337593078613, 0.026171903610229492, 0.026449920654296875, 0.02621785545349121, 0.026330783843994142, 0.026405759811401367, 0.02613462448120117, 0.02630201530456543, 0.026431295394897462, 0.02631318473815918, 0.02642492866516113, 0.026288703918457033, 0.026239072799682617, 0.026588640213012694, 0.026093215942382814, 0.026068864822387697, 0.025993919372558592, 0.026150720596313477, 0.026214719772338867, 0.026280031204223633, 0.02613871955871582, 0.026216384887695312, 0.025799455642700194, 0.026247007369995117, 0.026363679885864258, 0.02622480010986328, 0.026325216293334962, 0.02619171142578125, 0.02629033660888672, 0.026417152404785156, 0.026424383163452147, 0.02643040084838867, 0.026447872161865234, 0.027254079818725584, 0.02814022445678711, 0.026419200897216798, 0.02650435256958008, 0.02649718475341797, 0.02626425552368164, 0.026580799102783204, 0.026129663467407225, 0.026345504760742187, 0.026360000610351562, 0.026276704788208007, 0.02624038314819336, 0.026288320541381836, 0.026630304336547853, 0.02785708808898926, 0.027457408905029297, 0.026411104202270507, 0.026419200897216798, 0.026289695739746093, 0.02629596710205078, 0.026339744567871092, 0.02618204879760742, 0.026385536193847658, 0.026282880783081056, 0.026572799682617186, 0.02655571174621582, 0.026591936111450196, 0.02646246337890625, 0.026378015518188476, 0.026365055084228515, 0.026359840393066405, 0.026333728790283204, 0.026290239334106444, 0.02614463996887207, 0.026198368072509765, 0.02643155288696289, 0.026447616577148437, 0.026346912384033205, 0.02786172866821289, 0.026253376007080078, 0.026255359649658205, 0.02659721565246582, 0.02649513626098633, 0.026185888290405274, 0.02613657569885254, 0.026207647323608398, 0.0265097599029541, 0.026122432708740234, 0.02691436767578125, 0.026356000900268555, 0.026365919113159178, 0.02619001579284668, 0.025765888214111327, 0.026272096633911134, 0.026626047134399415, 0.0261397762298584, 0.026045312881469728, 0.026406656265258788, 0.02605014419555664, 0.02624380874633789, 0.02623276710510254, 0.02610688018798828, 0.026184703826904295, 0.026123455047607422, 0.026302879333496093, 0.02646623992919922, 0.026241695404052735, 0.026414848327636718, 0.02638047981262207, 0.025873695373535156, 0.02590166473388672, 0.025742464065551758, 0.025913312911987306, 0.025783199310302735, 0.025785823822021485, 0.026100255966186522, 0.026214271545410155, 0.026226816177368165, 0.026232831954956053, 0.026183679580688478, 0.026052608489990234, 0.026345279693603514, 0.026043935775756834, 0.025866880416870117, 0.026365983963012696, 0.026081247329711912, 0.026111967086791994, 0.026689632415771485, 0.026366016387939454, 0.026784896850585938, 0.026366432189941405, 0.026355167388916016, 0.02638649559020996, 0.026032159805297852, 0.0258953914642334, 0.025942304611206054, 0.02634752082824707, 0.026238624572753905, 0.02641107177734375, 0.026362239837646486, 0.026365856170654296, 0.026191423416137696, 0.02600595283508301, 0.026271968841552733, 0.026269279479980468, 0.026313152313232422, 0.026474239349365235, 0.02632080078125, 0.026228832244873046, 0.026036031723022462, 0.02613999938964844, 0.026094303131103516, 0.02611827278137207, 0.02650931167602539, 0.02651260757446289]",tokens/s,37.95745876471694,, 
float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File 
""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 158491 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 1.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 354, in __init__ self.v_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 155636 has 14.74 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 90.39 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 353, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 163200 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,899.100672,15848.046592,0.0,15445.52448,15444.76416,s,1,7.54277099609375,7.54277099609375,0.0,7.54277099609375,7.54277099609375,7.54277099609375,7.54277099609375,[7.54277099609375],,kWh,9.107799516666167e-06,9.971046043738356e-07,4.469170241997583e-06,1.4574074363037586e-05,,MB,1382.52288,16053.567488,0.0,15638.462464,15613.071872,s,10,1.983529525756836,0.19835295257568358,0.006506929766748527,0.20153475189208986,0.20199362640380858,0.20255346908569336,0.20300134323120117,"[0.18991168212890625, 0.2013936309814453, 0.20311331176757813, 0.18199189758300782, 0.20182476806640626, 0.1996480255126953, 0.2018692169189453, 0.20039305114746095, 0.20170806884765624, 0.20167587280273438]",tokens/s,1290.6286328272356,kWh,5.750032983905357e-06,6.34120261169761e-07,3.8073886668235417e-06,1.0191541911898659e-05,tokens/kWh,25118868.39234004,MB,1420.193792,16057.761792,0.0,15640.559616,15627.418112,s,10,39.59763720703125,3.959763720703125,0.0029367801713622147,3.9601295166015626,3.9630472656249998,3.9630578124999998,3.96306625,"[3.963068359375, 3.955595947265625, 3.95819873046875, 3.963044921875, 3.962364013671875, 3.956533203125, 3.955865478515625, 3.961435791015625, 3.9588232421875, 3.96270751953125]",tokens/s,15.910040205331558,kWh,0.00011585696585692773,1.2779317991568613e-05,7.692289813957666e-05,0.000205559181988073,tokens/kWh,306481.07951536507,,s,630,39.59474234390257,0.06284879737127395,0.0004802405793295531,0.06280928039550782,0.06325263214111328,0.06333857822418212,0.06551407791137695,"[0.06552355194091797, 0.06341324615478515, 0.06273638534545899, 0.06269907379150391, 0.06268563079833984, 0.0626954231262207, 0.06251110458374023, 0.06255820846557616, 0.06236102294921875, 0.06239452743530274, 0.06254633712768555, 0.0631459846496582, 0.06257459259033203, 0.06258278274536133, 0.06249871826171875, 0.06280201721191406, 0.06295337677001953, 0.06305801773071289, 0.06296166229248047, 0.06292070388793945, 0.062461952209472656, 0.06263398361206055, 0.06258246231079101, 0.06241020965576172, 0.062427391052246095, 0.06261001586914063, 0.0628655662536621, 0.0627507209777832, 0.06265840148925782, 0.06400355529785157, 0.06282092666625977, 0.06281561660766602, 0.06304419326782226, 0.0630393295288086, 0.06299459075927734, 0.06313299179077149, 0.0630340461730957, 0.06328319931030274, 0.06297520065307617, 0.06296604919433593, 0.06279759979248047, 0.06315043258666993, 0.06296524810791015, 0.06297436904907226, 0.06292732620239258, 0.0629832000732422, 0.06312239837646484, 0.06315369415283204, 0.06315055847167969, 0.06305126571655273, 0.06285977554321288, 0.06346956634521485, 0.06291254425048828, 0.06294524765014649, 0.06282156753540039, 0.06265097427368164, 
0.06263372802734375, 0.06266860961914063, 0.06262566375732422, 0.06270032119750976, 0.06301855850219727, 0.06304403305053712, 0.06301494216918946, 0.06546431732177735, 0.06300012969970703, 0.062284225463867186, 0.0623185920715332, 0.06223440170288086, 0.06241097640991211, 0.06291811370849609, 0.06255670547485352, 0.062379295349121094, 0.06244220733642578, 0.06258441543579102, 0.062310817718505856, 0.06223686218261719, 0.062342975616455076, 0.06265856170654296, 0.0632501106262207, 0.06328355026245117, 0.06297801589965821, 0.06264572906494141, 0.06243577575683594, 0.06267036819458008, 0.06243174362182617, 0.062295806884765624, 0.06225443267822266, 0.06227644729614258, 0.0625024642944336, 0.062460159301757814, 0.06239433670043945, 0.06257494354248047, 0.06265036773681641, 0.06290841674804687, 0.0628974723815918, 0.06293702316284179, 0.06301366424560546, 0.06293705749511719, 0.06275686264038086, 0.06257049560546875, 0.0625621109008789, 0.06245356750488281, 0.06242124938964844, 0.062435009002685546, 0.06251359939575195, 0.06244892883300781, 0.0627364158630371, 0.0628988800048828, 0.06330758285522461, 0.06305766296386718, 0.06334646224975586, 0.0634169921875, 0.06306201553344727, 0.06295072174072265, 0.06295008087158203, 0.0632111358642578, 0.06297999954223633, 0.06285769653320313, 0.06283779144287109, 0.06275167846679687, 0.06305180740356445, 0.06301398468017579, 0.0629400634765625, 0.06325603103637695, 0.06336310577392579, 0.06320783996582031, 0.06589030456542969, 0.0628039665222168, 0.06230214309692383, 0.06225312042236328, 0.06229811096191406, 0.06252928161621094, 0.062454017639160156, 0.062296062469482424, 0.06254927825927735, 0.062328609466552734, 0.06226019287109375, 0.06238000106811523, 0.062443519592285154, 0.06251836776733398, 0.06251980972290039, 0.06316073608398437, 0.06329257583618164, 0.0632267189025879, 0.06274867248535156, 0.06260262298583984, 0.06244169616699219, 0.06237177658081055, 0.062468223571777344, 0.06245391845703125, 0.062441665649414065, 0.06301871871948242, 0.0627786865234375, 0.06258787155151367, 0.0628326416015625, 0.0630354232788086, 0.06309065628051758, 0.06322294235229492, 0.06311203384399414, 0.06285107040405273, 0.06302687835693359, 0.0628862075805664, 0.06278758239746093, 0.06280374526977539, 0.06261577606201171, 0.06265142440795898, 0.06272713470458985, 0.06254348754882813, 0.0625483512878418, 0.06280188751220703, 0.06277737426757812, 0.06294054412841797, 0.0632490234375, 0.0630231056213379, 0.06286662292480469, 0.06290054321289062, 0.06302703857421875, 0.06293980789184571, 0.06325024032592773, 0.06305811309814453, 0.06287564849853515, 0.06287311935424805, 0.06322224044799804, 0.06278348922729492, 0.06284288024902343, 0.06304102325439453, 0.06307273483276367, 0.0630354232788086, 0.06319007873535157, 0.06589663696289062, 0.06331660842895508, 0.06233891296386719, 0.06252764892578125, 0.06231039810180664, 0.0626954231262207, 0.06251264190673828, 0.062351295471191406, 0.0627976303100586, 0.06276172637939453, 0.0623963508605957, 0.0625494728088379, 0.06281049728393555, 0.062404830932617186, 0.06248857498168945, 0.06310515213012695, 0.06318272018432618, 0.06329958343505859, 0.06275446319580077, 0.06252374267578124, 0.06256768035888671, 0.06271615982055664, 0.06276966476440429, 0.06255347061157226, 0.06275545501708985, 0.06276710510253906, 0.0629392318725586, 0.06274857711791992, 0.0628223991394043, 0.06288588714599609, 0.06300662231445313, 0.0628851203918457, 0.06303830337524415, 0.06331391906738282, 0.06309036636352539, 0.06295491027832031, 0.06279225540161133, 
0.0628719024658203, 0.06255820846557616, 0.06259840011596679, 0.06260403060913086, 0.06275667190551758, 0.06296390533447266, 0.06317827224731445, 0.06278601455688476, 0.06293708801269532, 0.06342233657836914, 0.06302640151977539, 0.06315305709838867, 0.06297190475463867, 0.0632578239440918, 0.06306601715087891, 0.06296012878417968, 0.06310236740112304, 0.06289302444458007, 0.06284902572631836, 0.0627589111328125, 0.06287113571166993, 0.06299280166625977, 0.06310892868041992, 0.06301504135131836, 0.06326383972167969, 0.06309539031982422, 0.06547212982177734, 0.06296012878417968, 0.06226534271240235, 0.06259100723266602, 0.0625909423828125, 0.062453536987304686, 0.06262601470947265, 0.06242009735107422, 0.0624705581665039, 0.06251363372802735, 0.062404830932617186, 0.06243920135498047, 0.06242099380493164, 0.06263808059692383, 0.06305699157714843, 0.06366915130615235, 0.06351414489746093, 0.06322745513916016, 0.06325750350952149, 0.06276710510253906, 0.06265241622924805, 0.06288793563842773, 0.06302515029907227, 0.06294732666015625, 0.06259817504882813, 0.06252851104736327, 0.06251248168945313, 0.06249049758911133, 0.06281814575195313, 0.0629337272644043, 0.06297619247436523, 0.06333142471313477, 0.06326063919067383, 0.0631872329711914, 0.06298588943481445, 0.06280083084106446, 0.06262489700317384, 0.06282336044311523, 0.0625459213256836, 0.0628326416015625, 0.06294435119628906, 0.06308099365234375, 0.06258319854736329, 0.06256374359130859, 0.06285574340820313, 0.06278768157958985, 0.06304963302612304, 0.06308454513549805, 0.06312739181518555, 0.0632578239440918, 0.06332831954956054, 0.06306867218017578, 0.0628901138305664, 0.06286771011352539, 0.06265036773681641, 0.06284188842773437, 0.0632300796508789, 0.06302758407592773, 0.06284703826904296, 0.0628351058959961, 0.06273993682861329, 0.06295721435546875, 0.06298611068725586, 0.06549088287353516, 0.06294147109985351, 0.06246134567260742, 0.06251929473876953, 0.062261631011962894, 0.06255187225341798, 0.06239443206787109, 0.062238849639892575, 0.06228582382202148, 0.062263294219970705, 0.06225420761108398, 0.062216510772705076, 0.0621778564453125, 0.0623267822265625, 0.06289161682128906, 0.06331024169921876, 0.06315827178955079, 0.06310195159912109, 0.062635009765625, 0.06253567886352539, 0.06244966506958008, 0.06242508697509765, 0.06233673477172851, 0.06231785583496094, 0.06256742477416992, 0.06259302520751953, 0.06252339172363282, 0.06257049560546875, 0.06253977584838867, 0.06270361709594727, 0.06293503952026368, 0.06296694564819336, 0.0631383056640625, 0.0633364486694336, 0.0630624008178711, 0.06293910217285156, 0.06303142547607422, 0.06301583862304687, 0.06258172988891601, 0.0625172462463379, 0.06258687973022461, 0.06272585678100585, 0.06258227157592773, 0.0628375358581543, 0.06276505661010742, 0.0631009292602539, 0.06296575927734376, 0.06303891372680664, 0.06372819137573242, 0.06337311935424805, 0.06304374313354492, 0.06318201446533203, 0.06313043212890625, 0.062895263671875, 0.06274137496948243, 0.06284211349487305, 0.06286207962036133, 0.06268678283691406, 0.06266310501098633, 0.06280601501464844, 0.06311881637573243, 0.06295196914672851, 0.06308454513549805, 0.06569782257080078, 0.0631234245300293, 0.06267401504516601, 0.062423454284667966, 0.062435840606689455, 0.062440799713134765, 0.06227215957641601, 0.06225913619995117, 0.06218143844604492, 0.06241484832763672, 0.06228297424316406, 0.062257503509521486, 0.06221868896484375, 0.06241811370849609, 0.06272697448730469, 0.06352444839477539, 0.06346588897705079, 0.06307411193847656, 
0.06264851379394532, 0.06263808059692383, 0.062475711822509765, 0.06246380615234375, 0.062352127075195315, 0.06238614273071289, 0.062424320220947266, 0.06257305526733399, 0.06255440139770507, 0.06244099044799805, 0.06262166213989258, 0.06256895828247071, 0.0627072639465332, 0.06308262252807617, 0.06307024002075196, 0.06287798309326172, 0.06285436630249024, 0.06287180709838867, 0.06268982315063476, 0.06265651321411132, 0.06263353729248047, 0.06251516723632812, 0.062419422149658205, 0.06252544021606446, 0.06245171356201172, 0.06249676895141602, 0.0628223991394043, 0.06303318405151367, 0.06289555358886718, 0.06318972778320313, 0.06329068756103516, 0.06326956939697266, 0.06298406219482422, 0.06284643173217773, 0.06317737579345703, 0.0631558723449707, 0.06298163223266602, 0.06278787231445312, 0.06299878311157227, 0.06281852722167969, 0.06283433532714844, 0.06285987091064453, 0.0628836784362793, 0.06355523300170898, 0.06325225448608399, 0.06584870147705078, 0.06310070419311524, 0.06259305572509766, 0.06230435180664062, 0.062648193359375, 0.062384159088134765, 0.06237676620483398, 0.06256623840332032, 0.06278319931030274, 0.062470592498779294, 0.06249795150756836, 0.062408641815185545, 0.06258371353149414, 0.06256419372558594, 0.06281235122680665, 0.06330697631835938, 0.06355984115600585, 0.06335308837890626, 0.06290572738647461, 0.06279663848876953, 0.06251123046875, 0.06251222229003907, 0.06248336029052735, 0.06254524612426758, 0.06240105438232422, 0.06251939010620117, 0.06254915237426757, 0.06269347381591797, 0.06289452743530273, 0.06269782257080078, 0.06302719879150391, 0.06309068679809571, 0.06312543869018555, 0.06331193542480469, 0.06281932830810547, 0.06278860855102539, 0.06277939224243165, 0.06272204971313476, 0.06265161514282226, 0.06252764892578125, 0.06285347366333008, 0.06276240158081055, 0.0625488624572754, 0.06274451065063477, 0.06286460876464844, 0.06341263961791992, 0.0631234245300293, 0.06358883285522461, 0.0633403205871582, 0.06320140838623046, 0.06293513488769531, 0.06291449737548828, 0.0630081901550293, 0.06298892974853515, 0.06287974548339843, 0.06283254241943359, 0.06273443222045899, 0.0629227523803711, 0.0627691535949707, 0.06279145431518554, 0.0627366065979004, 0.06295756912231446, 0.06370281600952149, 0.06592515563964843, 0.06330569458007812, 0.0626583023071289, 0.0623741455078125, 0.06231196975708008, 0.06259759902954101, 0.06237388610839844, 0.0625602569580078, 0.06267289733886719, 0.06257459259033203, 0.06254387283325195, 0.06235955047607422, 0.06228790283203125, 0.062406623840332034, 0.06258655929565429, 0.06299673461914063, 0.06299251174926758, 0.06329087829589844, 0.06304812622070312, 0.06278963088989258, 0.06291865539550781, 0.06258668899536132, 0.06245590209960938, 0.06243337631225586, 0.062459903717041014, 0.0626954231262207, 0.06256639862060547, 0.0627916488647461, 0.06272380828857421, 0.06293731307983398, 0.06325667190551758, 0.06290227127075196, 0.06306406402587891, 0.06304358291625976, 0.06284902572631836, 0.06295964813232421, 0.06286659240722656, 0.06271673583984375, 0.06261264038085937, 0.06253449630737305, 0.06248857498168945, 0.0626250228881836, 0.06252742385864257, 0.06265734481811523, 0.06259711837768554, 0.06291660690307617, 0.06297600173950195, 0.06314191818237305, 0.06332406234741211, 0.06326406478881835, 0.06303615951538086, 0.06314585494995117, 0.06285529708862304, 0.06287753677368164, 0.06297820663452149, 0.06288793563842773, 0.06295913696289063, 0.06271433639526368, 0.06269535827636719, 0.06312531280517578, 0.0627775993347168, 
0.06275481414794921, 0.06323199844360351, 0.06604319763183594, 0.06331257629394531, 0.062470142364501956, 0.06219068908691406, 0.06220816040039062, 0.06225382232666016, 0.06221414566040039, 0.06243532943725586, 0.062494720458984375, 0.06254182434082031, 0.06248614501953125, 0.06255171203613281, 0.06238076782226563, 0.06243241500854492, 0.06261587142944336, 0.0633328971862793, 0.06352204895019531, 0.06316320037841797, 0.06294521713256836, 0.06266412734985352, 0.06258950424194336, 0.06258227157592773, 0.062474750518798826, 0.06256643295288086, 0.06254179382324218, 0.06260736083984375, 0.06284492874145507, 0.06285699081420898, 0.06339936065673828, 0.0632119026184082, 0.06322012710571288, 0.06327280044555664, 0.06319939041137695, 0.06301900863647461, 0.06291439819335938, 0.06316255950927735, 0.06279094314575195, 0.0627628173828125, 0.06264511871337891, 0.06263206481933593, 0.06272601699829101, 0.06267884826660156, 0.0627360954284668, 0.06310345458984375, 0.06299033737182617, 0.06303900909423828, 0.06326115036010742, 0.06328524780273438, 0.0632279052734375, 0.06309068679809571, 0.06300057601928712, 0.06315161514282226, 0.0630830078125, 0.06292070388793945, 0.06284441757202149, 0.06287411117553711, 0.06280806350708008, 0.06278348922729492, 0.06275276947021484, 0.0630374412536621, 0.06305324935913086, 0.06320393753051758, 0.06318281555175781]",tokens/s,15.911203425144073,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,888.38144,7981.62944,0.0,7579.107328,7559.873536,s,1,7.745095703125,7.745095703125,0.0,7.745095703125,7.745095703125,7.745095703125,7.745095703125,[7.745095703125],,kWh,6.6303864666754934e-06,7.237404054227987e-07,4.077225484011571e-06,1.1431352356109862e-05,,MB,1334.829056,8237.481984,0.0,7820.279808,7767.767552,s,10,0.8856558151245117,0.08856558151245117,0.002510672103965805,0.08896692657470703,0.09117545852661134,0.09200865821838379,0.09267521797180177,"[0.08367584228515625, 0.0898187484741211, 0.0879810562133789, 0.0859561309814453, 0.08665670776367188, 0.09099030303955079, 0.08957727813720703, 0.08980131530761719, 0.09284185791015626, 0.08835657501220703]",tokens/s,2890.5133984132394,kWh,2.68223593883772e-06,2.9579827668732497e-07,1.7783449496882694e-06,4.756379165213315e-06,tokens/kWh,53822454.24677342,MB,1360.125952,8300.396544,0.0,7883.194368,7838.569472,s,10,21.256050048828122,2.125605004882812,0.002778062475700249,2.124951293945313,2.1296044921875,2.1300098876953126,2.1303342041015627,"[2.122712158203125, 2.12374072265625, 2.128616943359375, 2.125531982421875, 2.125648681640625, 2.12437060546875, 2.1227275390625, 2.122771728515625, 2.130415283203125, 
2.129514404296875]",tokens/s,29.63862046583452,kWh,6.196280539907479e-05,6.834281052319041e-06,4.112388381651099e-05,0.00010992097026790482,tokens/kWh,573139.0456839426,,s,630,21.2530713233948,0.03373503384665837,0.00031616748962673614,0.033684080123901367,0.0339513801574707,0.03406387233734131,0.035570267219543455,"[0.03515020751953125, 0.034356319427490234, 0.033753055572509766, 0.03355507278442383, 0.03342979049682617, 0.03348070526123047, 0.03334137725830078, 0.03335558319091797, 0.03339651107788086, 0.03335318374633789, 0.033438720703125, 0.03337011337280273, 0.03330252838134766, 0.03337625503540039, 0.03347561645507813, 0.03359817504882812, 0.03364684677124023, 0.03342111968994141, 0.03349728012084961, 0.033500385284423825, 0.0335695686340332, 0.033495040893554685, 0.0335432014465332, 0.033584095001220705, 0.03370393753051758, 0.03360707092285156, 0.03362652969360352, 0.03364636611938476, 0.03373433685302735, 0.0337108154296875, 0.03381248092651367, 0.033908351898193356, 0.03385103988647461, 0.03409174346923828, 0.0340684814453125, 0.0337756462097168, 0.033689502716064454, 0.03353811264038086, 0.03364863967895508, 0.03371206283569336, 0.033697856903076175, 0.033768608093261716, 0.03363107299804687, 0.033553825378417966, 0.0336596794128418, 0.033715038299560546, 0.03369270324707031, 0.033725982666015626, 0.03376707077026367, 0.033597599029541014, 0.03363900756835937, 0.03377695846557617, 0.0339483528137207, 0.033726463317871096, 0.03375718307495117, 0.03390054321289063, 0.033750720977783207, 0.033737022399902346, 0.03379561614990234, 0.03380828857421875, 0.03381830215454101, 0.033895038604736326, 0.03396396636962891, 0.03557923126220703, 0.03450732803344726, 0.033984737396240236, 0.03372777557373047, 0.033562496185302736, 0.03358793640136719, 0.03345340728759766, 0.03340492630004883, 0.0334486083984375, 0.03346137619018555, 0.03347750473022461, 0.03347398376464844, 0.03350995254516602, 0.03343564987182617, 0.033396736145019534, 0.03342335891723633, 0.03354127883911133, 0.03338636779785156, 0.033491935729980465, 0.03346636962890625, 0.033713760375976565, 0.033492767333984375, 0.0335015983581543, 0.03355039978027344, 0.033519649505615236, 0.03357440185546875, 0.033510017395019534, 0.033568767547607424, 0.033842689514160154, 0.033806846618652346, 0.03386777496337891, 0.03388227081298828, 0.033928768157958984, 0.03391324615478516, 0.03370585632324219, 0.03350732803344727, 0.033691646575927735, 0.03363430404663086, 0.033638240814208985, 0.03365289688110352, 0.03357891082763672, 0.033640033721923826, 0.03366963195800781, 0.033742847442626955, 0.03381363296508789, 0.03399564743041992, 0.03371417617797851, 0.03366912078857422, 0.03365206527709961, 0.03367331314086914, 0.033648193359375, 0.03367200088500977, 0.03376761627197265, 0.03373056030273437, 0.0335912971496582, 0.033735969543457034, 0.03379417419433594, 0.03390022277832031, 0.034032318115234376, 0.033902080535888675, 0.03383801651000977, 0.03402633666992187, 0.03384812927246094, 0.035648159027099606, 0.034721790313720705, 0.034074462890625, 0.03389456176757812, 0.03382067108154297, 0.03351551818847656, 0.03352166366577149, 0.03343075180053711, 0.03360028839111328, 0.033982463836669925, 0.03381657409667969, 0.03359539031982422, 0.03378313446044922, 0.03346448135375977, 0.03353756713867188, 0.03351987075805664, 0.033551071166992186, 0.0336445426940918, 0.03353011322021485, 0.03364019012451172, 0.03360489654541016, 0.033565406799316404, 0.03372601699829102, 0.03346908950805664, 0.03370166397094727, 0.03372851181030274, 
0.033646656036376954, 0.033713184356689456, 0.0340079345703125, 0.03388524627685547, 0.03394863891601563, 0.03399190521240234, 0.03387472152709961, 0.03417891311645508, 0.03369910430908203, 0.03366099166870117, 0.033758014678955076, 0.03381043243408203, 0.033751041412353515, 0.03382400131225586, 0.03368217468261719, 0.033751041412353515, 0.033726463317871096, 0.03362815856933594, 0.03361996841430664, 0.033701889038085936, 0.03377529525756836, 0.03367497634887695, 0.03376758575439453, 0.03357331085205078, 0.0336732177734375, 0.033648414611816405, 0.03384751892089844, 0.03376073455810547, 0.033772350311279294, 0.03379785537719727, 0.033764480590820316, 0.03389939117431641, 0.03388761520385742, 0.03390310287475586, 0.03384649658203125, 0.03386608123779297, 0.03388630294799805, 0.03570671844482422, 0.03469587326049805, 0.034047744750976563, 0.033799934387207034, 0.03360409545898437, 0.03372236633300781, 0.03356467056274414, 0.033476608276367184, 0.03346150588989258, 0.03347737503051758, 0.03346432113647461, 0.03337011337280273, 0.03334096145629883, 0.033565120697021486, 0.033492225646972656, 0.033435550689697266, 0.03348160171508789, 0.03355852890014648, 0.033601184844970704, 0.03402070236206055, 0.03384179306030274, 0.03367695999145508, 0.03352835083007812, 0.03357027053833008, 0.03363913726806641, 0.03389654541015625, 0.03395369720458984, 0.033942783355712894, 0.03374281692504883, 0.033735454559326174, 0.03370182418823242, 0.0338015365600586, 0.03374975967407227, 0.03395123291015625, 0.033869407653808595, 0.03364134216308594, 0.03360361480712891, 0.03355648040771484, 0.03367107009887695, 0.0336280632019043, 0.03358329772949219, 0.03363020706176758, 0.033654529571533205, 0.03355878448486328, 0.033627201080322265, 0.033729248046875, 0.03368956756591797, 0.0335546875, 0.03357462310791016, 0.03368105697631836, 0.03380252838134765, 0.03367110443115234, 0.033638816833496094, 0.03372032165527344, 0.03367116928100586, 0.033685504913330076, 0.03386524963378906, 0.03380271911621094, 0.03384643173217773, 0.033759521484375, 0.03383763122558594, 0.033947776794433594, 0.03410316848754883, 0.035548320770263674, 0.03509743881225586, 0.03432447814941406, 0.03381452941894531, 0.033617919921875, 0.033382110595703125, 0.03351968002319336, 0.03365091323852539, 0.0336814079284668, 0.03345612716674805, 0.03349248123168945, 0.033501697540283204, 0.03361587142944336, 0.033546241760253906, 0.03345366287231445, 0.033449886322021484, 0.033632766723632815, 0.03359334564208984, 0.03355033493041992, 0.03376947021484375, 0.03377356719970703, 0.033613056182861326, 0.03368422317504883, 0.033573982238769534, 0.033532833099365236, 0.033710079193115236, 0.033734657287597655, 0.03353190231323242, 0.033694976806640624, 0.03360230255126953, 0.033718177795410156, 0.03382281494140625, 0.03377907180786133, 0.033834976196289064, 0.03375151824951172, 0.03366239929199219, 0.033771678924560546, 0.03370620727539062, 0.03361123275756836, 0.033761951446533205, 0.03373081588745117, 0.033568767547607424, 0.03359686279296875, 0.03357958221435547, 0.03353734588623047, 0.03372281646728516, 0.033618175506591796, 0.033585151672363284, 0.03358924865722656, 0.03383910369873047, 0.03373875045776367, 0.03370393753051758, 0.0337894401550293, 0.03371059036254883, 0.03369375991821289, 0.03382185745239258, 0.0337229118347168, 0.03372208023071289, 0.03380380630493164, 0.033865985870361326, 0.033970943450927736, 0.03401523208618164, 0.033860702514648434, 0.03560243225097656, 0.03485696029663086, 0.03421286392211914, 0.03370275115966797, 
0.033533153533935545, 0.03354009628295898, 0.03342563247680664, 0.03348070526123047, 0.03341897583007813, 0.03338956832885742, 0.03340595245361328, 0.03345068740844727, 0.033382720947265625, 0.03354828643798828, 0.033478656768798826, 0.03349235153198242, 0.03353235244750977, 0.033571006774902344, 0.03360768127441406, 0.033617919921875, 0.0335175666809082, 0.03349436950683594, 0.03349571228027344, 0.0335478401184082, 0.033597728729248044, 0.033691390991210934, 0.03361219024658203, 0.03363398361206055, 0.03365903854370117, 0.03374860763549805, 0.03377616119384766, 0.03382204818725586, 0.03378448104858398, 0.033701278686523437, 0.03367987060546875, 0.03362524795532226, 0.033683937072753904, 0.03382729721069336, 0.03367270278930664, 0.033673728942871094, 0.033617919921875, 0.03360768127441406, 0.033552383422851564, 0.03381452941894531, 0.03371027374267578, 0.0337342414855957, 0.03358499145507812, 0.03364662551879883, 0.03371033477783203, 0.03383100891113281, 0.033674816131591796, 0.033736671447753906, 0.033599647521972656, 0.03376131057739258, 0.03393769454956055, 0.03386067199707031, 0.03372127914428711, 0.03382601547241211, 0.033737503051757815, 0.033949790954589845, 0.03397526550292969, 0.03394022369384766, 0.03404000091552734, 0.0356212158203125, 0.03467055892944336, 0.03397430419921875, 0.033785888671875, 0.03369776153564453, 0.03351705551147461, 0.03349660873413086, 0.033641441345214844, 0.0335810546875, 0.033484798431396484, 0.03344179153442383, 0.03352371215820313, 0.03360156631469727, 0.033580734252929685, 0.03349737548828125, 0.03353408050537109, 0.03357628631591797, 0.03362169647216797, 0.033536865234375, 0.03354009628295898, 0.033642494201660156, 0.03361088180541992, 0.03352870559692383, 0.03353200149536133, 0.03353180694580078, 0.033492767333984375, 0.03355219268798828, 0.03350726318359375, 0.03358531188964844, 0.03367353439331055, 0.03372851181030274, 0.03376537704467773, 0.03381248092651367, 0.03368873596191406, 0.03366998291015625, 0.0336445426940918, 0.033686656951904294, 0.03363056182861328, 0.03361439895629883, 0.03350486373901367, 0.033673599243164064, 0.03372032165527344, 0.03368697738647461, 0.033555007934570315, 0.03358643341064453, 0.0336464958190918, 0.03363228988647461, 0.03365679931640625, 0.033766239166259766, 0.033656478881835934, 0.03357059097290039, 0.03357344055175781, 0.03381043243408203, 0.0336732177734375, 0.03371964645385742, 0.03372886276245117, 0.03367728042602539, 0.033702239990234376, 0.03372851181030274, 0.03382681655883789, 0.03376537704467773, 0.0338983039855957, 0.03384092712402344, 0.035306686401367186, 0.03472467041015625, 0.03400627136230469, 0.03368166351318359, 0.033538528442382816, 0.03351145553588867, 0.03352166366577149, 0.03357702255249023, 0.03346803283691406, 0.03336579132080078, 0.03343824005126953, 0.03344390487670899, 0.03339785766601563, 0.0334733772277832, 0.03347455978393555, 0.03346646499633789, 0.03351043319702148, 0.03350320053100586, 0.033573055267333986, 0.03344851303100586, 0.0335300178527832, 0.03347238540649414, 0.03351359939575195, 0.033484447479248045, 0.033546463012695316, 0.03343987274169922, 0.03367935943603516, 0.03353353500366211, 0.03371855926513672, 0.03367123031616211, 0.0336956787109375, 0.03380604934692383, 0.03374655914306641, 0.03371014404296875, 0.03370220947265625, 0.03365868759155274, 0.03370159912109375, 0.03354304122924805, 0.033587200164794925, 0.03354739379882812, 0.0337325439453125, 0.033651649475097654, 0.033643585205078125, 0.033606529235839844, 0.0337061767578125, 0.03363212966918945, 
0.03362611389160156, 0.033781440734863284, 0.03375500869750977, 0.033653182983398436, 0.033673057556152346, 0.033939582824707035, 0.033888126373291014, 0.03380752182006836, 0.033928192138671875, 0.03386172866821289, 0.033773601531982424, 0.033805313110351565, 0.03377638244628906, 0.03380028915405273, 0.033871902465820315, 0.034006656646728514, 0.03381667327880859, 0.03585116958618164, 0.03505900955200195, 0.03413452911376953, 0.03368979263305664, 0.033568767547607424, 0.03351551818847656, 0.033457504272460935, 0.033512161254882815, 0.03348988723754883, 0.033395679473876956, 0.033538047790527346, 0.03346432113647461, 0.033461311340332034, 0.03358758544921875, 0.03363488006591797, 0.033468414306640625, 0.033672447204589846, 0.03363488006591797, 0.03371392059326172, 0.03364220809936523, 0.03361251068115234, 0.03369526290893555, 0.033795680999755856, 0.03375807952880859, 0.03366092681884766, 0.03399884796142578, 0.03360358428955078, 0.03361491012573242, 0.03364140701293945, 0.03382886505126953, 0.0340582389831543, 0.03405619049072266, 0.03391897583007813, 0.03401523208618164, 0.03391897583007813, 0.03391385650634766, 0.034272254943847655, 0.03398451232910156, 0.03387907028198242, 0.0338493766784668, 0.03378441619873047, 0.03430640029907227, 0.03378176116943359, 0.03381248092651367, 0.03372236633300781, 0.03376937484741211, 0.03363980865478516, 0.03357974243164062, 0.03370393753051758, 0.033716224670410154, 0.03367116928100586, 0.03374899291992187, 0.03378176116943359, 0.03364777755737305, 0.03392803192138672, 0.033775711059570314, 0.033707935333251955, 0.034150241851806644, 0.033842464447021485, 0.03384819030761719, 0.033896446228027344, 0.03388985443115235, 0.03385184097290039, 0.035598400115966794, 0.03471356964111328, 0.033990367889404294, 0.033761249542236325, 0.03362384033203125, 0.03356703948974609, 0.033562110900878905, 0.033522720336914065, 0.033678462982177734, 0.033587329864501955, 0.03346255874633789, 0.03347270584106445, 0.03345388793945313, 0.03349331283569336, 0.0335728645324707, 0.03353190231323242, 0.033570816040039066, 0.0335728645324707, 0.03376332855224609, 0.03374515151977539, 0.03378464126586914, 0.03383596801757813, 0.033785728454589846, 0.033646175384521484, 0.03375158309936523, 0.033740798950195314, 0.03371212768554688, 0.03365792083740234, 0.03397727966308594, 0.0337756462097168, 0.033842464447021485, 0.03376403045654297, 0.033726463317871096, 0.033650688171386715, 0.03373875045776367, 0.033587200164794925, 0.03361142349243164, 0.03357727813720703, 0.033584449768066404, 0.03394595336914062, 0.03392470550537109, 0.03392729568481445, 0.03377417755126953, 0.03373267364501953, 0.03384524917602539, 0.03389215850830078, 0.03368364715576172, 0.03367926406860351, 0.03383305740356445, 0.03372761535644531, 0.033843742370605466, 0.03389680099487305, 0.03382271957397461, 0.033974369049072264, 0.03412144088745117, 0.03399046325683594, 0.03397577667236328, 0.033974590301513674, 0.033871681213378906, 0.0338889274597168, 0.03393088150024414, 0.034012737274169924, 0.03395270538330078]",tokens/s,29.64277446839008,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) 
Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 786, in __init__ self.model = RecurrentGemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 660, in __init__ [RecurrentGemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 660, in [RecurrentGemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 490, in __init__ self.mlp_block = RecurrentGemmaMlp(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/recurrent_gemma/modeling_recurrent_gemma.py"", line 472, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 96.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 142456 has 14.71 GiB memory in use. 
Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 717, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 188.12 MiB is free. Process 37460 has 14.55 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.45 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 755, in __init__ self.self_attn = MIXTRAL_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 349, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 72.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 38.12 MiB is free. Process 181977 has 14.70 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 41.65 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,811.667456,12523.077632,0.0,12127.830016,12122.08896,s,1,7.14426513671875,7.14426513671875,0.0,7.14426513671875,7.14426513671875,7.14426513671875,7.14426513671875,[7.14426513671875],,kWh,1.1392105812499171e-05,1.1714825149596346e-06,6.170004935999746e-06,1.8733593263458553e-05,,MB,1160.593408,12697.141248,0.0,12289.31072,12248.586752,s,10,1.8632452697753905,0.1863245269775391,0.0030487712772315654,0.18495963287353517,0.18985733184814454,0.19079213790893554,0.19153998275756837,"[0.18131852722167968, 0.18408697509765626, 0.18430841064453124, 0.18478448486328125, 0.185038818359375, 0.18488044738769532, 0.1880411834716797, 0.18940988159179686, 0.18964959716796875, 0.19172694396972656]",tokens/s,1373.9468665381885,kWh,5.6292487803686115e-06,6.203937768888961e-07,3.7304463604230814e-06,9.98008891768059e-06,tokens/kWh,25651074.064728413,MB,1211.277312,12705.529856,0.0,12297.699328,12248.589312,s,10,33.32199340820313,3.3321993408203125,0.005015908607296032,3.3335286865234375,3.3371913818359373,3.338296496582031,3.3391805883789063,"[3.3286611328125, 3.324007568359375, 3.325651611328125, 3.32798779296875, 3.33694580078125, 3.332620361328125, 3.33443701171875, 3.335543701171875, 3.33673681640625, 3.339401611328125]",tokens/s,18.906431925675495,kWh,9.750761309254812e-05,1.0755634699114251e-05,6.469694171477692e-05,0.0001729601895064393,tokens/kWh,364245.6693634376,,s,630,33.319364505767844,0.05288788016788543,0.0004587038709020352,0.05283310508728027,0.05316370964050293,0.05333808536529541,0.05552307079315186,"[0.05734841537475586, 0.05428630447387695, 0.05302272033691406, 0.052803585052490234, 0.052482048034667966, 0.052830142974853514, 0.052633663177490235, 0.052587711334228515, 0.0524637451171875, 0.05246214294433594, 0.0527402229309082, 0.05251686477661133, 0.05231766510009766, 0.05264028930664062, 0.05259014511108399, 0.05245491027832031, 0.05253011322021484, 0.05258975982666016, 0.05281465530395508, 0.05302259063720703, 0.05293683242797852, 0.053115966796875, 0.05283321762084961, 0.0528337287902832, 0.052582977294921875, 0.05254348754882812, 0.05244723129272461, 0.05250809478759766, 0.05246828842163086, 0.05251606369018555, 0.05244387054443359, 0.05254716873168945, 0.052545921325683594, 0.052770912170410154, 0.05246976089477539, 0.05261052703857422, 0.052816417694091795, 0.05272934341430664, 0.05267302322387695, 0.052926464080810545, 0.05300841522216797, 0.05309222412109375, 0.05288320159912109, 0.05312752151489258, 0.05267385482788086, 0.05265049743652344, 0.05257161712646485, 0.05254422378540039, 0.05251398468017578, 0.052732223510742186, 0.052750049591064455, 0.05285763168334961, 0.0526192626953125, 0.052596736907958984, 0.05299814224243164, 
0.053133312225341796, 0.05301248168945313, 0.053008384704589843, 0.053185569763183595, 0.05312364959716797, 0.052830623626708983, 0.053026817321777345, 0.05299817657470703, 0.05548448181152344, 0.053284832000732425, 0.052547584533691405, 0.05254553604125976, 0.05231206512451172, 0.052410369873046876, 0.0523570556640625, 0.05260003280639648, 0.05244185638427734, 0.052516960144042966, 0.052563488006591795, 0.05261052703857422, 0.05270140838623047, 0.05256070327758789, 0.052402145385742185, 0.05242060852050781, 0.05246944046020508, 0.052914497375488284, 0.05344054412841797, 0.052880638122558596, 0.05283299255371094, 0.052910079956054686, 0.05275568008422851, 0.0525709114074707, 0.052450366973876957, 0.0526894416809082, 0.052579967498779294, 0.052575008392333984, 0.05248819351196289, 0.052523006439208986, 0.052866943359375, 0.052637214660644534, 0.05254819107055664, 0.05251894378662109, 0.05272777557373047, 0.052932193756103516, 0.0526192626953125, 0.052969886779785154, 0.052868415832519534, 0.052938465118408204, 0.05284310531616211, 0.05270771026611328, 0.05265983963012695, 0.05270694351196289, 0.052591297149658205, 0.05292348861694336, 0.05270019149780274, 0.05257209777832031, 0.052678398132324215, 0.052604927062988284, 0.05276847839355469, 0.05260857772827148, 0.052548030853271484, 0.05295363235473633, 0.05277040100097656, 0.05268307113647461, 0.053071582794189456, 0.053051265716552734, 0.05296371078491211, 0.05288560104370117, 0.05303696060180664, 0.05309247970581055, 0.05286502456665039, 0.05526432037353515, 0.05338822555541992, 0.052539390563964845, 0.052602336883544924, 0.05231660842895508, 0.05246166229248047, 0.05253276824951172, 0.0525255355834961, 0.05254947280883789, 0.052652191162109375, 0.05240627288818359, 0.05260198211669922, 0.05257100677490235, 0.05269094467163086, 0.052540447235107424, 0.05246870422363281, 0.052400127410888675, 0.052761791229248046, 0.0529898567199707, 0.053182559967041014, 0.05284249496459961, 0.0528023681640625, 0.05271756744384765, 0.052582401275634766, 0.05264527893066406, 0.05265996932983399, 0.05255254364013672, 0.05270025634765625, 0.05258127975463867, 0.05261423873901367, 0.052507232666015625, 0.052660511016845706, 0.05255964660644531, 0.05250073623657227, 0.052703231811523435, 0.05269676971435547, 0.052642112731933595, 0.05308415985107422, 0.053036865234375, 0.05301264190673828, 0.052967105865478516, 0.052902240753173825, 0.05292851257324219, 0.052802974700927735, 0.05271612930297852, 0.052760318756103514, 0.05275222396850586, 0.05268521499633789, 0.052670463562011716, 0.05284249496459961, 0.05278668975830078, 0.05274211120605469, 0.05279510498046875, 0.05299897766113281, 0.05281766510009766, 0.052949153900146484, 0.052772960662841796, 0.05285683059692383, 0.05306316757202149, 0.05293423843383789, 0.052929439544677735, 0.05319683074951172, 0.05294895935058594, 0.055698974609375, 0.053991905212402345, 0.0527496337890625, 0.05255782318115235, 0.05243936157226563, 0.05262374496459961, 0.05244313430786133, 0.05244927978515625, 0.05232025527954102, 0.052580352783203124, 0.05256777572631836, 0.052698623657226565, 0.052515201568603516, 0.052680320739746093, 0.05251561737060547, 0.05245868682861328, 0.052410400390625, 0.052539520263671875, 0.052856639862060545, 0.05334102249145508, 0.052950336456298826, 0.052863681793212894, 0.05256806564331055, 0.05262745666503906, 0.05249433517456055, 0.052453216552734376, 0.05257436752319336, 0.05265340805053711, 0.052748958587646486, 0.05285270309448242, 0.05260086441040039, 0.05269094467163086, 0.05295308685302735, 
0.052938335418701174, 0.05277328109741211, 0.052655200958251956, 0.052963390350341796, 0.053392223358154293, 0.053134368896484374, 0.053040096282958984, 0.05305059051513672, 0.052873409271240235, 0.052744670867919924, 0.052678783416748046, 0.05265817642211914, 0.05271039962768555, 0.052724735260009765, 0.052717151641845705, 0.05267283248901367, 0.0527279052734375, 0.05267251205444336, 0.05266227340698242, 0.052770816802978515, 0.052940574645996094, 0.05282595062255859, 0.05314156723022461, 0.05293619155883789, 0.052910911560058595, 0.053065727233886716, 0.052992000579833984, 0.052864032745361327, 0.05309065628051758, 0.052932510375976564, 0.05588336181640625, 0.053460990905761716, 0.05261103820800781, 0.05272419357299805, 0.05245897674560547, 0.052644351959228515, 0.05269504165649414, 0.0526940803527832, 0.05314214324951172, 0.05280931091308594, 0.0525700798034668, 0.05292927932739258, 0.052811775207519535, 0.052730911254882815, 0.052653022766113285, 0.05261417770385742, 0.0525401611328125, 0.05300166320800781, 0.05324675369262695, 0.053320766448974606, 0.05295926284790039, 0.05284751892089844, 0.05267254257202148, 0.05276259231567383, 0.053007678985595705, 0.05275689697265625, 0.05264822387695312, 0.05278656005859375, 0.05271958541870117, 0.052789920806884764, 0.05295513534545898, 0.052924320220947264, 0.052908096313476566, 0.052676513671875, 0.052559009552001955, 0.053002849578857425, 0.053031295776367185, 0.053266433715820315, 0.05321900939941406, 0.05324582290649414, 0.05311328125, 0.052893695831298826, 0.052994049072265625, 0.05319680023193359, 0.05288332748413086, 0.052956958770751954, 0.052959583282470704, 0.05292201614379883, 0.05306777572631836, 0.05290422439575195, 0.05296953582763672, 0.05306777572631836, 0.052819103240966794, 0.05301129531860352, 0.053001537322998046, 0.05321798324584961, 0.05326972961425781, 0.05305219268798828, 0.05292230224609375, 0.052881473541259764, 0.05289516830444336, 0.05324857711791992, 0.05313536071777344, 0.05556966400146485, 0.05402220916748047, 0.05306256103515625, 0.052735904693603515, 0.052545440673828124, 0.052830398559570314, 0.05255372619628906, 0.05260287857055664, 0.05239603042602539, 0.05272576141357422, 0.05289779281616211, 0.052977664947509766, 0.052539295196533206, 0.05258393478393555, 0.05250313568115234, 0.05276793670654297, 0.05254022216796875, 0.05291212844848633, 0.052770816802978515, 0.05297078323364258, 0.0529087028503418, 0.05288534545898437, 0.05268502426147461, 0.05296297454833984, 0.052779361724853514, 0.05269094467163086, 0.052571807861328125, 0.052735774993896485, 0.052615745544433594, 0.05274211120605469, 0.05270735931396484, 0.052547584533691405, 0.05253324890136719, 0.05291417694091797, 0.05284793472290039, 0.05306233596801758, 0.05318000030517578, 0.05310915374755859, 0.05292806243896484, 0.052908031463623044, 0.05298425674438476, 0.05311283111572265, 0.05297151947021484, 0.053040447235107424, 0.052937408447265626, 0.05288729476928711, 0.05290390396118164, 0.0527690544128418, 0.052760574340820314, 0.05289315032958984, 0.0527913932800293, 0.05306320190429688, 0.05277993774414062, 0.05286092758178711, 0.05293670272827149, 0.05294233703613281, 0.05294956970214844, 0.05307795333862304, 0.052864734649658206, 0.053020030975341796, 0.05286800003051758, 0.05316396713256836, 0.05294838333129883, 0.05629574584960938, 0.05414918518066406, 0.052848865509033206, 0.05268035125732422, 0.05243328094482422, 0.052719615936279295, 0.052538654327392575, 0.052570846557617186, 0.05275027084350586, 0.052641281127929686, 0.05257049560546875, 
0.05262969589233398, 0.0526376953125, 0.05294480133056641, 0.05270662307739258, 0.05261187362670899, 0.052751937866210935, 0.05268320083618164, 0.05316543960571289, 0.053109375, 0.052817214965820314, 0.05296380615234375, 0.05279151916503906, 0.05297875213623047, 0.052994110107421874, 0.052822593688964845, 0.052657791137695316, 0.052656352996826174, 0.05268473434448242, 0.052685344696044925, 0.05274832153320313, 0.05288140869140625, 0.052623104095458985, 0.052781280517578126, 0.052918270111083986, 0.052780895233154296, 0.05281324768066406, 0.05293129730224609, 0.053026817321777345, 0.05315961456298828, 0.05281824111938477, 0.05294208145141602, 0.05309926223754883, 0.052983070373535154, 0.05284105682373047, 0.05286310577392578, 0.05293260955810547, 0.05304089736938476, 0.05287142562866211, 0.053053279876708985, 0.0529634895324707, 0.05285043334960938, 0.052728065490722655, 0.052836353302001954, 0.052985855102539066, 0.053133312225341796, 0.05303500747680664, 0.053141502380371096, 0.053083774566650394, 0.05291251373291016, 0.052989761352539064, 0.05286076736450195, 0.05306304168701172, 0.055521183013916016, 0.05359487915039062, 0.052647167205810544, 0.05256268692016602, 0.052618240356445314, 0.052716545104980465, 0.05269094467163086, 0.05266230392456055, 0.05250454330444336, 0.05264179229736328, 0.052672126770019534, 0.05286950302124024, 0.05277798461914063, 0.05290256118774414, 0.05316368103027344, 0.05268755340576172, 0.0525926399230957, 0.052875072479248046, 0.0531352653503418, 0.05334249496459961, 0.05294432067871094, 0.05297209548950195, 0.05281587219238281, 0.052803585052490234, 0.052674560546875, 0.05282815933227539, 0.05260902404785156, 0.05268431854248047, 0.05260745620727539, 0.052722686767578124, 0.05300735855102539, 0.05284249496459961, 0.05290111923217773, 0.05269580841064453, 0.0532022705078125, 0.053103263854980466, 0.052872318267822266, 0.05317631912231445, 0.053089153289794924, 0.05310464096069336, 0.05325619125366211, 0.05299932861328125, 0.05287974548339844, 0.05305392074584961, 0.05286896133422852, 0.05291974258422852, 0.053078079223632814, 0.05291689682006836, 0.05288345718383789, 0.05294044876098633, 0.052762943267822264, 0.05281795120239258, 0.05282815933227539, 0.0529714241027832, 0.052956993103027344, 0.0531583023071289, 0.05316908645629883, 0.05302150344848633, 0.05286515045166015, 0.053215232849121094, 0.05301174545288086, 0.05302470397949219, 0.05286515045166015, 0.05575183868408203, 0.053664512634277343, 0.05269855880737305, 0.05265478515625, 0.052402145385742185, 0.0527749137878418, 0.052463390350341796, 0.052744415283203124, 0.05274003219604492, 0.052666046142578124, 0.052636032104492185, 0.0526143684387207, 0.052679454803466794, 0.052784191131591794, 0.05313017654418945, 0.0530513916015625, 0.05279334259033203, 0.052864864349365236, 0.053299072265625, 0.05311308670043945, 0.053006366729736326, 0.05304115295410156, 0.05284592056274414, 0.05278787231445312, 0.05272371292114258, 0.0527988166809082, 0.0528430404663086, 0.05267836761474609, 0.05274988937377929, 0.0528798713684082, 0.052654430389404296, 0.052762496948242185, 0.0526091537475586, 0.052579647064208986, 0.05280223846435547, 0.05283225631713867, 0.052829345703125, 0.053010623931884764, 0.05293052673339844, 0.05302339172363281, 0.052850719451904296, 0.05307932662963867, 0.05294768142700195, 0.05288345718383789, 0.053000190734863284, 0.053096511840820315, 0.052958847045898434, 0.05302508926391602, 0.052780895233154296, 0.05298601531982422, 0.05307372665405274, 0.05300038528442383, 0.05291811370849609, 
0.05297782516479492, 0.05312054443359375, 0.05333449554443359, 0.05311283111572265, 0.05305452728271484, 0.05318547058105469, 0.05353881454467774, 0.053384929656982424, 0.053265697479248045, 0.05296844863891602, 0.05552384185791016, 0.053332321166992186, 0.052687007904052736, 0.052741119384765625, 0.052468734741210936, 0.05258195114135742, 0.052500926971435546, 0.05278307342529297, 0.05263552093505859, 0.052746112823486326, 0.052859169006347656, 0.052926464080810545, 0.05285657501220703, 0.05263385772705078, 0.052692928314208985, 0.052735198974609376, 0.05263241577148438, 0.052844192504882814, 0.05311936187744141, 0.05345593643188477, 0.053144481658935545, 0.052944862365722656, 0.052799518585205076, 0.05299574279785156, 0.05278726577758789, 0.05295270538330078, 0.05262931060791016, 0.05269171142578125, 0.05282972717285156, 0.052861503601074215, 0.05280767822265625, 0.05266636657714844, 0.05276671981811523, 0.05288345718383789, 0.052671775817871094, 0.05339619064331055, 0.05359004974365234, 0.05353606414794922, 0.05343913650512695, 0.05336262512207031, 0.05311699295043945, 0.053176158905029296, 0.05283855819702148, 0.05290979385375977, 0.052926624298095706, 0.05289139175415039, 0.05298732757568359, 0.05321350479125977, 0.053023361206054685, 0.0528438720703125, 0.052752063751220706, 0.05310976028442383, 0.05312211227416992, 0.053234592437744144, 0.05284659194946289, 0.05318041610717773, 0.0533256950378418, 0.05345289611816406, 0.05313049697875977, 0.053160736083984375, 0.053381118774414066, 0.05310259246826172, 0.05291215896606445]",tokens/s,18.90792364575088,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 510, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 201, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 94608 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.374912,806.289408,0.0,411.041792,391.374848,s,1,7.21324560546875,7.21324560546875,0.0,7.21324560546875,7.21324560546875,7.21324560546875,7.21324560546875,[7.21324560546875],,kWh,5.089960633351135e-06,5.542499003759846e-07,9.938896839908895e-07,6.63810021771801e-06,,MB,1164.005376,881.78688,0.0,473.956352,454.832128,s,15,0.18734287929534915,0.01248952528635661,0.000214234636375093,0.012487648010253906,0.012725933074951172,0.012832464122772218,0.012959427242279054,"[0.012574463844299317, 0.012199999809265137, 0.012470656394958497, 0.012207615852355956, 0.012267423629760741, 0.012245023727416992, 0.012525504112243652, 0.012668160438537598, 0.01249782371520996, 0.012487648010253906, 0.012633376121520996, 0.012485119819641113, 0.012991168022155762, 0.012324447631835938, 0.012764448165893555]",tokens/s,20497.176164065335,kWh,3.6605550480445295e-07,4.0369153673917844e-08,2.2922400883563672e-07,6.356486673140075e-07,tokens/kWh,402738199.8325455,MB,1197.735936,909.049856,0.0,501.219328,454.834688,s,15,10.493724060058597,0.6995816040039063,0.01224672635514582,0.7041112670898437,0.7111708374023438,0.7148606750488281,0.7194729162597656,"[0.6981555786132813, 0.6862251586914062, 0.6848902587890625, 0.6789227294921875, 0.6823243408203125, 0.687145263671875, 0.7206259765625, 0.7078164672851562, 0.7072764892578125, 0.7093423461914062, 0.7089010620117188, 0.7041112670898437, 0.6998896484375, 0.7123898315429688, 0.7057076416015625]",tokens/s,90.05382594315361,kWh,2.0434243422139875e-05,2.2535672322146246e-06,8.911857378097216e-06,3.1599668032451724e-05,tokens/kWh,1993691.830411043,,s,945,10.485675329208373,0.01109595272932103,0.00030792222763291303,0.011096672058105468,0.011409036827087403,0.011485177421569823,0.011998213195800777,"[0.01104911994934082, 0.011024031639099121, 0.011106495857238769, 0.011065312385559082, 0.01126137638092041, 0.011096672058105468, 0.010999808311462403, 0.010935615539550782, 0.010844544410705566, 0.010780351638793945, 0.010936927795410157, 0.010760383605957031, 0.010741888046264648, 0.010798815727233887, 0.01100595188140869, 0.010903552055358886, 0.010816960334777833, 0.010865216255187988, 0.010797408103942871, 0.010756928443908692, 0.010740575790405273, 0.01074995231628418, 0.011136544227600098, 0.011620832443237304, 0.011495424270629882, 0.01131935977935791, 0.011285568237304687, 0.01123136043548584, 0.011076064109802245, 0.011066944122314452, 0.010973119735717773, 0.01094320011138916, 0.010902912139892577, 0.010864800453186035, 0.010868351936340332, 0.010746784210205078, 0.011253824234008789, 0.011358112335205077, 0.011898271560668945, 0.011962623596191406, 0.011379039764404297, 0.011049087524414063, 
0.010927712440490723, 0.01104313564300537, 0.011003904342651367, 0.01093126392364502, 0.011457695960998535, 0.010942432403564453, 0.010966848373413086, 0.011017215728759766, 0.010966015815734862, 0.011309023857116698, 0.011564736366271972, 0.01138268756866455, 0.011343615531921387, 0.011266559600830077, 0.011426015853881836, 0.011126144409179688, 0.011018943786621094, 0.010956831932067871, 0.010886048316955567, 0.010931039810180664, 0.010751168251037597, 0.01044863986968994, 0.010719488143920899, 0.010649056434631347, 0.01068841552734375, 0.010758463859558105, 0.010770751953125, 0.010919936180114746, 0.010789024353027343, 0.01073523235321045, 0.010826016426086426, 0.010846143722534179, 0.010852352142333984, 0.01083801555633545, 0.010760191917419434, 0.0107741117477417, 0.01074022388458252, 0.01088092803955078, 0.011402912139892578, 0.011421759605407715, 0.011421919822692871, 0.011208767890930177, 0.0110447998046875, 0.01108572769165039, 0.011053183555603027, 0.011057184219360352, 0.01083407974243164, 0.010747743606567382, 0.010690560340881347, 0.010719231605529785, 0.01075814437866211, 0.01062502384185791, 0.010637151718139648, 0.010708831787109374, 0.010704256057739258, 0.010912704467773437, 0.010979328155517578, 0.01099078369140625, 0.010918047904968262, 0.010865632057189942, 0.010946368217468262, 0.010929216384887695, 0.010896479606628418, 0.010808256149291992, 0.01098198413848877, 0.010878944396972656, 0.010889439582824708, 0.011057151794433593, 0.011282431602478027, 0.011354111671447753, 0.01139737606048584, 0.011128576278686523, 0.011085215568542481, 0.011079968452453614, 0.010791007995605468, 0.010821663856506347, 0.010729280471801758, 0.010681056022644042, 0.010876031875610351, 0.01062332820892334, 0.010674367904663086, 0.010680319786071778, 0.010654879570007323, 0.010726143836975097, 0.0106397123336792, 0.010995231628417968, 0.010972960472106934, 0.011233632087707519, 0.011010272026062012, 0.011095968246459961, 0.010992863655090331, 0.011216927528381347, 0.011000672340393066, 0.010983424186706543, 0.010877951622009278, 0.010894335746765137, 0.011149312019348144, 0.011224191665649414, 0.011041407585144042, 0.010964703559875489, 0.010746399879455566, 0.010637663841247558, 0.01067155170440674, 0.010698975563049317, 0.010717023849487304, 0.010756256103515625, 0.0108373441696167, 0.010782431602478028, 0.010775487899780273, 0.010851840019226074, 0.010862688064575195, 0.010758560180664062, 0.01086019229888916, 0.010879327774047851, 0.010836095809936523, 0.010879039764404297, 0.010874752044677735, 0.010817472457885741, 0.01081884765625, 0.010820544242858887, 0.010900351524353027, 0.01079798412322998, 0.01074790382385254, 0.01074176025390625, 0.010674176216125488, 0.010721280097961425, 0.010739392280578613, 0.010749759674072266, 0.010780672073364257, 0.01095030403137207, 0.011154272079467774, 0.011109631538391113, 0.011065792083740235, 0.0111560640335083, 0.011046976089477538, 0.010964768409729004, 0.010825152397155762, 0.010770879745483398, 0.010755071640014649, 0.010667008399963379, 0.010804479598999023, 0.010699520111083985, 0.010690048217773437, 0.010725312232971192, 0.010682720184326172, 0.010695199966430664, 0.01063043212890625, 0.010497920036315918, 0.010758272171020508, 0.01084812831878662, 0.01076643180847168, 0.01075334358215332, 0.01064793586730957, 0.010676575660705566, 0.01073516845703125, 0.010676671981811523, 0.010854432106018067, 0.010921952247619628, 0.010878975868225099, 0.010799263954162598, 0.010745311737060547, 0.010667936325073242, 0.010664416313171386, 
0.01071718406677246, 0.010697728157043456, 0.01064633560180664, 0.010694016456604004, 0.010651488304138184, 0.010693568229675293, 0.010694208145141602, 0.01067363166809082, 0.010705951690673829, 0.010690655708312988, 0.010759903907775878, 0.010771807670593261, 0.010814271926879882, 0.010762335777282715, 0.010835871696472169, 0.010841664314270019, 0.011089344024658204, 0.011036767959594726, 0.0108307523727417, 0.010821632385253906, 0.010809632301330566, 0.011261055946350098, 0.01085910415649414, 0.010768383979797362, 0.010727295875549317, 0.01073964786529541, 0.01066966438293457, 0.010711711883544922, 0.010669631958007813, 0.010686847686767578, 0.010788864135742187, 0.010780672073364257, 0.010767904281616211, 0.010731904029846192, 0.010722720146179199, 0.010708767890930176, 0.010791999816894531, 0.010847519874572754, 0.010791296005249024, 0.01079097557067871, 0.010859904289245605, 0.010831968307495117, 0.010840736389160156, 0.010746111869812012, 0.010733311653137206, 0.01075814437866211, 0.010729663848876953, 0.010457759857177734, 0.010731391906738282, 0.010743136405944825, 0.010785375595092773, 0.010763456344604492, 0.010677056312561035, 0.010666048049926758, 0.010682111740112304, 0.010790271759033203, 0.01087168025970459, 0.010755423545837403, 0.010717087745666504, 0.010778719902038575, 0.011233951568603516, 0.010901535987854004, 0.010813407897949219, 0.010947872161865234, 0.010881759643554688, 0.010932448387145997, 0.010948479652404785, 0.010923359870910644, 0.010750240325927734, 0.010725024223327637, 0.011152000427246093, 0.011138272285461426, 0.010686592102050781, 0.01065231990814209, 0.01067024040222168, 0.010700639724731445, 0.011630592346191406, 0.010881024360656738, 0.010842111587524414, 0.010786751747131347, 0.010938431739807129, 0.010811391830444337, 0.010931296348571777, 0.010865887641906739, 0.010866463661193848, 0.01082096004486084, 0.010855072021484375, 0.010894880294799805, 0.010890656471252442, 0.010869728088378907, 0.01070899200439453, 0.010757599830627441, 0.010746208190917969, 0.010696160316467284, 0.010686207771301269, 0.010886207580566406, 0.010743935585021973, 0.010710111618041992, 0.010701567649841308, 0.010696640014648437, 0.01066105556488037, 0.010761024475097657, 0.010789055824279786, 0.011100192070007325, 0.010786591529846191, 0.010820639610290528, 0.010715519905090332, 0.01102460765838623, 0.010737407684326171, 0.010736255645751954, 0.010813952445983887, 0.011146623611450196, 0.010843232154846191, 0.010769824028015136, 0.010809727668762207, 0.010800224304199219, 0.010845151901245118, 0.010896672248840332, 0.010940735816955566, 0.011014495849609375, 0.010987520217895508, 0.01094976043701172, 0.01087993621826172, 0.010794943809509277, 0.010741503715515136, 0.010705151557922364, 0.010786815643310547, 0.01075820827484131, 0.010828736305236817, 0.010698080062866211, 0.010687520027160644, 0.010813632011413575, 0.010749600410461425, 0.01073027229309082, 0.010702848434448242, 0.010732959747314454, 0.010756799697875977, 0.010694239616394043, 0.01068889617919922, 0.011093952178955079, 0.010784640312194824, 0.010803327560424805, 0.010737088203430176, 0.010715904235839844, 0.010644831657409669, 0.010717663764953613, 0.010730719566345215, 0.01083471965789795, 0.010810720443725585, 0.010859487533569336, 0.01081107234954834, 0.010799103736877442, 0.010862144470214843, 0.010758591651916503, 0.010692831993103027, 0.010692543983459472, 0.010872672080993653, 0.011042240142822266, 0.011163871765136718, 0.011182784080505371, 0.011209792137145996, 0.011177727699279785, 
0.011141695976257325, 0.0110862398147583, 0.01116966438293457, 0.011372096061706543, 0.011176416397094727, 0.01115062427520752, 0.01117689609527588, 0.011197312355041504, 0.011204671859741212, 0.011212896347045899, 0.011098719596862794, 0.011572319984436035, 0.011252927780151366, 0.011436511993408203, 0.011271167755126953, 0.011287551879882812, 0.01120076847076416, 0.011609919548034668, 0.01349180793762207, 0.01287987232208252, 0.01152239990234375, 0.01136451244354248, 0.011394911766052247, 0.011286527633666991, 0.011564959526062011, 0.011374688148498536, 0.011431936264038087, 0.011382975578308106, 0.011333279609680176, 0.011485152244567871, 0.011306976318359375, 0.011460096359252929, 0.011399200439453125, 0.011693984031677247, 0.011389951705932617, 0.011428895950317383, 0.011506752014160156, 0.011342559814453125, 0.01187119960784912, 0.011423135757446289, 0.011309599876403809, 0.011347840309143067, 0.011371999740600587, 0.011391488075256348, 0.011335904121398926, 0.01136025619506836, 0.011308320045471191, 0.011335807800292969, 0.011286656379699707, 0.011336095809936523, 0.011331583976745606, 0.011313535690307618, 0.011157183647155762, 0.011183903694152833, 0.011530495643615723, 0.01118553638458252, 0.011270208358764649, 0.011323936462402344, 0.011409279823303222, 0.01146236801147461, 0.011372960090637207, 0.01141875171661377, 0.011410304069519043, 0.011312543869018555, 0.011222880363464355, 0.011207712173461915, 0.011255552291870117, 0.011171520233154297, 0.011385408401489258, 0.011320256233215331, 0.01142249584197998, 0.01143712043762207, 0.011305536270141602, 0.011267840385437012, 0.01099129581451416, 0.011155776023864746, 0.011012096405029297, 0.01113868808746338, 0.011151488304138184, 0.0111843843460083, 0.011106304168701172, 0.011167648315429688, 0.011253376007080079, 0.011305439949035644, 0.011089792251586914, 0.011315327644348144, 0.011237407684326172, 0.011183263778686524, 0.011149312019348144, 0.011125503540039062, 0.011338111877441406, 0.011343296051025391, 0.011342240333557128, 0.011418623924255371, 0.011331775665283202, 0.011313823699951172, 0.011190048217773437, 0.011138463973999023, 0.011123519897460937, 0.011363679885864258, 0.011389599800109864, 0.011231231689453124, 0.011895968437194824, 0.010958815574645996, 0.011047807693481446, 0.011148896217346192, 0.011245984077453614, 0.01125376033782959, 0.011280703544616699, 0.011279552459716798, 0.011327103614807129, 0.011361472129821778, 0.011388031959533692, 0.011409983634948731, 0.011214847564697266, 0.011165151596069336, 0.011143327713012696, 0.011190303802490234, 0.011106559753417969, 0.01111248016357422, 0.011040160179138184, 0.011199135780334473, 0.011200511932373047, 0.011243519783020019, 0.011611328125, 0.011419520378112794, 0.011431039810180664, 0.011313055992126465, 0.011175359725952148, 0.010944992065429688, 0.010928095817565918, 0.010939935684204101, 0.011167679786682129, 0.011417759895324706, 0.011245823860168458, 0.01115561580657959, 0.011192319869995117, 0.010995712280273438, 0.01122441577911377, 0.011407072067260743, 0.011357119560241699, 0.011495743751525879, 0.011495008468627929, 0.011298912048339844, 0.011280384063720703, 0.011234848022460938, 0.011164128303527832, 0.011122943878173829, 0.011105088233947754, 0.011102656364440917, 0.01113548755645752, 0.011032575607299805, 0.011053248405456543, 0.010995519638061524, 0.010946559906005859, 0.010966560363769532, 0.010967519760131836, 0.010989567756652833, 0.010992799758911133, 0.011024928092956543, 0.01202617645263672, 0.011493375778198242, 0.011376064300537109, 
0.011268671989440917, 0.011485183715820312, 0.011239423751831054, 0.0123985595703125, 0.011307040214538574, 0.011469056129455566, 0.011386624336242676, 0.011368608474731446, 0.011378496170043946, 0.011263456344604492, 0.01121951961517334, 0.011192319869995117, 0.011122688293457032, 0.01103667163848877, 0.010975232124328613, 0.010950528144836425, 0.010840448379516601, 0.01084832000732422, 0.010852031707763672, 0.010981023788452149, 0.011083647727966309, 0.011108192443847656, 0.011068032264709473, 0.01114521598815918, 0.011182080268859864, 0.011204607963562012, 0.01136025619506836, 0.011312416076660156, 0.011197152137756348, 0.011326815605163574, 0.011356831550598145, 0.01133561611175537, 0.011237343788146973, 0.011327232360839844, 0.01129916763305664, 0.011276288032531738, 0.011059200286865235, 0.010835712432861327, 0.011186431884765625, 0.011399359703063964, 0.011280192375183105, 0.011257439613342286, 0.011248031616210937, 0.01137664031982422, 0.01139673614501953, 0.011561344146728516, 0.011340031623840332, 0.011351807594299317, 0.011415552139282227, 0.011659263610839844, 0.011695679664611816, 0.011420096397399902, 0.01130726432800293, 0.011189408302307129, 0.011092063903808593, 0.01117852783203125, 0.011042783737182617, 0.01098464012145996, 0.011218879699707031, 0.011351167678833008, 0.011310848236083984, 0.011214847564697266, 0.011167743682861327, 0.011070879936218261, 0.01111017608642578, 0.011307840347290038, 0.01125532817840576, 0.011067872047424316, 0.011081248283386231, 0.011223520278930664, 0.011192319869995117, 0.01101414394378662, 0.010973279953002929, 0.011024288177490234, 0.01115135955810547, 0.011157088279724121, 0.011008416175842285, 0.010968928337097167, 0.010987648010253906, 0.011042847633361816, 0.011169695854187011, 0.011120896339416503, 0.011048640251159668, 0.011135328292846679, 0.01110313606262207, 0.011030559539794921, 0.01101296043395996, 0.010987551689147949, 0.010962944030761718, 0.011157504081726074, 0.011356287956237793, 0.011736031532287597, 0.012051360130310058, 0.011530367851257325, 0.01259712028503418, 0.011542528152465821, 0.011416895866394043, 0.011295424461364747, 0.01118723201751709, 0.011197728157043457, 0.011091551780700683, 0.011229696273803711, 0.011439871788024903, 0.01127609634399414, 0.011262463569641114, 0.011289759635925293, 0.011218751907348633, 0.01128275203704834, 0.011302656173706054, 0.011234399795532226, 0.011259200096130371, 0.011311840057373047, 0.01154201602935791, 0.01136415958404541, 0.011208255767822266, 0.011117600440979003, 0.011123616218566895, 0.011203392028808594, 0.011112000465393066, 0.01105174446105957, 0.011013919830322266, 0.010962271690368652, 0.011043359756469726, 0.010995424270629883, 0.010889856338500977, 0.010909152030944825, 0.011052895545959472, 0.010978015899658203, 0.011380479812622071, 0.011441920280456543, 0.011305343627929688, 0.011347200393676758, 0.011344160079956055, 0.011542880058288574, 0.011118656158447265, 0.011388863563537598, 0.011596927642822266, 0.011412575721740722, 0.011286304473876954, 0.011293760299682617, 0.011409503936767578, 0.011481792449951171, 0.011387040138244629, 0.011245023727416993, 0.011227328300476075, 0.011364704132080078, 0.01145036792755127, 0.01133568000793457, 0.01123737621307373, 0.011194368362426758, 0.011214847564697266, 0.011233247756958008, 0.011237407684326172, 0.011367456436157226, 0.011307392120361328, 0.011221887588500977, 0.011260992050170898, 0.011263808250427247, 0.011189087867736817, 0.011233280181884766, 0.011116095542907715, 0.011063424110412598, 
0.011079104423522949, 0.010767999649047852, 0.010945440292358399, 0.010964223861694336, 0.010966879844665527, 0.01101318359375, 0.011089759826660156, 0.01132953643798828, 0.011138751983642578, 0.011221088409423829, 0.011124959945678711, 0.01103872013092041, 0.01121008014678955, 0.011263903617858886, 0.011186016082763673, 0.011266176223754882, 0.011132960319519042, 0.011192288398742676, 0.011242431640625, 0.011206239700317382, 0.011083935737609863, 0.011008319854736328, 0.01102729606628418, 0.010988479614257812, 0.0109366397857666, 0.011249343872070312, 0.011360128402709961, 0.01129257583618164, 0.011498784065246582, 0.011520959854125977, 0.011290623664855956, 0.01121452808380127, 0.011243807792663573, 0.011202591896057129, 0.011096351623535157, 0.01106710433959961, 0.01099135971069336, 0.010990847587585449, 0.011031104087829589, 0.011129152297973633, 0.01108176040649414, 0.011298912048339844, 0.011208703994750976, 0.011146528244018555, 0.011084863662719727, 0.011103903770446778, 0.011155776023864746, 0.011342975616455078, 0.011397695541381836, 0.011343968391418458, 0.011261856079101563, 0.011204607963562012, 0.011218655586242675, 0.011323360443115235, 0.011333056449890137, 0.011299615859985351, 0.011040575981140137, 0.010975520133972167, 0.01091811180114746, 0.010937503814697266, 0.011178912162780762, 0.011408160209655762, 0.011390111923217773, 0.011337727546691894, 0.011051072120666503, 0.011112031936645507, 0.011070112228393554, 0.011136704444885254, 0.010982720375061036, 0.010898112297058105, 0.010849504470825196, 0.01094643211364746, 0.011166496276855469, 0.01102451229095459, 0.010778431892395019, 0.010739904403686523, 0.01118019199371338, 0.011407199859619141, 0.01139129638671875, 0.01145030403137207, 0.011411199569702149, 0.011441472053527833, 0.011251423835754394, 0.011129823684692383, 0.011181983947753906, 0.011085439682006835, 0.011002559661865235, 0.011032352447509766, 0.010959872245788574, 0.01076863956451416, 0.010812159538269042, 0.010827072143554687, 0.011067808151245117, 0.011442560195922852, 0.01150592041015625, 0.011335328102111816, 0.011306943893432618, 0.011147071838378906, 0.011100159645080567, 0.010934528350830077, 0.010952704429626465, 0.011068639755249023, 0.010965375900268555, 0.010866751670837402, 0.011052415847778321, 0.011061759948730468, 0.010787296295166015, 0.010821632385253906, 0.01073305606842041, 0.010627584457397461, 0.010682368278503418, 0.01123472023010254, 0.011379487991333008, 0.011349823951721192, 0.011260064125061035, 0.011258943557739258, 0.01130780792236328, 0.011544575691223144, 0.011243776321411133, 0.011101951599121094, 0.011242783546447754, 0.01134665584564209, 0.01136844825744629, 0.011286175727844239, 0.01112508773803711, 0.0108721923828125, 0.010924448013305664, 0.010645824432373047, 0.010887104034423828, 0.010950655937194824, 0.011018207550048828, 0.011208736419677734, 0.011189663887023926, 0.011245632171630859, 0.01142630386352539, 0.011228927612304688, 0.011171104431152344, 0.011117568016052246, 0.011016192436218262, 0.011216287612915038, 0.011459168434143066, 0.011472000122070312, 0.011723135948181152, 0.01140287971496582, 0.011362624168395997, 0.011343487739562988, 0.011295680046081544, 0.011384736061096192, 0.011240703582763672, 0.011266912460327148, 0.011290016174316407, 0.01105731201171875, 0.011125280380249023, 0.011456352233886719, 0.012119359970092773, 0.013842623710632324, 0.011282560348510742, 0.010936448097229004, 0.010832192420959473, 0.010807295799255372, 0.010979104042053222, 0.011178208351135253, 0.011069439888000488, 
0.010944512367248535, 0.01096291160583496, 0.01108137607574463, 0.011225024223327636, 0.01112723159790039, 0.01107148838043213, 0.011108448028564453, 0.01094883155822754, 0.010983424186706543, 0.011296544075012207, 0.011419551849365234, 0.011460576057434082, 0.011364383697509766, 0.011425248146057129, 0.011336511611938476, 0.011235039710998535, 0.011241472244262696, 0.011183648109436036, 0.011020959854125977, 0.011261247634887695, 0.011479552268981934, 0.011475008010864257, 0.012947392463684083, 0.012400959968566895, 0.01123078441619873, 0.011155232429504395, 0.011171199798583985, 0.011152480125427246, 0.011137951850891113, 0.011108223915100097, 0.011312416076660156, 0.011361056327819825, 0.011227519989013671, 0.011253439903259277, 0.011395071983337402, 0.011228896141052247, 0.010987808227539062, 0.0109683837890625, 0.010974911689758301, 0.011014431953430175, 0.011070176124572754, 0.011122688293457032, 0.011214912414550781, 0.011094112396240235, 0.011091103553771972, 0.01101484775543213, 0.010966272354125976, 0.011159584045410157, 0.01137235164642334, 0.011465023994445801, 0.011530879974365235, 0.01153657627105713, 0.011378111839294434, 0.011411808013916015, 0.011200480461120606, 0.011111712455749512, 0.011121472358703614, 0.010887200355529786, 0.010950559616088868, 0.011131168365478516, 0.011090911865234376, 0.011148032188415527, 0.01117199993133545, 0.011201696395874023, 0.011233983993530273, 0.011408672332763672, 0.011634464263916016, 0.011401408195495606, 0.01137331199645996, 0.011298208236694337, 0.011332159996032714, 0.011511839866638184, 0.011273568153381347, 0.011416223526000977, 0.011294719696044921, 0.011350048065185548, 0.011157343864440918, 0.011272319793701172, 0.011065119743347169, 0.010928352355957032, 0.010923295974731446, 0.010938431739807129, 0.01119324779510498, 0.011082880020141602, 0.011051872253417968, 0.01114038372039795, 0.011309568405151366, 0.010903136253356933, 0.010908063888549804, 0.01115116786956787]",tokens/s,90.1229506284307,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.472064,14274.199552,0.0,13878.951936,13865.632768,s,1,7.765259765625,7.765259765625,0.0,7.765259765625,7.765259765625,7.765259765625,7.765259765625,[7.765259765625],,kWh,1.3779381262509106e-05,1.4764872093362495e-06,7.662228351995592e-06,2.2918096823840947e-05,,MB,1187.766272,14697.824256,0.0,14289.993728,14241.298944,s,10,1.9902919158935548,0.19902919158935545,0.005375349645325746,0.20046517944335937,0.20273565673828123,0.20393392181396483,0.2048925338745117,"[0.18456034851074218, 0.19604205322265625, 0.19803517150878908, 0.1994424591064453, 0.2007923126220703, 0.20222332763671874, 0.20013804626464843, 0.20246937561035155, 0.20513218688964843, 
0.20145663452148438]",tokens/s,1286.2434799423236,kWh,5.892661629166924e-06,6.496177779984675e-07,3.9293142545601675e-06,1.047159366172556e-05,tokens/kWh,24447090.697922964,MB,1210.658816,14865.596416,0.0,14457.765888,14413.156352,s,10,41.13065380859375,4.113065380859375,0.008199940178961002,4.114877197265625,4.122566162109375,4.123821899414062,4.124826489257813,"[4.101380859375, 4.10262841796875, 4.10452734375, 4.10723095703125, 4.11317529296875, 4.1165791015625, 4.11658154296875, 4.121185546875, 4.122287109375, 4.12507763671875]",tokens/s,15.317043170083748,kWh,0.00012011495879833228,1.3249209875758396e-05,7.958091922023958e-05,0.00021294508789433027,tokens/kWh,295850.9192344577,,s,630,41.126564216613794,0.06528026066129171,0.00045338844554255027,0.06525511932373046,0.06575869522094727,0.06594753646850586,0.06722484588623047,"[0.06688768005371094, 0.0653589096069336, 0.06472592163085937, 0.0646266860961914, 0.06463184356689453, 0.06459696197509765, 0.06469532775878906, 0.06461334228515625, 0.06452194976806641, 0.0645348129272461, 0.06508707427978516, 0.06486466979980468, 0.06461350250244141, 0.06462886047363281, 0.06497321319580078, 0.0652353286743164, 0.06527587127685547, 0.065017822265625, 0.06500313568115235, 0.06471926116943359, 0.06495184326171875, 0.06478665924072266, 0.06488703918457031, 0.0648458251953125, 0.06476387023925781, 0.06487042999267578, 0.06487654113769531, 0.06481430053710938, 0.06491593933105469, 0.06508985900878907, 0.06543974304199218, 0.0653148193359375, 0.06519344329833984, 0.06514332580566407, 0.06540220642089843, 0.06521878051757812, 0.06534803009033203, 0.06488880157470703, 0.06495030212402343, 0.06569369506835937, 0.06514482879638672, 0.06507724761962891, 0.06527356719970703, 0.06526595306396485, 0.06537529754638671, 0.06530758666992187, 0.06532918548583984, 0.06528809356689454, 0.06539884948730469, 0.06524720001220703, 0.06525257873535156, 0.06532908630371094, 0.06523792266845703, 0.0654130859375, 0.06503977966308594, 0.06519667053222657, 0.065091552734375, 0.0650785903930664, 0.06526412963867187, 0.06522393798828124, 0.06533010864257813, 0.06534143829345704, 0.0655376968383789, 0.06706604766845703, 0.0655789794921875, 0.06449359893798828, 0.06445193481445312, 0.06458777618408203, 0.06454319763183594, 0.0646362533569336, 0.06454262542724609, 0.06444127655029297, 0.06461849975585937, 0.06464511871337891, 0.06460415649414063, 0.06521218872070313, 0.06493001556396484, 0.06509772491455078, 0.06539266967773437, 0.06538851165771484, 0.06515302276611327, 0.06483660888671874, 0.06467276763916016, 0.06472492980957031, 0.06475167846679687, 0.06474931335449219, 0.06476534271240235, 0.0649277114868164, 0.06512115478515625, 0.06497689819335938, 0.06477401733398437, 0.06499894714355468, 0.0650451202392578, 0.06520829010009765, 0.06542131042480469, 0.06580429077148438, 0.06508748626708985, 0.06525468444824219, 0.06502473449707032, 0.06510591888427734, 0.0649233627319336, 0.06504889678955078, 0.06507929229736328, 0.06504035186767577, 0.06524269104003906, 0.06511660766601562, 0.06510358428955078, 0.0651833953857422, 0.06536851501464844, 0.06554803466796875, 0.06540684509277343, 0.0655038070678711, 0.06531276702880859, 0.06527740478515626, 0.06528797149658203, 0.06524390411376953, 0.06513452911376953, 0.06530368041992188, 0.06523385620117188, 0.0651878433227539, 0.065506591796875, 0.06527980804443359, 0.06539907073974609, 0.06554483032226563, 0.0657940444946289, 0.06553600311279296, 0.06737567901611329, 0.06539405059814453, 0.06488127899169922, 0.0645711669921875, 
0.0648911361694336, 0.06461030578613282, 0.0646121597290039, 0.06468624114990235, 0.0646176986694336, 0.06475615692138671, 0.06463299560546874, 0.0644935073852539, 0.06460838317871094, 0.06470873260498047, 0.06521446228027344, 0.06551760101318359, 0.06529430389404296, 0.06512985229492188, 0.0649591064453125, 0.06519308471679687, 0.06478265380859374, 0.06483411407470703, 0.06483763122558593, 0.06497478485107422, 0.0647496337890625, 0.06488835144042969, 0.06473571014404297, 0.06473113250732422, 0.06510797119140625, 0.06515007781982422, 0.06530258941650391, 0.06550611114501953, 0.06550732421875, 0.06520809936523438, 0.06514089965820312, 0.06483116912841796, 0.06506739044189454, 0.06518169403076172, 0.06491340637207031, 0.06493593597412109, 0.06508099365234375, 0.06481308746337891, 0.06491986846923828, 0.0650890884399414, 0.06516297912597656, 0.0655101089477539, 0.06561532592773438, 0.06540956878662109, 0.06574479675292969, 0.06530262756347656, 0.06517056274414063, 0.06522073364257812, 0.06521910095214843, 0.06528594970703125, 0.06535004425048828, 0.06526499176025391, 0.06539126586914062, 0.06515711975097656, 0.06522390747070313, 0.0655565414428711, 0.0665013427734375, 0.06575862121582031, 0.06585814666748047, 0.06715280151367188, 0.06544179534912109, 0.06503628540039062, 0.06464102172851563, 0.0646737289428711, 0.06466764831542969, 0.06465340423583985, 0.06470150756835938, 0.06468685150146485, 0.06462592315673828, 0.06469110107421874, 0.06471820831298829, 0.06480140686035156, 0.06497702026367187, 0.06532406616210938, 0.06555734252929687, 0.06548252868652343, 0.06522207641601563, 0.06510262298583984, 0.06478230285644532, 0.06479440307617187, 0.06468019104003907, 0.06480076599121094, 0.0648740463256836, 0.06481881713867188, 0.06490767669677734, 0.06485443115234375, 0.06513664245605469, 0.06508287811279297, 0.06518985748291016, 0.06558979034423829, 0.06535987091064453, 0.0654028778076172, 0.06580633544921875, 0.0653985595703125, 0.06500784301757813, 0.06503823852539062, 0.06486605072021484, 0.06498544311523438, 0.06499132537841797, 0.06516726684570312, 0.06509363555908203, 0.06518169403076172, 0.06511980438232422, 0.06542176055908203, 0.06553314971923828, 0.06557366180419921, 0.06539398193359375, 0.065635009765625, 0.06539059448242188, 0.06529222106933594, 0.06523091125488281, 0.0652465591430664, 0.06516598510742187, 0.0652262420654297, 0.06542582702636719, 0.06529430389404296, 0.06525555419921875, 0.06548617553710938, 0.06556237030029297, 0.06580521392822265, 0.06560972595214844, 0.06624050903320312, 0.06744220733642578, 0.06557148742675781, 0.06496630096435548, 0.0646654052734375, 0.06454496002197266, 0.06466012573242187, 0.06463203430175782, 0.06473369598388672, 0.06467929840087891, 0.06479964447021484, 0.06580633544921875, 0.06494617462158203, 0.06483753967285157, 0.06493193817138672, 0.06500064086914062, 0.06536585235595703, 0.06531747436523437, 0.06526374053955078, 0.06513279724121093, 0.06482943725585938, 0.06487245178222656, 0.06498099517822266, 0.06481919860839844, 0.06488473510742188, 0.0649318389892578, 0.06490716552734375, 0.06483891296386719, 0.06512521362304688, 0.06495027160644531, 0.0652735366821289, 0.06539910125732422, 0.06547014617919922, 0.06551570892333984, 0.06560781097412109, 0.06541311645507812, 0.06504857635498047, 0.06523280334472656, 0.06510128021240234, 0.06515776062011719, 0.0652081298828125, 0.06574713897705078, 0.06526361846923828, 0.0653755874633789, 0.06538009643554688, 0.06562438201904297, 0.06562060546875, 0.06575305938720703, 0.06556671905517578, 
0.06570393371582031, 0.06556192016601563, 0.0653790054321289, 0.06529369354248046, 0.06561219024658203, 0.06528227233886719, 0.06577356719970703, 0.0653864974975586, 0.06535753631591797, 0.06555471801757813, 0.06574694061279297, 0.06564371490478516, 0.06572319793701172, 0.06572637176513672, 0.0657652130126953, 0.06747750091552734, 0.06561996459960938, 0.06490876770019531, 0.06489888000488281, 0.06483017730712891, 0.06473932647705079, 0.06481510162353515, 0.06484786987304687, 0.06470188903808594, 0.06469058990478516, 0.06496012878417969, 0.06480732727050781, 0.06488451385498047, 0.06496806335449219, 0.06563645172119141, 0.06619840240478515, 0.06566502380371093, 0.06528585815429687, 0.0651328353881836, 0.06496988677978516, 0.06477664184570313, 0.06488105773925781, 0.06492070770263672, 0.0649716796875, 0.06507068634033203, 0.06515545654296875, 0.06492313385009765, 0.06508780670166016, 0.06519417572021484, 0.06551065826416015, 0.06601321411132813, 0.06557360076904296, 0.06541926574707031, 0.06537014770507812, 0.06519350433349609, 0.06511251068115234, 0.06536809539794922, 0.065244384765625, 0.06512102508544922, 0.06525718688964843, 0.06529164886474609, 0.06518415832519531, 0.06531737518310547, 0.06520012664794922, 0.06549504089355469, 0.06563024139404297, 0.06569945526123047, 0.06567155456542968, 0.06583293151855468, 0.0653680648803711, 0.06572646331787109, 0.065617919921875, 0.0655257568359375, 0.06546227264404297, 0.06581247711181641, 0.06568946838378906, 0.06548492431640625, 0.06545986938476563, 0.06546435546875, 0.06545030212402343, 0.06598451232910156, 0.06579750061035156, 0.06576396942138672, 0.0672542724609375, 0.0656096954345703, 0.06496192169189453, 0.06472767639160157, 0.06465126037597656, 0.06480451202392579, 0.06479087829589844, 0.06474092864990234, 0.06479837036132813, 0.06467174530029297, 0.06484166717529297, 0.06471670532226563, 0.06477426910400391, 0.06474396514892578, 0.06559734344482422, 0.065474365234375, 0.06551404571533204, 0.0652452163696289, 0.0653967056274414, 0.06493132781982422, 0.0651514892578125, 0.06501286315917969, 0.06515532684326172, 0.0650244140625, 0.06516553497314453, 0.06494822692871094, 0.06515449523925781, 0.0650505599975586, 0.06535395050048828, 0.06574956512451172, 0.0657113265991211, 0.06560157012939453, 0.06572707366943359, 0.06553097534179687, 0.06549801635742188, 0.0652943344116211, 0.06530035400390626, 0.06513062286376953, 0.06535686492919922, 0.06528070068359375, 0.06540438079833985, 0.06518863677978516, 0.06537792205810547, 0.06551181030273437, 0.06573868560791016, 0.06567123413085937, 0.06559334564208984, 0.06573670196533203, 0.06581238555908203, 0.06545801544189453, 0.06527922821044922, 0.06514176177978516, 0.06545315551757812, 0.0652747802734375, 0.06555033874511719, 0.06536969757080079, 0.0655294418334961, 0.06544255828857422, 0.06558060455322266, 0.06575081634521485, 0.0658746566772461, 0.06610908508300781, 0.0657899169921875, 0.06775190734863282, 0.06590262603759765, 0.06522672271728516, 0.06472499084472656, 0.064753662109375, 0.06484130859375, 0.06490902709960937, 0.06479328155517577, 0.06489907073974609, 0.06527177429199219, 0.06489295959472656, 0.06478972625732422, 0.06481158447265625, 0.06518169403076172, 0.06533519744873047, 0.0654830093383789, 0.06574457550048828, 0.06567769622802734, 0.06541417694091797, 0.06509164428710937, 0.06494636535644531, 0.0650159683227539, 0.06520480346679687, 0.06508544158935547, 0.06519580841064453, 0.06513072204589844, 0.0650505599975586, 0.06538041687011718, 0.06518716430664062, 0.06530319976806641, 
0.06554217529296875, 0.0657828140258789, 0.06559142303466797, 0.06582701110839843, 0.0653359375, 0.06535987091064453, 0.06640435028076172, 0.06528147125244141, 0.06522528076171875, 0.06535561370849609, 0.06511756896972656, 0.06566172790527344, 0.06533529663085938, 0.06544300842285156, 0.06554300689697265, 0.06544380950927735, 0.06611148834228515, 0.06583881378173828, 0.06558134460449219, 0.06561795043945312, 0.06554569244384766, 0.06561023712158204, 0.06540624237060547, 0.06537904357910156, 0.06547049713134766, 0.06545760345458984, 0.06544233703613281, 0.06537830352783203, 0.06542950439453125, 0.06567526245117188, 0.0656527328491211, 0.06594319915771485, 0.06604428863525391, 0.0673634262084961, 0.06581206512451172, 0.06515158081054688, 0.06481100463867187, 0.06483708953857421, 0.06468617248535156, 0.06494214630126953, 0.06487197113037109, 0.06490402984619141, 0.0654233627319336, 0.06484105682373047, 0.06487110137939453, 0.06503347015380859, 0.06497917175292969, 0.0651817626953125, 0.06543389129638671, 0.0657020492553711, 0.06557449340820312, 0.06529065704345703, 0.06503628540039062, 0.064997314453125, 0.06498297882080079, 0.0650466537475586, 0.06523289489746094, 0.06522182464599609, 0.06523372650146485, 0.06532278442382812, 0.0650071029663086, 0.06498377227783203, 0.0655175323486328, 0.06573638153076172, 0.06569760131835937, 0.06574748992919922, 0.06573465728759766, 0.06554214477539062, 0.06538444519042969, 0.06534963226318359, 0.0651325454711914, 0.06543292999267578, 0.06554051208496094, 0.06532470703125, 0.0654546890258789, 0.06535987091064453, 0.06573465728759766, 0.06547993469238281, 0.06552243041992188, 0.06599593353271484, 0.06598278045654297, 0.06605007934570313, 0.06588262176513672, 0.06598860931396484, 0.06574899291992188, 0.06547660827636718, 0.06551532745361328, 0.06557328033447266, 0.0655398712158203, 0.06539469146728516, 0.06532466888427735, 0.06553826904296875, 0.06557097625732422, 0.06604393768310547, 0.0657735366821289, 0.06596598052978515, 0.06756524658203125, 0.06577455902099609, 0.0651182098388672, 0.06502371215820313, 0.0649136962890625, 0.06488790130615234, 0.06487133026123047, 0.065074462890625, 0.06495919799804688, 0.06495846557617188, 0.06493798065185546, 0.06492774200439454, 0.06509158325195312, 0.06503977966308594, 0.06561251068115234, 0.06659059143066406, 0.06583296203613281, 0.065512451171875, 0.06530764770507813, 0.06511366271972656, 0.06510636901855468, 0.0654028778076172, 0.06491913604736328, 0.06517504119873047, 0.06513062286376953, 0.06566095733642578, 0.06507997131347656, 0.06512387084960937, 0.06542189025878906, 0.06552783966064453, 0.06614422607421876, 0.06560959625244141, 0.06575936126708984, 0.0655946273803711, 0.06543846130371093, 0.06510387420654297, 0.06534143829345704, 0.0652943344116211, 0.06520003509521484, 0.06519391632080078, 0.0654109115600586, 0.06519609832763672, 0.06540927886962891, 0.06536713409423828, 0.06594239807128906, 0.06594489288330078, 0.06595855712890625, 0.06581584167480468, 0.06581123352050781, 0.06551142120361328, 0.06539059448242188, 0.0654172134399414, 0.06541516876220703, 0.06560562896728515, 0.06551145935058594, 0.0654620132446289, 0.06562019348144531, 0.06601113891601562, 0.06582886505126953, 0.06568345642089844, 0.06599680328369141, 0.06605619049072266, 0.06594969940185547]",tokens/s,15.318566284355471,, 
float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 891, in __init__ self.model = OPTModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 823, in __init__ self.decoder = OPTDecoder(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in __init__ self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 606, in <listcomp> self.layers = nn.ModuleList([OPTDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 374, in __init__ self.self_attn = OPT_ATTENTION_CLASSES[config._attn_implementation](config=config, is_decoder=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/opt/modeling_opt.py"", line 116, in __init__ self.v_proj = nn.Linear(self.embed_dim, self.embed_dim, bias=self.enable_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 120495 has 14.73 GiB memory in use. Of the allocated memory 14.61 GiB is allocated by PyTorch, and 3.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,783.03232,14129.496064,0.0,13734.248448,13728.777216,s,1,7.25836767578125,7.25836767578125,0.0,7.25836767578125,7.25836767578125,7.25836767578125,7.25836767578125,[7.25836767578125],,kWh,6.925197970815589e-06,7.436715488168721e-07,3.4991694660008177e-06,1.1168038985633278e-05,,MB,1233.65376,14144.176128,0.0,13736.3456,13487.53408,s,10,1.6703685150146486,0.16703685150146486,0.004848025152046232,0.166406494140625,0.1731939682006836,0.17395778579711915,0.17456883987426758,"[0.1601844482421875, 0.16687123107910157, 0.16592703247070312, 0.16594175720214843, 0.1653955841064453, 0.17062611389160157, 0.17302423095703126, 0.16906687927246095, 0.1747216033935547, 0.15860963439941406]",tokens/s,1532.5959373567034,kWh,4.842775066940069e-06,5.340705179180566e-07,3.2104898179343525e-06,8.587335402792478e-06,tokens/kWh,29811342.86623444,MB,1266.315264,14144.176128,0.0,13736.3456,13661.262848,s,10,38.00502197265625,3.800502197265625,0.0021377293039880367,3.8004615478515626,3.802883837890625,3.8029917236328123,3.8030780322265625,"[3.797611572265625, 3.799089599609375, 3.79715478515625, 3.800767578125, 3.7989755859375, 3.802486083984375, 3.80285986328125, 3.800155517578125, 3.803099609375, 3.80282177734375]",tokens/s,16.5767566310913,kWh,0.00011113897474181071,1.225885846887993e-05,7.404023864926565e-05,0.00019743807185995625,tokens/kWh,319087.3948803866,,s,630,38.00184012985231,0.06032038115849571,0.00030563377178528095,0.06028327941894531,0.060649496841430665,0.06078943614959717,0.06154451679229736,"[0.061326976776123046, 0.06069696044921875, 0.05997571182250976, 0.05996540832519531, 0.05980716705322266, 0.05988336181640625, 0.05981798553466797, 0.059936576843261716, 0.05995203018188477, 0.06001049423217773, 0.06030092620849609, 0.060076446533203126, 0.06005942535400391, 0.06000864028930664, 0.05992784118652344, 0.060072158813476564, 0.06029129409790039, 0.060499969482421874, 0.060373279571533205, 0.06006143951416015, 
0.060006656646728516, 0.05996748733520508, 0.059934688568115235, 0.060106273651123046, 0.060013057708740235, 0.060053150177001954, 0.059961151123046875, 0.05996723175048828, 0.060147552490234374, 0.06008329772949219, 0.060262462615966794, 0.060464927673339844, 0.060391422271728515, 0.06104643249511719, 0.060432415008544925, 0.06025564956665039, 0.06032681655883789, 0.06093619155883789, 0.060047359466552735, 0.06011423873901367, 0.060121791839599606, 0.060200065612792966, 0.06007628631591797, 0.06010275268554687, 0.06027727890014648, 0.06037664031982422, 0.060262847900390625, 0.06050406265258789, 0.06056735992431641, 0.06038256072998047, 0.060424190521240234, 0.060897537231445316, 0.060754081726074216, 0.06042464065551758, 0.060372577667236325, 0.0603939208984375, 0.060385246276855466, 0.06107353591918945, 0.060340255737304685, 0.060596065521240236, 0.06022553634643555, 0.060540351867675785, 0.0604218864440918, 0.061515777587890626, 0.060631038665771485, 0.06007807922363281, 0.06035865783691406, 0.0599818229675293, 0.05992451095581055, 0.05990979385375977, 0.05995100784301758, 0.06000051116943359, 0.05997395324707031, 0.059983711242675784, 0.060208480834960935, 0.06007875061035156, 0.06015180969238281, 0.06005347061157226, 0.060295230865478514, 0.06056547164916992, 0.06062694549560547, 0.06047110366821289, 0.06025558471679687, 0.060257118225097654, 0.06012527847290039, 0.05998729705810547, 0.059963424682617186, 0.06005574417114258, 0.06000060653686524, 0.05994249725341797, 0.06014812850952148, 0.06028076934814453, 0.06040582275390625, 0.06037116622924805, 0.06022121429443359, 0.06073747253417969, 0.060631168365478515, 0.060424129486083986, 0.060487873077392576, 0.06032729721069336, 0.06042259216308594, 0.06015375900268555, 0.06018182373046875, 0.06021200180053711, 0.060558719635009764, 0.06020156860351562, 0.06027676773071289, 0.06017228698730469, 0.06028054428100586, 0.06040518569946289, 0.0605030403137207, 0.060393310546875, 0.06059132766723633, 0.06043084716796875, 0.060631072998046875, 0.060323776245117186, 0.06040198516845703, 0.06034966278076172, 0.06037190246582031, 0.06047711944580078, 0.06032400131225586, 0.06036479949951172, 0.060298942565917966, 0.06029708862304688, 0.060413921356201175, 0.06036332702636719, 0.0618741455078125, 0.06078464126586914, 0.06020297622680664, 0.05995219039916992, 0.05990291213989258, 0.059876384735107424, 0.05986812973022461, 0.059939903259277345, 0.05986358261108399, 0.059967456817626955, 0.0599617919921875, 0.06001663970947266, 0.059908096313476565, 0.0599736328125, 0.06014976119995117, 0.06037641525268555, 0.06065430450439453, 0.06062278366088867, 0.060490848541259766, 0.06039440155029297, 0.06016022491455078, 0.060071712493896486, 0.05994316864013672, 0.06003244781494141, 0.0599513931274414, 0.060032863616943356, 0.059991233825683596, 0.060148735046386716, 0.060080127716064455, 0.05994035339355469, 0.05990655899047852, 0.05999411010742187, 0.06056675338745117, 0.060693023681640625, 0.06043423843383789, 0.06067868804931641, 0.060419361114501954, 0.060299903869628906, 0.06020505523681641, 0.06013558578491211, 0.06008816146850586, 0.060227489471435545, 0.06002495956420899, 0.060098529815673825, 0.060493824005126956, 0.06043033599853516, 0.06028083038330078, 0.06022553634643555, 0.06056537628173828, 0.06067007827758789, 0.06062080001831055, 0.060568737030029296, 0.060599136352539065, 0.06054502487182617, 0.060374942779541016, 0.06028294372558594, 0.060323871612548825, 0.06033772659301758, 0.06032223892211914, 0.060232929229736325, 0.060427040100097654, 
0.06044569778442383, 0.06020764923095703, 0.06155625534057617, 0.061092353820800784, 0.06032179260253906, 0.06014710235595703, 0.059920543670654296, 0.05999660873413086, 0.05989785766601562, 0.060015743255615234, 0.05998665618896484, 0.060055038452148435, 0.05995289611816406, 0.060048286437988284, 0.05997286224365234, 0.05991913604736328, 0.06003859329223633, 0.0602542724609375, 0.06045708847045898, 0.060729694366455075, 0.060424190521240234, 0.060993438720703126, 0.06012527847290039, 0.06010879898071289, 0.06005350494384765, 0.05999788665771484, 0.0600162239074707, 0.06010902404785156, 0.06046160125732422, 0.06007804870605469, 0.060295169830322265, 0.06006784057617188, 0.06009833526611328, 0.060174560546875, 0.0604417610168457, 0.060513118743896484, 0.060485633850097656, 0.06049782562255859, 0.06043247985839844, 0.060227615356445316, 0.060327552795410154, 0.06021696090698242, 0.060152542114257815, 0.06027814483642578, 0.06017254257202148, 0.060307838439941405, 0.06029305648803711, 0.06034233474731445, 0.060268543243408204, 0.06033926391601562, 0.060427200317382815, 0.0610447998046875, 0.06079654312133789, 0.06068563079833984, 0.060521472930908204, 0.06053273773193359, 0.060329216003417965, 0.060311649322509764, 0.060578464508056644, 0.06047244644165039, 0.06027523040771485, 0.06033964920043945, 0.060555614471435544, 0.06043910217285156, 0.060456031799316405, 0.06150627136230469, 0.06099763107299805, 0.060053665161132815, 0.06001571273803711, 0.059773696899414065, 0.05990195083618164, 0.05993881607055664, 0.06005759811401367, 0.06002687835693359, 0.0600203857421875, 0.05996121597290039, 0.06011747360229492, 0.05999526214599609, 0.06005583953857422, 0.05994124984741211, 0.060276065826416016, 0.060633983612060544, 0.060813312530517576, 0.06053241729736328, 0.06038060760498047, 0.06030185699462891, 0.06018476867675781, 0.060106239318847655, 0.060117504119873044, 0.06018473434448242, 0.06010060882568359, 0.06004073715209961, 0.06021356964111328, 0.06007814407348633, 0.06029321670532226, 0.059998046875, 0.060209312438964845, 0.06049286270141602, 0.06042031860351563, 0.06063955307006836, 0.06046556854248047, 0.060352512359619144, 0.060422080993652344, 0.06022355270385742, 0.060180225372314454, 0.06010889434814453, 0.060264606475830075, 0.06019891357421875, 0.06016409683227539, 0.06035625457763672, 0.06031600189208984, 0.060230911254882814, 0.0602426872253418, 0.06028902435302735, 0.060620288848876956, 0.06070867156982422, 0.06053897476196289, 0.06053481674194336, 0.06048416137695312, 0.06048166275024414, 0.06031961441040039, 0.060391422271728515, 0.06028902435302735, 0.06026444625854492, 0.06050559997558594, 0.06041446304321289, 0.060421695709228514, 0.06045251083374024, 0.06179651260375976, 0.06098108673095703, 0.060186752319335936, 0.0600761604309082, 0.05995916748046875, 0.05996892929077149, 0.05996384048461914, 0.06009190368652344, 0.05988828659057617, 0.06007731246948242, 0.06020377731323242, 0.0600002555847168, 0.060109088897705075, 0.060383041381835936, 0.060304286956787106, 0.06044569778442383, 0.06067609786987305, 0.06065347290039062, 0.06046432113647461, 0.060107681274414064, 0.05999980926513672, 0.06012895965576172, 0.059973983764648436, 0.06010416030883789, 0.06013174438476562, 0.060076576232910156, 0.06004326248168945, 0.060112895965576174, 0.060101856231689454, 0.06013555145263672, 0.06015046310424805, 0.06026031875610351, 0.06028902435302735, 0.06049587249755859, 0.06037299346923828, 0.06079257583618164, 0.060610752105712894, 0.06025632095336914, 0.06020710372924805, 
0.06030950546264648, 0.06028214263916016, 0.060283615112304685, 0.06027171325683594, 0.06032681655883789, 0.06025625610351563, 0.06029919815063477, 0.06027833557128906, 0.0602874870300293, 0.060443809509277344, 0.060670814514160155, 0.06046105575561524, 0.06076620864868164, 0.06061676788330078, 0.06084396743774414, 0.060647422790527344, 0.06069465637207031, 0.06043648147583008, 0.06031756973266601, 0.060286975860595705, 0.06047334289550781, 0.060405086517333985, 0.06112435150146484, 0.06074857711791992, 0.0616860466003418, 0.06076870346069336, 0.06042313766479492, 0.06029747009277344, 0.06007270431518555, 0.06005750274658203, 0.05993203353881836, 0.060072288513183594, 0.0600682258605957, 0.060303359985351565, 0.06018060684204102, 0.060237697601318356, 0.06009036636352539, 0.060194206237792966, 0.060190654754638674, 0.060502689361572264, 0.06069638442993164, 0.06068428802490235, 0.06088857650756836, 0.060452960968017576, 0.06030809783935547, 0.0602562255859375, 0.060203006744384766, 0.06045455932617187, 0.06017020797729492, 0.060302974700927735, 0.06015663909912109, 0.06016412734985352, 0.06018396759033203, 0.06010319900512695, 0.06012115097045898, 0.06032179260253906, 0.06041961669921875, 0.060631423950195315, 0.060490848541259766, 0.060619518280029296, 0.0605228157043457, 0.060516094207763674, 0.06028044891357422, 0.06021791839599609, 0.06017622375488281, 0.060264606475830075, 0.060139488220214844, 0.06015798568725586, 0.06014511871337891, 0.06025455856323242, 0.06033833694458008, 0.06055081558227539, 0.06061094284057617, 0.060639232635498044, 0.06047129440307617, 0.060636863708496094, 0.060427776336669924, 0.06058844757080078, 0.06041846466064453, 0.06029312133789062, 0.06035811233520508, 0.060381729125976565, 0.060284927368164064, 0.06031689453125, 0.060211360931396486, 0.0603570556640625, 0.06030329513549805, 0.061901023864746094, 0.06087286376953125, 0.060235774993896485, 0.060104705810546874, 0.05997532653808594, 0.0598900146484375, 0.05985798263549805, 0.05990700912475586, 0.05997568130493164, 0.05996966552734375, 0.06007596969604492, 0.05998140716552734, 0.05983881759643555, 0.05992668914794922, 0.06003884887695313, 0.060225662231445314, 0.06036896133422852, 0.06071292877197266, 0.06051839828491211, 0.06043843078613281, 0.06033939361572266, 0.060148639678955076, 0.06007398223876953, 0.06003507232666016, 0.06001996612548828, 0.060152576446533206, 0.060037025451660155, 0.060137214660644533, 0.060094497680664063, 0.06005702209472656, 0.06020761489868164, 0.06024435043334961, 0.060385280609130856, 0.06054899215698242, 0.06148896026611328, 0.06064972686767578, 0.06059014511108399, 0.06035359954833985, 0.060272575378417965, 0.06018361663818359, 0.060100543975830076, 0.060155742645263674, 0.06021075057983399, 0.060170623779296876, 0.060131553649902345, 0.060262401580810546, 0.060364864349365235, 0.060899040222167966, 0.06054115295410156, 0.060474655151367185, 0.06082857513427734, 0.06068204879760742, 0.06057347106933594, 0.0604420166015625, 0.060558143615722655, 0.060483585357666014, 0.06034451293945312, 0.060295135498046874, 0.06027251052856445, 0.06041759872436524, 0.060281246185302735, 0.06026342391967773, 0.06026953506469727, 0.061730911254882816, 0.06078559875488281, 0.06014575958251953, 0.060164192199707034, 0.06052534484863281, 0.06007398223876953, 0.05998096084594726, 0.06006256103515625, 0.059998207092285157, 0.06002483367919922, 0.05997568130493164, 0.060065406799316406, 0.05996582412719727, 0.0599920654296875, 0.06007523345947265, 0.06019971084594727, 0.06062195205688477, 
0.06090243148803711, 0.060652801513671875, 0.06039206314086914, 0.06018435287475586, 0.060527870178222656, 0.060549598693847656, 0.06008009719848633, 0.06015830230712891, 0.0601723518371582, 0.0601396484375, 0.05996335983276367, 0.060086177825927733, 0.06059535980224609, 0.06118700790405274, 0.06032137680053711, 0.06042665481567383, 0.06059212875366211, 0.06062694549560547, 0.06052793502807617, 0.06049862289428711, 0.06041996765136719, 0.06046326446533203, 0.060247295379638674, 0.06031824111938477, 0.06018694305419922, 0.06018873596191406, 0.06020281600952149, 0.06018048095703125, 0.06031507110595703, 0.06025484848022461, 0.060327232360839846, 0.060488319396972655, 0.0606453742980957, 0.06057350540161133, 0.06069676971435547, 0.060649471282958986, 0.06050611114501953, 0.06034431838989258, 0.060388481140136716, 0.060252254486083984, 0.060384033203125, 0.06021104049682617, 0.060536991119384764, 0.06037680053710937, 0.06032592010498047, 0.06034159851074219, 0.06159977722167969, 0.060993793487548825, 0.060229633331298826, 0.06056963348388672, 0.06005539321899414, 0.06010617446899414, 0.06025900650024414, 0.06004518508911133, 0.05993689727783203, 0.06004076766967773, 0.060033344268798826, 0.06003315353393555, 0.05996291351318359, 0.060087936401367184, 0.06008627319335937, 0.06023072052001953, 0.060630977630615236, 0.06065955352783203, 0.06061670303344727, 0.06045491027832031, 0.06027017593383789, 0.06003750228881836, 0.06003299331665039, 0.06012944030761719, 0.06013123321533203, 0.06016963195800781, 0.06015615844726562, 0.060133216857910156, 0.06022809600830078, 0.06009846496582031, 0.06006179046630859, 0.06028287887573242, 0.06047334289550781, 0.06074367904663086, 0.06057984161376953, 0.06040563201904297, 0.06051036834716797, 0.06040105438232422, 0.06025888061523438, 0.06030873489379883, 0.06034009552001953, 0.06023667144775391, 0.06029094314575195, 0.06034649658203125, 0.06029087829589844, 0.060168384552001956, 0.060210975646972656, 0.06057187271118164, 0.06056755065917969, 0.06049782562255859, 0.06068000030517578, 0.06066204833984375, 0.06074367904663086, 0.060626785278320314, 0.06035676956176758, 0.06060031890869141, 0.06055267333984375, 0.060509822845458985, 0.060420543670654296, 0.06047808074951172, 0.06060435104370117, 0.06032374572753906, 0.06038735961914062]",tokens/s,16.578144580559517,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,868.18816,2698.903552,0.0,2296.38144,2202.20672,s,1,8.06414404296875,8.06414404296875,0.0,8.06414404296875,8.06414404296875,8.06414404296875,8.06414404296875,[8.06414404296875],,kWh,5.089702216658528e-06,5.538197825575577e-07,9.052785019947329e-07,6.548800501210819e-06,,MB,1331.462144,2797.469696,0.0,2382.364672,2267.889152,s,10,0.3403370513916016,0.03403370513916016,0.0006636908345610495,0.033882991790771484,0.03428664169311524,0.03512394409179687,0.035793786010742186,"[0.035961246490478514, 0.03345283126831055, 
0.03388323211669922, 0.03396777725219727, 0.03373542404174805, 0.03388275146484375, 0.03372521591186523, 0.0337215347290039, 0.033906463623046876, 0.034100574493408205]",tokens/s,7521.955042897725,kWh,1.1608573576388102e-06,1.2793806167819527e-07,7.664721740237941e-07,2.055267593340799e-06,tokens/kWh,124557989.83521983,MB,1387.700224,2799.566848,0.0,2382.364672,2267.891712,s,10,14.763947631835936,1.4763947631835939,0.004473255126855131,1.47609033203125,1.4817590576171875,1.4821623657226561,1.4824850122070312,"[1.477118408203125, 1.482565673828125, 1.4800728759765625, 1.48166943359375, 1.47489697265625, 1.4802218017578126, 1.475062255859375, 1.4690135498046875, 1.4706085205078125, 1.4727181396484375]",tokens/s,42.671514130916606,kWh,4.309398517111364e-05,4.7529548965290245e-06,2.4215047811175857e-05,7.206198787881853e-05,tokens/kWh,874247.3230955352,,s,630,14.757545539855965,0.02342467546008882,0.0003265261539941664,0.023371791839599608,0.023652556228637697,0.023915282344818115,0.024804554405212405,"[0.023637184143066405, 0.023497472763061522, 0.023382240295410157, 0.02325503921508789, 0.023414783477783203, 0.023662752151489257, 0.023484256744384764, 0.023615007400512696, 0.023438880920410157, 0.02407110404968262, 0.023642303466796875, 0.023779359817504883, 0.02360300827026367, 0.023543935775756836, 0.024028383255004882, 0.02352601623535156, 0.02344550323486328, 0.02342483139038086, 0.02360870361328125, 0.023339967727661132, 0.023447423934936523, 0.02330169677734375, 0.02348431968688965, 0.023228767395019532, 0.023173215866088868, 0.023158527374267577, 0.023326751708984374, 0.023283967971801756, 0.02338617515563965, 0.02329599952697754, 0.023533472061157225, 0.02394940757751465, 0.025253952026367186, 0.02347327995300293, 0.023480607986450196, 0.023505407333374022, 0.02320796775817871, 0.023326656341552735, 0.023252639770507812, 0.023355039596557617, 0.02316979217529297, 0.023191839218139648, 0.02323344039916992, 0.023026399612426758, 0.023216192245483398, 0.023238048553466797, 0.023185375213623047, 0.023190176010131836, 0.023373823165893554, 0.023121568679809572, 0.023124319076538086, 0.02319331169128418, 0.02378112030029297, 0.02355990409851074, 0.02360198402404785, 0.02325299263000488, 0.023219648361206054, 0.023237375259399413, 0.02324460792541504, 0.023377727508544922, 0.02350105667114258, 0.023308223724365234, 0.023283327102661133, 0.02332480049133301, 0.023454463958740235, 0.023369728088378908, 0.023416831970214845, 0.023599103927612306, 0.02329599952697754, 0.023262720108032226, 0.023343616485595704, 0.023611391067504883, 0.023172384262084962, 0.02335817527770996, 0.023306495666503908, 0.02318720054626465, 0.02339561653137207, 0.024173280715942384, 0.023105728149414063, 0.023273279190063476, 0.023435264587402343, 0.02325433540344238, 0.023283872604370117, 0.02410086441040039, 0.02334364891052246, 0.023369728088378908, 0.023240480422973633, 0.02326527976989746, 0.023381919860839845, 0.023494976043701172, 0.02354380798339844, 0.023472127914428712, 0.02337596893310547, 0.02324675178527832, 0.023910272598266603, 0.023451776504516603, 0.02346598434448242, 0.02329395294189453, 0.023666688919067383, 0.023413951873779298, 0.023546688079833983, 0.023408639907836915, 0.023355295181274414, 0.023244895935058595, 0.02349785614013672, 0.02349555206298828, 0.023436479568481446, 0.023269344329833984, 0.023460063934326172, 0.023337600708007812, 0.023276832580566405, 0.023288543701171876, 0.023225664138793945, 0.023293727874755858, 0.023957855224609376, 0.02387334442138672, 0.023615264892578126, 
0.023888959884643554, 0.023374944686889648, 0.0234192008972168, 0.024465824127197267, 0.023416767120361327, 0.02314371109008789, 0.02345871925354004, 0.02637001609802246, 0.02510553550720215, 0.023575616836547852, 0.023521120071411133, 0.02332694435119629, 0.023463008880615234, 0.023451871871948242, 0.023587295532226563, 0.023366943359375, 0.02329583930969238, 0.025109376907348633, 0.024831039428710937, 0.023458751678466797, 0.023328800201416016, 0.02337148857116699, 0.023378175735473634, 0.02332582473754883, 0.02335139274597168, 0.02323971176147461, 0.02325049591064453, 0.023310047149658203, 0.023175647735595703, 0.02324448013305664, 0.023443199157714843, 0.02324127960205078, 0.023391231536865235, 0.02355824089050293, 0.0236409912109375, 0.023240703582763672, 0.023397760391235353, 0.023407232284545897, 0.02364563179016113, 0.023464256286621094, 0.024059776306152345, 0.02354038429260254, 0.023539424896240235, 0.023576383590698243, 0.023279199600219725, 0.02355177688598633, 0.02342969512939453, 0.023351072311401367, 0.023262752532958984, 0.02355526351928711, 0.02316262435913086, 0.023203903198242188, 0.023217952728271485, 0.023287967681884767, 0.023287136077880858, 0.02355062484741211, 0.023633920669555664, 0.023523008346557617, 0.023442911148071288, 0.023449440002441407, 0.023400640487670897, 0.023751264572143556, 0.024180095672607423, 0.02357088088989258, 0.02344592094421387, 0.023364831924438476, 0.023312288284301756, 0.023380191802978515, 0.02327743911743164, 0.02332659149169922, 0.023435903549194337, 0.023689504623413085, 0.02345497512817383, 0.023458656311035157, 0.023244512557983397, 0.023248287200927736, 0.023243167877197265, 0.023207616806030274, 0.023132863998413085, 0.02326323127746582, 0.023603200912475586, 0.02352463912963867, 0.02341551971435547, 0.02330828857421875, 0.02356211280822754, 0.023171199798583984, 0.023179264068603517, 0.023228416442871092, 0.02354380798339844, 0.023431264877319335, 0.023428991317749025, 0.023313888549804686, 0.02326540756225586, 0.023404991149902344, 0.023314336776733398, 0.023328863143920898, 0.02326918411254883, 0.023298240661621093, 0.024377599716186523, 0.024581151962280272, 0.02330451202392578, 0.023261600494384766, 0.023356992721557616, 0.024106847763061524, 0.023659008026123047, 0.02368115234375, 0.024219615936279297, 0.02570854377746582, 0.023500511169433594, 0.02340656089782715, 0.023439680099487305, 0.023386112213134767, 0.023486688613891603, 0.023580448150634765, 0.023668512344360352, 0.023305919647216795, 0.023399103164672853, 0.023799776077270508, 0.023400320053100585, 0.02332896041870117, 0.023448736190795898, 0.02332476806640625, 0.023542047500610352, 0.023428800582885743, 0.023445375442504884, 0.023550687789916994, 0.023625343322753907, 0.023509376525878905, 0.023519359588623046, 0.02352115249633789, 0.023371583938598634, 0.023470272064208986, 0.02349465560913086, 0.023473344802856445, 0.023509824752807617, 0.0235296630859375, 0.023627840042114257, 0.02344528007507324, 0.023573984146118165, 0.023555648803710937, 0.02326419258117676, 0.023261215209960936, 0.023183647155761718, 0.023251743316650392, 0.023712703704833984, 0.023654399871826173, 0.023524608612060547, 0.02374323272705078, 0.023468032836914062, 0.023191551208496093, 0.023130111694335938, 0.023140127182006837, 0.023070976257324217, 0.023084991455078124, 0.023090240478515624, 0.023114847183227538, 0.023227935791015626, 0.023789920806884766, 0.024727903366088867, 0.023633567810058594, 0.023116863250732422, 0.023135168075561523, 0.023156543731689454, 0.023524703979492186, 
0.02359177589416504, 0.02333679962158203, 0.023361696243286132, 0.023234560012817384, 0.023209856033325194, 0.02323263931274414, 0.02353139114379883, 0.023361663818359375, 0.023804159164428712, 0.024387264251708986, 0.02473971176147461, 0.023427263259887695, 0.0232857608795166, 0.023330848693847658, 0.023277023315429687, 0.02336614418029785, 0.02311497688293457, 0.02309404754638672, 0.023295808792114257, 0.023218368530273436, 0.023170495986938478, 0.02308358383178711, 0.023074176788330077, 0.023311168670654296, 0.02385465621948242, 0.023199871063232423, 0.023532960891723635, 0.023481056213378905, 0.023244287490844725, 0.02326140785217285, 0.023170879364013672, 0.023142816543579102, 0.023221376419067383, 0.023368736267089844, 0.023160831451416015, 0.023235584259033205, 0.02348271942138672, 0.023517568588256835, 0.023629503250122072, 0.02334783935546875, 0.023361696243286132, 0.023508224487304687, 0.02348908805847168, 0.023640064239501952, 0.02347769546508789, 0.02347884750366211, 0.023610559463500977, 0.023579456329345702, 0.023783424377441405, 0.02346188735961914, 0.023486591339111327, 0.023594879150390626, 0.023368736267089844, 0.023374687194824217, 0.023504735946655274, 0.023531808853149414, 0.02346403121948242, 0.02343734359741211, 0.02336934471130371, 0.02356857681274414, 0.02355606460571289, 0.023506303787231446, 0.023517919540405274, 0.02352854347229004, 0.02365235137939453, 0.023917600631713866, 0.024065664291381836, 0.023709888458251952, 0.02362169647216797, 0.02350284767150879, 0.02368716812133789, 0.023447519302368165, 0.023600671768188478, 0.02346444892883301, 0.023547903060913086, 0.023590080261230467, 0.023505727767944337, 0.02339955139160156, 0.023492576599121094, 0.023315359115600585, 0.023262559890747072, 0.023322656631469728, 0.023323455810546876, 0.02341868782043457, 0.023596736907958986, 0.02315500831604004, 0.02320915222167969, 0.02327231979370117, 0.02325497627258301, 0.023188991546630858, 0.023259071350097655, 0.023309919357299806, 0.023481311798095702, 0.023387680053710936, 0.023314655303955076, 0.024180736541748047, 0.023565727233886717, 0.023503488540649414, 0.02340614318847656, 0.023341503143310547, 0.02326675224304199, 0.023351871490478515, 0.023369760513305665, 0.023175424575805664, 0.02330735969543457, 0.023400352478027343, 0.02318614387512207, 0.02391244888305664, 0.023527679443359376, 0.02353126335144043, 0.02350464057922363, 0.023416191101074218, 0.023214975357055665, 0.023318527221679687, 0.023318304061889648, 0.02322640037536621, 0.023119903564453124, 0.02320342445373535, 0.023201887130737304, 0.023484895706176758, 0.023387168884277342, 0.023221216201782226, 0.023205888748168944, 0.02315657615661621, 0.023541791915893555, 0.02363222312927246, 0.02328758430480957, 0.023390207290649414, 0.02338595199584961, 0.023489919662475586, 0.023197919845581054, 0.023501375198364257, 0.023377344131469725, 0.02332480049133301, 0.02351148796081543, 0.02332876777648926, 0.02327552032470703, 0.02341417694091797, 0.023317087173461915, 0.02331443214416504, 0.023382015228271484, 0.023504896163940428, 0.02342515182495117, 0.023373695373535158, 0.02350806427001953, 0.023999391555786134, 0.02345369529724121, 0.02345385551452637, 0.023449440002441407, 0.023607295989990236, 0.02383695983886719, 0.023700735092163087, 0.023493087768554688, 0.023422975540161133, 0.023450815200805664, 0.023262016296386717, 0.02345881652832031, 0.02326016044616699, 0.023391231536865235, 0.023452543258666993, 0.02304857635498047, 0.023377376556396483, 0.023240928649902345, 0.02325641632080078, 
0.023330879211425782, 0.02326643180847168, 0.02415184020996094, 0.02329155158996582, 0.023344480514526367, 0.023251487731933595, 0.023195903778076173, 0.023151008605957032, 0.023312192916870117, 0.02316230392456055, 0.02324742317199707, 0.023500799179077148, 0.023401664733886718, 0.023519136428833007, 0.023525920867919922, 0.02343155288696289, 0.023357440948486328, 0.023395488739013672, 0.023360095977783202, 0.023250431060791017, 0.023341632843017577, 0.02323859214782715, 0.023277824401855468, 0.023343103408813477, 0.023226207733154296, 0.023125696182250976, 0.023265663146972655, 0.023226463317871093, 0.02405567932128906, 0.02347225570678711, 0.023243839263916016, 0.023268287658691406, 0.023123584747314452, 0.02306399917602539, 0.023202367782592773, 0.02340287971496582, 0.023467039108276366, 0.023434175491333007, 0.02317919921875, 0.023201887130737304, 0.02304204750061035, 0.02305843162536621, 0.02326118469238281, 0.023299999237060547, 0.023256191253662108, 0.02334000015258789, 0.023310335159301757, 0.023291231155395508, 0.02340323257446289, 0.023174688339233397, 0.023354816436767577, 0.02312883186340332, 0.023062463760375976, 0.0233407039642334, 0.02315839958190918, 0.023125152587890625, 0.023224159240722655, 0.023590816497802734, 0.02339955139160156, 0.02326323127746582, 0.023387168884277342, 0.023290176391601563, 0.023391136169433592, 0.02373811149597168, 0.02339423942565918, 0.023169343948364257, 0.023283424377441405, 0.023287071228027343, 0.02335136032104492, 0.0234400634765625, 0.024031295776367187, 0.023326656341552735, 0.023746559143066406, 0.02335887908935547, 0.023238527297973634, 0.023374624252319336, 0.023087039947509765, 0.023168256759643555, 0.023251167297363283, 0.023476768493652343, 0.023371135711669922, 0.023447872161865235, 0.023222591400146483, 0.023123039245605468, 0.02304707145690918, 0.022974464416503908, 0.02304204750061035, 0.02335286331176758, 0.02322185516357422, 0.023274368286132812, 0.02326518440246582, 0.024395519256591797, 0.023281856536865233, 0.022945600509643553, 0.023041439056396485, 0.02286892890930176, 0.0229171199798584, 0.023109632492065428, 0.02311510467529297, 0.02330851173400879, 0.023257280349731447, 0.02313590431213379, 0.023108415603637696, 0.02326812744140625, 0.023198368072509766, 0.023134559631347657, 0.023002399444580077, 0.022939552307128908, 0.023058944702148438, 0.026091775894165038, 0.02336774444580078, 0.023400447845458985, 0.023433183670043944, 0.02367695999145508, 0.0235413761138916, 0.023413120269775392, 0.023407712936401367, 0.023212032318115236, 0.0231506233215332, 0.02311667251586914, 0.023307552337646486, 0.023384799957275392, 0.023218175888061524, 0.02367487907409668, 0.023388160705566406, 0.023436447143554688, 0.023396480560302736, 0.023290592193603514, 0.023701471328735353, 0.023308319091796877, 0.02342691230773926, 0.02317638397216797, 0.023286752700805664, 0.023252607345581055, 0.02333535957336426, 0.023127967834472657, 0.02326038360595703, 0.023321407318115234, 0.02344316864013672, 0.023343263626098634, 0.023472415924072267, 0.02318671989440918, 0.023340736389160156, 0.0233255672454834, 0.02345369529724121, 0.023297151565551757, 0.023368576049804687, 0.02333695983886719, 0.023389440536499023, 0.02342524719238281, 0.023308128356933595, 0.023331520080566406, 0.023413759231567383, 0.023376895904541017, 0.023556095123291015, 0.0239554557800293, 0.02365235137939453, 0.023657503128051757, 0.023451616287231445, 0.023409664154052736, 0.023324832916259766, 0.023301183700561525, 0.023397151947021484, 0.023463935852050782, 
0.023248672485351562, 0.023284191131591796, 0.023123775482177734, 0.023130048751831056, 0.023173120498657225, 0.023238815307617188, 0.02318707275390625, 0.023652320861816407, 0.023385759353637695, 0.023371999740600585, 0.023546239852905274, 0.023373823165893554, 0.023394304275512694, 0.02332784080505371, 0.023288736343383788, 0.02337289619445801, 0.023208864212036134, 0.023226367950439454, 0.02341196823120117, 0.023181631088256837, 0.02337808036804199]",tokens/s,42.69002581076563,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.693824,6223.233024,0.0,5827.985408,5712.718848,s,1,7.30068115234375,7.30068115234375,0.0,7.30068115234375,7.30068115234375,7.30068115234375,7.30068115234375,[7.30068115234375],,kWh,1.03632283999976e-05,1.1358689035069413e-06,3.512780588001141e-06,1.5011877891505682e-05,,MB,1169.432576,6453.919744,0.0,6046.089216,5989.425664,s,10,0.8358417587280274,0.08358417587280273,0.0030288228730164053,0.0849144630432129,0.08570270614624023,0.08598114433288574,0.08620389488220215,"[0.07983344268798828, 0.08402767944335937, 0.08529759979248047, 0.08336016082763673, 0.08564083099365234, 0.07612076568603515, 0.08546422576904297, 0.08453132629394532, 0.08530614471435546, 0.08625958251953125]",tokens/s,3062.7806917612893,kWh,2.6076465513019306e-06,2.87574112138717e-07,1.7332479143750208e-06,4.628468577815668e-06,tokens/kWh,55309871.0072296,MB,1202.556928,6516.834304,0.0,6109.003776,6090.851328,s,10,20.951066650390622,2.0951066650390624,0.006738284949658318,2.0973076171875,2.1025949951171876,2.1030796020507814,2.1034672875976566,"[2.0802802734375, 2.09609912109375, 2.098637939453125, 2.091754150390625, 2.087480712890625, 2.098539794921875, 2.1024873046875, 2.09851611328125, 2.103564208984375, 2.09370703125]",tokens/s,30.07006805490039,kWh,6.0810682840364244e-05,6.707186377326684e-06,4.030136904662445e-05,0.00010781923826431536,tokens/kWh,584311.3067220671,,s,630,20.94752470016479,0.033250039206610786,0.0006555988821365542,0.03314591979980469,0.03360196647644043,0.0339087797164917,0.036691193504333496,"[0.03542473602294922, 0.034076736450195315, 0.03308556747436524, 0.03282115173339844, 0.03268815994262695, 0.03273315048217774, 0.03255094528198242, 0.032616416931152345, 0.03264678573608398, 0.03254009628295899, 0.032613311767578125, 0.032511070251464845, 0.03255305480957031, 0.03256403350830078, 0.03256899261474609, 0.03253692626953125, 0.032679233551025394, 0.032723648071289066, 0.03267583847045898, 0.032675201416015626, 0.03317382431030273, 0.032868576049804685, 0.03279449462890625, 0.03276319885253906, 0.03288140869140625, 0.032833087921142576, 0.0327599983215332, 0.03270019149780273, 0.03288719940185547, 0.032895198822021486, 0.032950271606445314, 0.033200096130371094, 0.032971870422363284, 0.03293894577026367, 0.03312966537475586, 0.03307193756103516, 0.032894977569580076, 0.03300966262817383, 0.03295804977416992, 0.03290768051147461, 0.03304227066040039, 
0.03290867233276367, 0.03294464111328125, 0.033003807067871094, 0.033157119750976564, 0.03312153625488281, 0.03321446228027344, 0.03318860626220703, 0.03344332885742188, 0.03486310577392578, 0.033057376861572264, 0.033228702545166015, 0.033108097076416015, 0.03300518417358399, 0.03314259338378906, 0.033083839416503905, 0.033157119750976564, 0.03321241760253906, 0.03317145538330078, 0.03309568023681641, 0.033255550384521486, 0.033352672576904295, 0.033366207122802735, 0.0367059211730957, 0.034375679016113284, 0.033544063568115234, 0.03307551956176758, 0.032939582824707034, 0.03287919998168945, 0.03272662353515625, 0.03269472122192383, 0.03282944107055664, 0.03292979049682617, 0.0329167366027832, 0.03286415863037109, 0.03293679809570312, 0.03287859344482422, 0.032888801574707034, 0.033001216888427734, 0.03294806289672852, 0.03297529602050781, 0.03297280120849609, 0.03290726470947265, 0.033277950286865234, 0.03293289566040039, 0.03303094482421875, 0.0329606704711914, 0.032939136505126955, 0.03303071975708008, 0.03315542221069336, 0.03305628967285156, 0.03309936141967774, 0.03318009567260742, 0.03338899230957031, 0.033570816040039066, 0.03340697479248047, 0.03336806488037109, 0.03321571350097656, 0.033151775360107424, 0.03339174270629883, 0.033186687469482425, 0.03322262573242187, 0.033219936370849606, 0.03317216110229492, 0.03313808059692383, 0.03327376174926758, 0.033208030700683594, 0.033129440307617185, 0.033189407348632814, 0.03331097412109375, 0.03338652801513672, 0.03335187149047852, 0.03323494338989258, 0.03330825424194336, 0.03322915267944336, 0.03347257614135742, 0.03327356719970703, 0.03320979309082031, 0.03334979248046875, 0.03357766342163086, 0.033288192749023435, 0.033529857635498046, 0.033333217620849606, 0.03405769729614258, 0.03425667190551758, 0.033648799896240235, 0.03823616027832031, 0.03472793579101562, 0.033503231048583985, 0.03302220916748047, 0.03283766555786133, 0.03282710266113281, 0.03275980758666992, 0.03289606475830078, 0.0328221435546875, 0.0327347183227539, 0.03278905487060547, 0.03275900650024414, 0.03285686492919922, 0.03294003295898437, 0.03289478302001953, 0.032831680297851565, 0.032868350982666016, 0.0328392333984375, 0.033067264556884766, 0.03305683135986328, 0.03296063995361328, 0.03330799865722656, 0.03297859191894531, 0.033037311553955076, 0.033323009490966796, 0.033040382385253905, 0.03313663864135742, 0.03294617462158203, 0.033097984313964844, 0.0332224006652832, 0.03357263946533203, 0.03332937622070312, 0.03339878463745117, 0.0333496322631836, 0.033261566162109374, 0.03316716766357422, 0.0333191032409668, 0.033538047790527346, 0.03320774459838867, 0.03320479965209961, 0.033159168243408206, 0.033310527801513674, 0.033285888671875, 0.03339923095703125, 0.03326284790039063, 0.033078014373779295, 0.03311174392700195, 0.03323436737060547, 0.03330918502807617, 0.033278335571289064, 0.03324230575561524, 0.03339347076416015, 0.033453342437744144, 0.033551071166992186, 0.0334205436706543, 0.033342208862304684, 0.03478428649902344, 0.03326051330566406, 0.03362105560302735, 0.03353900909423828, 0.033562625885009766, 0.0336629753112793, 0.03336771011352539, 0.03665513610839844, 0.03467667388916015, 0.03360156631469727, 0.03312844848632813, 0.03298918533325195, 0.03280812835693359, 0.03278726577758789, 0.03272284698486328, 0.03279459381103516, 0.03274969482421875, 0.03301990509033203, 0.03295641708374023, 0.0330742073059082, 0.034243457794189455, 0.03277628707885742, 0.03272457504272461, 0.03304415893554687, 0.03286627197265625, 0.033143550872802734, 
0.03294793701171875, 0.03303798294067383, 0.03291196823120117, 0.032995361328125, 0.03285715103149414, 0.032959423065185546, 0.032918815612792966, 0.03290800094604492, 0.03299737548828125, 0.03321241760253906, 0.03362329483032227, 0.033641216278076175, 0.03342156982421875, 0.03347840118408203, 0.033409278869628904, 0.03323468780517578, 0.03325523376464844, 0.03329267120361328, 0.033189697265625, 0.033122303009033204, 0.033027198791503905, 0.03295935821533203, 0.033036384582519535, 0.033071006774902344, 0.03318937683105469, 0.03320060729980469, 0.03310518264770508, 0.03314896011352539, 0.03314352035522461, 0.033175552368164066, 0.03302918243408203, 0.0331396484375, 0.03311577606201172, 0.033154624938964844, 0.03325215911865234, 0.033173057556152345, 0.033062496185302735, 0.033046592712402345, 0.03309344100952148, 0.03323545455932617, 0.0332295036315918, 0.033371936798095705, 0.033140735626220705, 0.03316121673583984, 0.03710134506225586, 0.03480403137207031, 0.03370102310180664, 0.03291836929321289, 0.0326446418762207, 0.03279062271118164, 0.03388412857055664, 0.032764129638671875, 0.03265907287597656, 0.03261702346801758, 0.03284377670288086, 0.03255507278442383, 0.03268739318847656, 0.03289564895629883, 0.03277545547485351, 0.032613086700439456, 0.03271475219726563, 0.03277164840698242, 0.032747615814208986, 0.03268233489990234, 0.03279209518432617, 0.03283155059814453, 0.03283599853515625, 0.033006816864013674, 0.03300150299072266, 0.0330860481262207, 0.03278351974487305, 0.03293491363525391, 0.03292979049682617, 0.03301580810546875, 0.033037887573242185, 0.03298144149780274, 0.03317327880859375, 0.03300921630859375, 0.03314067077636719, 0.03314556884765625, 0.033187744140625, 0.03305187225341797, 0.03302864074707031, 0.03302844619750977, 0.03309568023681641, 0.03297795104980469, 0.03307414245605469, 0.03323904037475586, 0.03306092834472656, 0.03302739334106446, 0.03308009719848633, 0.03308697509765625, 0.033050975799560546, 0.03311001586914063, 0.03311215972900391, 0.033114017486572264, 0.03322880172729492, 0.033230846405029296, 0.03314627075195312, 0.03330879974365234, 0.033408958435058596, 0.03325993728637695, 0.03331676864624023, 0.03373897552490234, 0.03364044952392578, 0.033314815521240236, 0.03336959838867187, 0.034716575622558594, 0.034632736206054685, 0.03375510406494141, 0.03307465744018555, 0.03297683334350586, 0.03298992156982422, 0.03289199829101563, 0.032946720123291015, 0.033046783447265624, 0.03294822311401367, 0.032835582733154296, 0.03292598342895508, 0.033029823303222655, 0.03295849609375, 0.03294800186157226, 0.03292559814453125, 0.03290758514404297, 0.032884735107421875, 0.03296051025390625, 0.03303014373779297, 0.03313459014892578, 0.032904670715332034, 0.03377411270141602, 0.03342051315307617, 0.03333814239501953, 0.033078624725341794, 0.033034591674804686, 0.033022335052490234, 0.033183521270751956, 0.03317907333374023, 0.03341567993164062, 0.03330860900878906, 0.033556766510009765, 0.03337372970581055, 0.03336172866821289, 0.033563297271728514, 0.03322880172729492, 0.03341516876220703, 0.033688961029052736, 0.033202816009521484, 0.0332492790222168, 0.03318726348876953, 0.03329391860961914, 0.03335638427734375, 0.03330086517333984, 0.03315507125854492, 0.03328956985473633, 0.03341331100463867, 0.03330915069580078, 0.03350527954101563, 0.03330047988891602, 0.03320431900024414, 0.033398143768310545, 0.033331199645996096, 0.03346691131591797, 0.033564350128173825, 0.033331520080566404, 0.03339263916015625, 0.03355401611328125, 0.03388457489013672, 
0.03392054367065429, 0.033460609436035155, 0.03369993591308594, 0.039167934417724606, 0.03525568008422852, 0.03392374420166016, 0.033279998779296875, 0.03319193649291992, 0.03288848114013672, 0.0328175048828125, 0.032798431396484376, 0.03278467178344727, 0.03294822311401367, 0.03291654586791992, 0.0327894401550293, 0.03286592102050781, 0.0328353271484375, 0.03284352111816406, 0.03315776062011719, 0.03307545471191406, 0.03299020767211914, 0.03311215972900391, 0.0330720329284668, 0.033050273895263674, 0.03319843292236328, 0.03302809524536133, 0.03306278228759765, 0.03306304168701172, 0.033062240600585935, 0.03301043319702148, 0.03301161575317383, 0.03318374252319336, 0.03361177444458008, 0.03389440155029297, 0.03335168075561523, 0.033667198181152345, 0.033529727935791016, 0.033808383941650394, 0.0335175666809082, 0.03325276947021484, 0.03324364852905273, 0.03324528121948242, 0.03323494338989258, 0.033255424499511715, 0.03309497451782226, 0.03316585540771484, 0.033163425445556644, 0.033295455932617186, 0.03316419219970703, 0.033331199645996096, 0.03337612915039063, 0.033511550903320315, 0.033209823608398435, 0.03324924850463867, 0.03343999862670898, 0.033265983581542966, 0.033331199645996096, 0.03338652801513672, 0.03321171188354492, 0.03327862548828125, 0.03341107177734375, 0.03335168075561523, 0.03374694442749023, 0.03379404830932617, 0.033421184539794924, 0.033875137329101565, 0.03704867172241211, 0.034586334228515626, 0.033444095611572265, 0.03322675323486328, 0.033041824340820314, 0.03285420989990234, 0.03288515090942383, 0.03285414505004883, 0.03293171310424805, 0.03288195037841797, 0.03285475158691406, 0.032877632141113285, 0.03303519821166992, 0.0328724479675293, 0.032849918365478514, 0.03292694473266602, 0.032922401428222656, 0.032882816314697264, 0.03301532745361328, 0.03315289688110352, 0.03321084976196289, 0.03296236801147461, 0.032996574401855466, 0.03295945739746094, 0.03315209579467773, 0.03308022308349609, 0.03316707229614258, 0.03312847900390625, 0.03314092636108398, 0.033276096343994144, 0.03480115127563477, 0.03352819061279297, 0.03344793701171875, 0.033377601623535154, 0.033384864807128906, 0.033181472778320314, 0.03318425750732422, 0.03316227340698242, 0.033377246856689455, 0.03323849487304688, 0.03321500778198242, 0.033213760375976564, 0.03336588668823242, 0.033379135131835935, 0.033271808624267575, 0.033253185272216795, 0.03328838348388672, 0.03314688110351562, 0.033165313720703124, 0.03328947067260742, 0.03345280075073242, 0.03316326522827148, 0.03331603240966797, 0.03344057464599609, 0.033271808624267575, 0.033320159912109376, 0.03358390426635742, 0.03352979278564453, 0.033491008758544924, 0.03336601638793945, 0.03364422225952148, 0.033605567932128905, 0.033462238311767575, 0.03948457717895508, 0.03513177490234375, 0.033874401092529295, 0.03327590560913086, 0.03342480087280274, 0.03303484725952149, 0.03295974349975586, 0.03298175811767578, 0.03299123382568359, 0.03282534408569336, 0.03284707260131836, 0.03286044692993164, 0.03282505416870117, 0.03278287887573242, 0.03297491073608398, 0.032993408203125, 0.03297443389892578, 0.032860641479492185, 0.03301580810546875, 0.03303424072265625, 0.032891136169433594, 0.03304544067382813, 0.0329552001953125, 0.033124256134033206, 0.03317769622802735, 0.03296432113647461, 0.032986942291259765, 0.033032318115234376, 0.033337600708007814, 0.0334431037902832, 0.03361171340942383, 0.03341196823120117, 0.03336601638793945, 0.03360752105712891, 0.033552032470703125, 0.033231361389160156, 0.03323875045776367, 0.03401052856445313, 
0.03316169738769531, 0.03324143981933594, 0.033353855133056644, 0.03314828872680664, 0.03314950561523437, 0.03331891250610351, 0.03382684707641601, 0.0337632942199707, 0.033215648651123045, 0.033452896118164065, 0.03328432083129883, 0.033158302307128906, 0.03310211181640625, 0.033446239471435546, 0.0333496322631836, 0.033494430541992186, 0.03349155044555664, 0.03321855926513672, 0.03338572692871094, 0.03343030548095703, 0.033545246124267576, 0.03351238250732422, 0.033923072814941405, 0.033393665313720705, 0.03364742279052734, 0.03776847839355469, 0.03490886306762695, 0.03363388824462891, 0.033143199920654294, 0.03300742340087891, 0.03288899230957031, 0.032900768280029295, 0.03290560150146484, 0.03305043029785156, 0.03300166320800781, 0.03295641708374023, 0.032876190185546876, 0.03294831848144531, 0.03287260818481445, 0.032890270233154294, 0.032791072845458985, 0.03287030410766602, 0.03281878280639648, 0.03290793609619141, 0.03291449737548828, 0.03292006301879883, 0.032860607147216794, 0.03296649551391601, 0.03278160095214844, 0.03301055908203125, 0.0329890251159668, 0.03290332794189453, 0.032916961669921876, 0.03305936050415039, 0.03329391860961914, 0.0334381103515625, 0.03321446228027344, 0.03385935974121094, 0.033315040588378905, 0.03323904037475586, 0.03316454315185547, 0.03316368103027344, 0.03313423919677734, 0.03307180786132812, 0.03311001586914063, 0.03295155334472656, 0.032989566802978515, 0.03300508880615234, 0.03304889678955078, 0.03311465454101563, 0.03309308624267578, 0.03343395233154297, 0.033029953002929685, 0.03305654525756836, 0.032952510833740234, 0.03293788909912109, 0.03295187377929688, 0.03290617752075195, 0.032942081451416014, 0.03337625503540039, 0.03297280120849609, 0.03303974533081055, 0.03307379150390625, 0.03317510223388672, 0.0332845458984375, 0.035880767822265625, 0.03451091384887695, 0.03320230484008789]",tokens/s,30.07515250692335,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking 
backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 86771 has 14.69 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 1.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.231168,1903.099904,0.0,1507.852288,1469.840384,s,1,7.52754248046875,7.52754248046875,0.0,7.52754248046875,7.52754248046875,7.52754248046875,7.52754248046875,[7.52754248046875],,kWh,9.503668124996995e-06,1.0401331762255093e-06,3.1658358660008323e-06,1.3709637167223338e-05,,MB,1226.58816,1945.042944,0.0,1537.212416,1426.272256,s,10,0.27097811317443843,0.02709781131744384,0.0003786107303017915,0.027143296241760255,0.027483865928649903,0.02757210931777954,0.02764270402908325,"[0.02708073616027832, 0.027005279541015625, 0.026945632934570314, 0.026169376373291017, 0.026936735153198242, 0.02766035270690918, 0.02728428840637207, 0.027205856323242187, 0.02722559928894043, 0.027464256286621094]",tokens/s,9447.257455630874,kWh,7.997867326598076e-07,8.820181491284647e-08,5.278565866685053e-07,1.4158451342411593e-06,tokens/kWh,180810735.44615212,MB,1255.46496,1955.528704,0.0,1547.698176,1426.274816,s,10,14.865883300781249,1.4865883300781249,0.022758679453859385,1.4982611694335937,1.5034682861328124,1.5080211303710938,1.5116634057617186,"[1.4967606201171875, 1.4830277099609375, 1.4520919189453125, 1.438096435546875, 1.50245654296875, 1.512573974609375, 1.500075927734375, 1.5007305908203126, 1.49976171875, 1.480307861328125]",tokens/s,42.37891467686226,kWh,4.2963057214423454e-05,4.738280142252147e-06,2.0673327040931977e-05,6.837466439760757e-05,tokens/kWh,921393.9191517899,,s,630,14.863477323532083,0.023592821148463658,0.0005836044082770866,0.023636879920959475,0.02402796497344971,0.024213948917388917,0.025956943264007576,"[0.02380396842956543, 0.02381167984008789, 0.023557727813720702, 0.023343296051025392, 0.02356902313232422, 0.02336511993408203, 0.0234901123046875, 0.02411516761779785, 0.023535648345947267, 0.023407552719116213, 0.02365964889526367, 0.023612287521362303, 0.023468032836914062, 0.02350284767150879, 0.02510652732849121, 0.02385296058654785, 0.024130975723266602, 0.02401340866088867, 0.02364825630187988, 0.02354694366455078, 0.02357548713684082, 0.0235231990814209, 0.02374176025390625, 0.0236429443359375, 0.023686975479125977, 0.023616863250732423, 0.023540576934814452, 0.023545856475830077, 0.027241952896118166, 0.02977574348449707, 0.02368783950805664, 0.023632959365844728, 0.023755584716796875, 0.023781280517578125, 0.02357788848876953, 0.023829439163208007, 0.024065120697021485, 0.023741344451904296, 0.02341587257385254, 0.02346659278869629, 0.02344585609436035, 0.023464128494262694, 0.02360704040527344, 0.023590335845947264, 0.023494688034057618, 0.023583616256713867, 0.023550687789916994, 0.023415487289428712, 0.023342815399169922, 0.023204448699951172, 0.023337055206298828, 0.02344108772277832, 0.023666559219360353, 
0.023576927185058594, 0.023317600250244142, 0.023651519775390626, 0.02339596748352051, 0.023322368621826173, 0.023351648330688476, 0.023317760467529296, 0.023206655502319335, 0.023297088623046875, 0.023542720794677733, 0.023474336624145508, 0.02344918441772461, 0.023247039794921875, 0.023474239349365236, 0.023444799423217772, 0.023517887115478517, 0.023377727508544922, 0.023298240661621093, 0.023475807189941408, 0.02336502456665039, 0.023376895904541017, 0.023472127914428712, 0.02368716812133789, 0.0234105281829834, 0.023914655685424804, 0.023959775924682618, 0.02378489685058594, 0.023439712524414062, 0.023349407196044922, 0.02360918426513672, 0.02393087959289551, 0.02349459266662598, 0.023458911895751954, 0.0233973445892334, 0.02371513557434082, 0.023389888763427735, 0.02345881652832031, 0.02343462371826172, 0.023675519943237303, 0.02333251190185547, 0.02329840087890625, 0.023619583129882812, 0.023445407867431642, 0.023549280166625976, 0.023488704681396484, 0.023410303115844727, 0.023566591262817384, 0.023802528381347655, 0.02349270439147949, 0.023351104736328124, 0.02369139289855957, 0.023855104446411132, 0.02337126350402832, 0.023773311614990234, 0.023439712524414062, 0.023996448516845702, 0.02345779228210449, 0.023513088226318358, 0.023468032836914062, 0.023365631103515624, 0.023500799179077148, 0.023600959777832033, 0.023433216094970705, 0.023655872344970703, 0.023603967666625977, 0.023349248886108398, 0.023858463287353516, 0.023343807220458986, 0.02329360008239746, 0.02428351974487305, 0.023580768585205077, 0.023627487182617188, 0.023293567657470704, 0.023424383163452148, 0.023358079910278322, 0.02338809585571289, 0.023304256439208984, 0.023494239807128905, 0.02319561576843262, 0.022937536239624023, 0.02311961555480957, 0.02314931106567383, 0.023158784866333007, 0.023568384170532225, 0.023158784866333007, 0.02304355239868164, 0.023231008529663085, 0.023225919723510742, 0.022958528518676757, 0.022872064590454103, 0.022906879425048828, 0.02306252861022949, 0.02305638313293457, 0.022957632064819336, 0.022868415832519532, 0.023344736099243164, 0.022835552215576174, 0.022761663436889647, 0.022859647750854493, 0.022740352630615236, 0.022781631469726563, 0.022829151153564452, 0.023085760116577148, 0.02274857521057129, 0.022839487075805662, 0.02299347114562988, 0.023170848846435547, 0.024883424758911133, 0.02627174377441406, 0.023320608139038086, 0.0229703369140625, 0.022971839904785157, 0.022968896865844725, 0.022923263549804687, 0.02289459228515625, 0.02289254379272461, 0.022853631973266602, 0.022734848022460938, 0.02291868782043457, 0.023126495361328124, 0.023019519805908203, 0.0229552001953125, 0.022846271514892578, 0.022711872100830078, 0.02277737617492676, 0.02286207962036133, 0.022672031402587892, 0.02272051239013672, 0.02255286407470703, 0.022695295333862303, 0.02258710479736328, 0.022605791091918945, 0.022626720428466796, 0.022757631301879883, 0.02264678382873535, 0.022691839218139647, 0.023012928009033203, 0.023002559661865235, 0.022960159301757814, 0.022600671768188477, 0.02266873550415039, 0.02268115234375, 0.02274844741821289, 0.022660415649414064, 0.02259190368652344, 0.022719648361206053, 0.022703968048095702, 0.022700544357299804, 0.022667776107788085, 0.022681440353393555, 0.0227923526763916, 0.02313145637512207, 0.022983360290527343, 0.02294112014770508, 0.022695999145507812, 0.022753791809082033, 0.022601728439331056, 0.022462495803833006, 0.022708192825317382, 0.02264860725402832, 0.0225795841217041, 0.022681440353393555, 0.023358783721923827, 0.022681343078613282, 
0.022647743225097657, 0.022564863204956053, 0.022607872009277344, 0.022898687362670898, 0.0227259521484375, 0.022770368576049804, 0.022597631454467772, 0.02272870445251465, 0.02268694305419922, 0.02268035125732422, 0.022648223876953123, 0.022700639724731447, 0.02265088081359863, 0.022740896224975587, 0.022775583267211914, 0.022657567977905274, 0.022820640563964843, 0.022841344833374022, 0.022795648574829103, 0.022675775527954103, 0.022747264862060548, 0.022661312103271485, 0.022627967834472656, 0.02261030387878418, 0.022895904541015626, 0.023030496597290038, 0.022775423049926757, 0.023173503875732422, 0.023169023513793945, 0.023228416442871092, 0.02339142417907715, 0.023538496017456053, 0.023602336883544923, 0.023401504516601564, 0.023366655349731445, 0.02361759948730469, 0.023513248443603516, 0.023361824035644532, 0.02356777572631836, 0.023419071197509765, 0.023277856826782226, 0.023377471923828125, 0.023607423782348633, 0.02346028709411621, 0.023392255783081056, 0.024458751678466797, 0.024061920166015625, 0.023828128814697265, 0.023688064575195313, 0.02405177688598633, 0.02386636734008789, 0.023912384033203126, 0.023802303314208986, 0.02371785545349121, 0.023806560516357423, 0.024253856658935546, 0.024481727600097657, 0.02376969528198242, 0.02400467109680176, 0.024172544479370117, 0.023883775711059572, 0.023725824356079103, 0.02386115264892578, 0.02386390495300293, 0.0239552001953125, 0.023776416778564454, 0.023845727920532227, 0.02385411262512207, 0.0240644474029541, 0.024154655456542967, 0.023975328445434572, 0.023751264572143556, 0.024221696853637696, 0.023965696334838867, 0.023829919815063477, 0.02401750373840332, 0.023805536270141602, 0.023658912658691408, 0.02378156852722168, 0.023747776031494142, 0.023741056442260742, 0.023883775711059572, 0.023734272003173826, 0.024414207458496092, 0.023793664932250977, 0.023832576751708984, 0.023859199523925782, 0.024335615158081053, 0.023847679138183593, 0.024025087356567384, 0.023828479766845705, 0.023926015853881835, 0.02368569564819336, 0.02379385566711426, 0.02381817626953125, 0.023748672485351563, 0.023848960876464844, 0.023837823867797852, 0.02399292755126953, 0.023832576751708984, 0.02473936080932617, 0.023992799758911134, 0.023961599349975587, 0.025796607971191408, 0.026394880294799805, 0.023875328063964845, 0.023956703186035155, 0.023896831512451172, 0.024010784149169923, 0.023879680633544922, 0.02397929573059082, 0.02368342399597168, 0.023861631393432618, 0.024786943435668944, 0.02412748718261719, 0.024246271133422852, 0.023905696868896483, 0.023730783462524413, 0.023792736053466795, 0.023800479888916017, 0.024037023544311524, 0.023751264572143556, 0.023769088745117187, 0.023773183822631837, 0.02498703956604004, 0.0238656005859375, 0.023748319625854494, 0.02376460838317871, 0.02387164878845215, 0.02442326354980469, 0.023905664443969726, 0.02375459289550781, 0.024118047714233398, 0.023975967407226562, 0.023770816802978517, 0.023647680282592773, 0.023802719116210937, 0.023908447265625, 0.023885631561279298, 0.02372003173828125, 0.02388364791870117, 0.02392025566101074, 0.02443247985839844, 0.02390902328491211, 0.023973888397216796, 0.02406399917602539, 0.02401417541503906, 0.02381439971923828, 0.023856639862060547, 0.02388675117492676, 0.023989376068115235, 0.023817087173461916, 0.02392985534667969, 0.02369024085998535, 0.023897632598876953, 0.0237607364654541, 0.023695999145507813, 0.023750656127929686, 0.02372403144836426, 0.023658496856689453, 0.02391209602355957, 0.024124895095825195, 0.02368476867675781, 0.023908832550048827, 
0.023756256103515627, 0.023681856155395507, 0.02371552085876465, 0.023728288650512696, 0.02385772705078125, 0.023678655624389647, 0.023793119430541992, 0.023724607467651367, 0.023769088745117187, 0.023967744827270508, 0.023666688919067383, 0.0237076473236084, 0.023582719802856447, 0.02364959907531738, 0.02359980773925781, 0.023559520721435547, 0.02369193649291992, 0.023595008850097656, 0.02371583938598633, 0.02418832015991211, 0.023736223220825196, 0.023714656829833983, 0.023805183410644533, 0.02364681625366211, 0.023859199523925782, 0.023990335464477538, 0.02360099220275879, 0.024791135787963867, 0.02373347282409668, 0.023851423263549804, 0.023779712677001952, 0.02390630340576172, 0.024115135192871093, 0.02375004768371582, 0.023571199417114257, 0.023725984573364257, 0.02361903953552246, 0.023532352447509765, 0.024018911361694335, 0.023826175689697266, 0.023613439559936524, 0.023641151428222658, 0.023501632690429687, 0.023709823608398437, 0.02369945526123047, 0.023567487716674804, 0.023815040588378907, 0.02367283248901367, 0.02364735984802246, 0.02370256042480469, 0.023819456100463866, 0.025094816207885742, 0.024528287887573243, 0.02444758415222168, 0.023905311584472656, 0.02373756790161133, 0.023670495986938475, 0.023705631256103515, 0.023642112731933593, 0.023791616439819335, 0.024204479217529298, 0.02398912048339844, 0.023961631774902344, 0.024047584533691407, 0.023826431274414063, 0.023842815399169923, 0.02392268753051758, 0.023850944519042967, 0.023760927200317382, 0.02358428764343262, 0.02390399932861328, 0.023876352310180662, 0.024778751373291014, 0.023859199523925782, 0.0235885124206543, 0.02363632011413574, 0.023678495407104493, 0.023556447982788085, 0.023592447280883787, 0.02378550338745117, 0.02403571128845215, 0.023742176055908202, 0.023572799682617187, 0.023595199584960938, 0.023586816787719726, 0.023532575607299804, 0.023542688369750975, 0.023748319625854494, 0.023719648361206054, 0.023753343582153322, 0.023615488052368162, 0.023762943267822266, 0.023664287567138672, 0.024021343231201173, 0.02405990409851074, 0.024401792526245115, 0.02395123291015625, 0.0240948486328125, 0.02412371253967285, 0.023885631561279298, 0.02373222351074219, 0.023801759719848634, 0.023656543731689454, 0.024112415313720704, 0.02360393524169922, 0.023788768768310545, 0.02351388740539551, 0.023805952072143553, 0.023842815399169923, 0.023940128326416017, 0.023688095092773438, 0.023637407302856444, 0.023588544845581056, 0.023643104553222657, 0.024147968292236328, 0.023852415084838867, 0.023682720184326173, 0.0236408634185791, 0.02378976058959961, 0.02362303924560547, 0.0238209285736084, 0.023957504272460937, 0.02395955276489258, 0.0236810245513916, 0.023545856475830077, 0.023494272232055663, 0.023736320495605468, 0.02362214469909668, 0.023653472900390625, 0.023636768341064453, 0.023767040252685546, 0.02367692756652832, 0.023699296951293945, 0.02346771240234375, 0.023486080169677733, 0.024060768127441408, 0.023793664932250977, 0.02363916778564453, 0.023571008682250975, 0.024027456283569337, 0.023732383728027343, 0.023652191162109374, 0.02360758399963379, 0.02352262306213379, 0.024024959564208984, 0.023632415771484373, 0.023662591934204103, 0.023576576232910155, 0.023858335494995116, 0.024064863204956054, 0.023863296508789062, 0.023748287200927733, 0.023608671188354493, 0.023623712539672853, 0.02384787178039551, 0.024151391983032226, 0.023748672485351563, 0.026085407257080077, 0.02425503921508789, 0.023908607482910155, 0.023947008132934572, 0.023666688919067383, 0.024032543182373047, 0.024097471237182616, 
0.0240230712890625, 0.02376911926269531, 0.023805919647216796, 0.02370560073852539, 0.02384486389160156, 0.024588287353515623, 0.02372812843322754, 0.023744512557983398, 0.023737600326538086, 0.02363612747192383, 0.02351900863647461, 0.02364044761657715, 0.024152511596679686, 0.02392803192138672, 0.023712543487548827, 0.023752479553222655, 0.023643423080444335, 0.023544960021972657, 0.023754560470581054, 0.023711103439331055, 0.023511680603027343, 0.023584735870361327, 0.023821855545043947, 0.023706079483032227, 0.02367068862915039, 0.02369955253601074, 0.023834623336791993, 0.023639072418212892, 0.023464927673339842, 0.023467775344848632, 0.023576831817626952, 0.023395519256591796, 0.026346176147460938, 0.023564224243164063, 0.023417024612426757, 0.023599103927612306, 0.023636991500854493, 0.02353219223022461, 0.023234912872314453, 0.0232421760559082, 0.02321788787841797, 0.023192096710205078, 0.023291263580322265, 0.023120832443237305, 0.023240703582763672, 0.02328985595703125, 0.02335651206970215, 0.02305423927307129, 0.02306255912780762, 0.02321219253540039, 0.023313215255737305, 0.02342911911010742, 0.023404544830322265, 0.023259136199951173, 0.0233240966796875, 0.023107263565063478, 0.023147392272949218, 0.023271135330200195, 0.023517471313476562, 0.02355200004577637, 0.023398143768310547, 0.02325119972229004, 0.023245920181274415, 0.02339455986022949, 0.02381599998474121, 0.024038240432739257, 0.02359065628051758, 0.023439584732055666, 0.02322435188293457, 0.02311369514465332, 0.024137216567993162, 0.026022432327270507, 0.02377903938293457, 0.023736671447753908, 0.02422761535644531, 0.023486015319824218, 0.023419296264648438, 0.023162912368774415, 0.023154783248901366, 0.02314022445678711, 0.023054176330566407, 0.02299465560913086, 0.023007328033447266, 0.02305622482299805, 0.023001792907714844]",tokens/s,42.38577462641084,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,835.735552,4675.534848,0.0,4280.287232,4115.121152,s,1,7.37269970703125,7.37269970703125,0.0,7.37269970703125,7.37269970703125,7.37269970703125,7.37269970703125,[7.37269970703125],,kWh,9.522534087530706e-06,1.0429521619540363e-06,3.299169305998051e-06,1.3864655555482794e-05,,MB,1202.020352,4983.816192,0.0,4575.985664,4408.408064,s,10,0.42754406356811525,0.04275440635681152,0.0014334045633080338,0.042253551483154295,0.04336371269226074,0.04515391941070556,0.046586084785461425,"[0.04694412612915039, 0.04253737640380859, 0.042242782592773434, 0.042584064483642575, 0.04222710418701172, 0.04203776168823242, 0.04195721435546875, 0.041783424377441404, 0.042264320373535155, 
0.04296588897705078]",tokens/s,5987.686926664923,kWh,1.5306460620200298e-06,1.6880301024038254e-07,1.0141325739580902e-06,2.713581646218502e-06,tokens/kWh,94340260.72395776,MB,1230.35648,4983.816192,0.0,4575.985664,4408.410624,s,10,15.602204467773436,1.5602204467773437,0.006893250126550453,1.5605767211914063,1.56579580078125,1.5694571044921877,1.5723861474609375,"[1.573118408203125, 1.564982177734375, 1.558853759765625, 1.5473900146484374, 1.5649759521484374, 1.5629482421875, 1.551452880859375, 1.5591427001953124, 1.5620107421875, 1.55732958984375]",tokens/s,40.37890935869181,kWh,4.494949023089878e-05,4.957548495471422e-06,2.977266983324302e-05,7.967970855961321e-05,tokens/kWh,790665.542568669,,s,630,15.599725788116476,0.024761469504946756,0.000482530303829509,0.024702032089233397,0.0250308967590332,0.02532572202682495,0.02608155612945557,"[0.025272319793701172, 0.02474393653869629, 0.024856576919555663, 0.024817663192749022, 0.024887615203857422, 0.024907455444335938, 0.02490572738647461, 0.024807424545288087, 0.024922111511230468, 0.024801279067993166, 0.025231359481811523, 0.029007648468017577, 0.02487318420410156, 0.025145408630371093, 0.02533328056335449, 0.02497372817993164, 0.02569625663757324, 0.024817663192749022, 0.02489139175415039, 0.02480735969543457, 0.024780096054077147, 0.024849151611328123, 0.024706304550170897, 0.024804096221923828, 0.025481056213378907, 0.02496227264404297, 0.02486777687072754, 0.024742944717407227, 0.02535523223876953, 0.02493017578125, 0.02490108871459961, 0.025690559387207032, 0.024985824584960938, 0.025132192611694335, 0.024863584518432617, 0.024937568664550783, 0.024531871795654296, 0.024639488220214844, 0.02459561538696289, 0.02467024040222168, 0.024514591217041016, 0.024591136932373046, 0.02474332809448242, 0.02485513687133789, 0.024702688217163087, 0.0246080322265625, 0.024656671524047852, 0.024721632003784178, 0.02487295913696289, 0.025556991577148438, 0.024922111511230468, 0.024682048797607423, 0.025390687942504882, 0.025489599227905273, 0.025444576263427734, 0.024637887954711914, 0.02473756790161133, 0.024766687393188477, 0.024268800735473633, 0.024357152938842774, 0.024272607803344726, 0.024923391342163086, 0.0250263671875, 0.02491769599914551, 0.02536288070678711, 0.024864639282226562, 0.024590560913085938, 0.024579776763916015, 0.024620864868164064, 0.024539424896240235, 0.02451456069946289, 0.02614476776123047, 0.025933792114257812, 0.02469385528564453, 0.024879968643188477, 0.024748128890991213, 0.024713216781616212, 0.02456928062438965, 0.02465439987182617, 0.024530208587646485, 0.025078176498413086, 0.02460268783569336, 0.02439344024658203, 0.024437280654907228, 0.02471219253540039, 0.03300022506713867, 0.024514463424682616, 0.024929983139038086, 0.024777215957641603, 0.024754304885864258, 0.0247193603515625, 0.02462067222595215, 0.024899967193603517, 0.02451241683959961, 0.024567903518676756, 0.024858015060424805, 0.02486537551879883, 0.024532127380371093, 0.02492630386352539, 0.024453887939453123, 0.024649311065673828, 0.024895519256591798, 0.024696575164794923, 0.024507007598876952, 0.024532800674438478, 0.02455571174621582, 0.024593696594238282, 0.024584928512573243, 0.024635391235351564, 0.02460371208190918, 0.024564672470092773, 0.024436128616333007, 0.024719808578491213, 0.02444044876098633, 0.02446611213684082, 0.025082752227783202, 0.024757055282592772, 0.024538591384887697, 0.024580799102783202, 0.024399551391601562, 0.024586559295654297, 0.02441766357421875, 0.02461065673828125, 0.02457859230041504, 0.02485273551940918, 
0.0244421443939209, 0.025602975845336915, 0.02497529602050781, 0.02480953598022461, 0.025163103103637695, 0.024743776321411132, 0.02509292793273926, 0.024543231964111328, 0.024841407775878906, 0.024580928802490236, 0.024549375534057616, 0.024526847839355468, 0.024708736419677736, 0.024904064178466797, 0.024614912033081054, 0.024544832229614257, 0.024533439636230468, 0.02451456069946289, 0.024677568435668946, 0.02478323173522949, 0.024795583724975586, 0.02460633659362793, 0.02478323173522949, 0.024467456817626954, 0.024507616043090822, 0.024566560745239257, 0.024886335372924805, 0.024641536712646486, 0.02457491111755371, 0.024448127746582032, 0.024580768585205078, 0.02518448066711426, 0.024700511932373048, 0.02453094482421875, 0.02488265609741211, 0.024746944427490234, 0.0247193603515625, 0.024554975509643556, 0.024615455627441406, 0.024614240646362303, 0.02446790313720703, 0.024783071517944337, 0.024573951721191405, 0.0248603515625, 0.024653823852539062, 0.024557535171508788, 0.02500556755065918, 0.024653696060180665, 0.025676223754882814, 0.026434080123901367, 0.024757471084594727, 0.024647968292236328, 0.024586271286010743, 0.024666175842285157, 0.024594751358032227, 0.024585344314575194, 0.024669151306152343, 0.024431903839111327, 0.024516511917114257, 0.02465670394897461, 0.02469638442993164, 0.024792991638183593, 0.02461955261230469, 0.024585504531860352, 0.024832000732421877, 0.02459769630432129, 0.024451904296875, 0.024227840423583984, 0.024405727386474608, 0.024289247512817382, 0.024198783874511718, 0.024142559051513673, 0.024088544845581053, 0.02425200080871582, 0.024268287658691406, 0.024404895782470702, 0.024434688568115235, 0.024333471298217772, 0.024345439910888673, 0.024423551559448243, 0.02434752082824707, 0.02480892753601074, 0.026811935424804687, 0.025015327453613283, 0.024469472885131835, 0.024649503707885743, 0.02492367935180664, 0.024615615844726563, 0.024620351791381837, 0.02466476821899414, 0.02467430305480957, 0.024493215560913086, 0.024657760620117188, 0.024515584945678712, 0.02426470375061035, 0.024551424026489257, 0.02428646469116211, 0.024304351806640624, 0.024170080184936524, 0.02418451118469238, 0.024103679656982423, 0.02416758346557617, 0.024298336029052733, 0.0242093448638916, 0.024143936157226563, 0.02408608055114746, 0.024123775482177735, 0.024483680725097656, 0.024641279220581055, 0.02471164894104004, 0.02470035171508789, 0.02472198486328125, 0.024827903747558593, 0.02473574447631836, 0.02485865592956543, 0.02478220748901367, 0.02465020751953125, 0.024610944747924805, 0.024681535720825196, 0.024895999908447267, 0.02466655921936035, 0.024723392486572265, 0.0249815673828125, 0.024846336364746095, 0.02497439956665039, 0.02482387161254883, 0.024947071075439455, 0.02610588836669922, 0.025106399536132813, 0.02497443199157715, 0.024756479263305663, 0.02475484848022461, 0.024829952239990235, 0.024745119094848632, 0.024761184692382813, 0.024720928192138673, 0.024820192337036133, 0.024700319290161133, 0.024731327056884765, 0.02461788749694824, 0.02500819206237793, 0.024852415084838868, 0.024624160766601563, 0.024710111618041992, 0.024849567413330078, 0.024724319458007814, 0.024635391235351564, 0.024796735763549804, 0.02469728088378906, 0.024691743850708006, 0.025012704849243166, 0.02474153518676758, 0.024616832733154296, 0.0247359676361084, 0.024616832733154296, 0.02456787109375, 0.02633401679992676, 0.02523868751525879, 0.024807552337646484, 0.02491663932800293, 0.024694847106933593, 0.024645631790161132, 0.024642879486083985, 0.02481385612487793, 
0.024657695770263673, 0.024715904235839845, 0.02449171257019043, 0.024631616592407226, 0.025821184158325194, 0.02486662483215332, 0.024670400619506837, 0.024935455322265626, 0.02493539237976074, 0.024750080108642578, 0.02473574447631836, 0.025233407974243165, 0.024542783737182616, 0.024662464141845704, 0.02469593620300293, 0.02470387268066406, 0.02476406478881836, 0.024768352508544922, 0.024764928817749023, 0.024729120254516603, 0.02485910415649414, 0.025046432495117187, 0.02491763114929199, 0.024765024185180663, 0.02472313690185547, 0.024742591857910157, 0.025319456100463867, 0.024702943801879883, 0.02461676788330078, 0.02463968086242676, 0.024616960525512696, 0.025050239562988283, 0.024968063354492188, 0.024662208557128907, 0.024801088333129884, 0.024993791580200195, 0.02466716766357422, 0.024990688323974608, 0.024672256469726563, 0.025190208435058595, 0.0247891845703125, 0.02486403274536133, 0.02470710372924805, 0.02472755241394043, 0.024746688842773437, 0.02475142478942871, 0.024791391372680664, 0.02470044708251953, 0.02466076850891113, 0.024814783096313478, 0.024734176635742188, 0.02509164810180664, 0.02500227165222168, 0.024835968017578126, 0.024631839752197265, 0.02492560005187988, 0.024689376831054686, 0.024764415740966796, 0.024584192276000977, 0.0247459831237793, 0.02465171241760254, 0.025001440048217773, 0.02465827178955078, 0.02463327980041504, 0.024686208724975588, 0.024512256622314453, 0.024421024322509765, 0.02459676742553711, 0.024604671478271483, 0.024608768463134766, 0.025795936584472656, 0.025664159774780274, 0.02465177536010742, 0.024661855697631838, 0.02466217613220215, 0.024917024612426758, 0.024894432067871095, 0.024559520721435548, 0.02472764778137207, 0.025212928771972655, 0.025330848693847656, 0.024607391357421876, 0.024455263137817384, 0.0245883846282959, 0.024647680282592774, 0.024620864868164064, 0.024698400497436525, 0.025385087966918945, 0.024795679092407225, 0.02514508819580078, 0.024684480667114258, 0.024807519912719726, 0.024715520858764647, 0.024623680114746093, 0.024798688888549806, 0.024625696182250977, 0.024610815048217775, 0.02453708839416504, 0.024573951721191405, 0.024721408843994142, 0.024525920867919923, 0.024488544464111327, 0.02493881607055664, 0.024805376052856445, 0.024649728775024415, 0.02454528045654297, 0.024569664001464844, 0.02446713638305664, 0.024689151763916017, 0.02468454360961914, 0.02464508819580078, 0.024719903945922852, 0.024638784408569335, 0.024507072448730467, 0.024792255401611327, 0.024557600021362303, 0.024648479461669922, 0.024803327560424804, 0.02511257553100586, 0.025217023849487305, 0.024840192794799806, 0.024591808319091798, 0.024615455627441406, 0.024731679916381834, 0.024760351181030274, 0.024707040786743163, 0.02472755241394043, 0.02481052780151367, 0.024572032928466797, 0.024531808853149414, 0.02470297622680664, 0.024647071838378908, 0.0246112003326416, 0.024611040115356444, 0.024827264785766603, 0.025027200698852538, 0.024589471817016602, 0.024389984130859375, 0.024281600952148437, 0.024061279296875, 0.024044191360473633, 0.024197120666503907, 0.024216800689697265, 0.02432694435119629, 0.0245166072845459, 0.024381439208984376, 0.0246824951171875, 0.02453651237487793, 0.02468307113647461, 0.024319999694824217, 0.02450432014465332, 0.024344575881958007, 0.025718591690063478, 0.02471993637084961, 0.024743904113769533, 0.024411264419555663, 0.026021984100341795, 0.024978271484375, 0.024502208709716797, 0.024256128311157227, 0.024314239501953126, 0.024503551483154296, 0.02456038475036621, 0.024473600387573242, 
0.024403295516967773, 0.024300128936767577, 0.025299007415771485, 0.024163328170776367, 0.02412838363647461, 0.024195199966430665, 0.0242475528717041, 0.025168512344360353, 0.02425974464416504, 0.024277984619140627, 0.02426192092895508, 0.02426748847961426, 0.025450496673583983, 0.024823808670043947, 0.02454092788696289, 0.024540544509887696, 0.024410144805908203, 0.02461510467529297, 0.024488607406616212, 0.024993791580200195, 0.024683679580688477, 0.02484659194946289, 0.024809280395507814, 0.024992639541625977, 0.024711008071899413, 0.02493801689147949, 0.02501481628417969, 0.02482585525512695, 0.024838144302368165, 0.02469856071472168, 0.02495929527282715, 0.02496620750427246, 0.024903615951538085, 0.02498252868652344, 0.02503036880493164, 0.024916255950927734, 0.02500934410095215, 0.024713216781616212, 0.024761152267456055, 0.024806976318359375, 0.025087711334228515, 0.025053920745849608, 0.024661439895629883, 0.024684223175048828, 0.025404287338256837, 0.024809343338012695, 0.024625280380249023, 0.02474393653869629, 0.024688640594482423, 0.024786815643310547, 0.024897504806518554, 0.02546499252319336, 0.02475449562072754, 0.024713216781616212, 0.024727264404296876, 0.02485481643676758, 0.024899168014526366, 0.024710752487182616, 0.024687423706054687, 0.024557567596435546, 0.024681503295898438, 0.024599519729614258, 0.024713247299194337, 0.024698175430297852, 0.024619680404663086, 0.024653247833251953, 0.024637567520141603, 0.02470137596130371, 0.024969215393066405, 0.024596479415893553, 0.024845888137817383, 0.025100128173828125, 0.025717344284057617, 0.025024511337280272, 0.02481705665588379, 0.024760576248168947, 0.02465622329711914, 0.024853792190551758, 0.024879007339477538, 0.024832832336425782, 0.024901023864746095, 0.024694911956787108, 0.02466454315185547, 0.02482784080505371, 0.02481772804260254, 0.024863807678222657, 0.024662912368774412, 0.024751583099365235, 0.025021024703979492, 0.02489257621765137, 0.024705568313598634, 0.024571584701538085, 0.024617599487304687, 0.024625152587890626, 0.024746208190917968, 0.0246474552154541, 0.024661632537841798, 0.024977792739868165, 0.02488444709777832, 0.024776832580566406, 0.024875680923461915, 0.0247459831237793, 0.02455865669250488, 0.02447455978393555, 0.02456166458129883, 0.0248668155670166, 0.024881120681762695, 0.02462713623046875, 0.0247193603515625, 0.02467030334472656, 0.02446950340270996, 0.024961023330688475, 0.025067520141601563, 0.02523494338989258, 0.025391807556152345, 0.02497443199157715, 0.02506012725830078, 0.024961151123046876, 0.024759584426879883, 0.024742591857910157, 0.024658079147338866, 0.024706560134887694, 0.024713600158691406, 0.024727359771728515, 0.024755903244018555, 0.024672544479370118, 0.024641759872436525, 0.024688575744628908, 0.024610559463500978, 0.0245980167388916, 0.024494911193847658, 0.02443878364562988, 0.024774816513061522, 0.025035648345947265, 0.024773120880126953, 0.02466864013671875, 0.024577951431274413, 0.02454252815246582, 0.024893375396728517, 0.024795904159545898, 0.024680543899536132, 0.024534400939941407, 0.024490304946899414, 0.024656192779541015, 0.024726560592651367, 0.02487196731567383, 0.02478220748901367, 0.024955455780029296, 0.024590560913085938, 0.024755775451660158, 0.0251680965423584, 0.02479046440124512, 0.024686304092407227, 0.024695648193359374, 0.024508256912231446, 0.024541215896606447, 0.024561792373657226, 0.024700927734375, 0.024832000732421877, 0.024702720642089844, 0.024535295486450195, 0.024532991409301756, 0.02471731185913086, 0.024936447143554686, 
0.024793088912963866, 0.024571903228759767, 0.024648895263671877, 0.02465990447998047, 0.02468908882141113, 0.02468262481689453, 0.02474403190612793, 0.024569503784179686, 0.02456755256652832, 0.02465875244140625, 0.024586368560791015, 0.024676223754882813, 0.02461871910095215]",tokens/s,40.38532526513516,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.337472,569.311232,0.0,174.063616,172.57984,s,1,7.0809599609375,7.0809599609375,0.0,7.0809599609375,7.0809599609375,7.0809599609375,7.0809599609375,[7.0809599609375],,kWh,4.605726533350207e-06,4.989117345350963e-07,1.0019452460069411e-06,6.106583513892244e-06,,MB,1164.67712,642.711552,0.0,234.881024,215.589888,s,25,0.18156364774703979,0.0072625459098815915,0.0001934262272951333,0.007157440185546875,0.007528467178344727,0.007562796592712402,0.0076320192527771,"[0.007651296138763428, 0.007126143932342529, 0.007109407901763916, 0.007078271865844726, 0.0070833277702331545, 0.007101984024047852, 0.0070431680679321285, 0.007100096225738525, 0.007518655776977539, 0.007526048183441162, 0.007385503768920898, 0.007530079841613769, 0.007375487804412842, 0.007451680183410645, 0.00757097578048706, 0.007209311962127685, 0.0073846721649169925, 0.0074481601715087895, 0.007241727828979493, 0.007157440185546875, 0.0070797119140625, 0.007059904098510742, 0.007146719932556152, 0.007109344005584717, 0.007074528217315674]",tokens/s,35249.34687871375,kWh,2.154723453840459e-07,2.3762854741778743e-08,1.1348072429837874e-07,3.5271592442420335e-07,tokens/kWh,725796546.946133,MB,1198.40768,644.808704,0.0,236.978176,215.592448,s,25,9.855248168945309,0.3942099267578125,0.0258072195663266,0.3893913879394531,0.4026221801757813,0.4114611938476562,0.4888181787109373,"[0.38299411010742185, 0.5126096801757812, 0.38021795654296875, 0.38084127807617185, 0.38131674194335935, 0.3791335144042969, 0.38811654663085937, 0.401467041015625, 0.40339227294921876, 0.41347842407226565, 0.40133578491210936, 0.3942486572265625, 0.3913739013671875, 0.3932178955078125, 0.39696780395507814, 0.3935213012695313, 0.3916291198730469, 0.3952633972167969, 0.3893913879394531, 0.38041622924804686, 0.37945755004882814, 0.3835711669921875, 0.37981634521484375, 0.379656494140625, 0.38181356811523437]",tokens/s,159.81332717353104,kWh,1.1285612854936907e-05,1.2445976165048161e-06,4.527772988701155e-06,1.7057983460142875e-05,tokens/kWh,3693285.325736406,,s,1575,9.8430794839859,0.006249574275546604,0.0032463893249899843,0.006076767921447754,0.006460102462768555,0.006543299198150635,0.007097601194381713,"[0.006322175979614258, 0.00658841609954834, 0.007006207942962647, 0.006119423866271972, 0.006081759929656982, 0.006062304019927979, 0.00606060791015625, 0.006148255825042724, 0.0060761280059814455, 0.006119359970092773, 0.006111231803894043, 0.006586527824401856, 0.0061411519050598145, 0.006069056034088135, 0.006111231803894043, 0.006054111957550049, 0.006077439785003662, 
0.006072959899902344, 0.006053088188171386, 0.0060215678215026856, 0.00608512020111084, 0.00611030387878418, 0.006019455909729004, 0.006023903846740722, 0.006032256126403809, 0.006007167816162109, 0.006017600059509277, 0.0060661759376525876, 0.006036767959594727, 0.00603004789352417, 0.0060207037925720215, 0.006043807983398437, 0.006027520179748535, 0.006008800029754639, 0.005991648197174073, 0.006021599769592285, 0.005986464023590088, 0.006003039836883545, 0.005990240097045898, 0.005997600078582764, 0.00598956823348999, 0.00597327995300293, 0.0059920639991760255, 0.005986176013946533, 0.006007904052734375, 0.006014880180358887, 0.006014527797698975, 0.006025248050689698, 0.0059697279930114745, 0.006002912044525147, 0.005995903968811035, 0.006015999794006348, 0.006016064167022705, 0.006040512084960937, 0.005994495868682862, 0.006023231983184814, 0.006006720066070556, 0.006010079860687256, 0.006001440048217774, 0.005992447853088379, 0.00601087999343872, 0.006035520076751709, 0.0060210561752319335, 0.005936031818389893, 0.006013984203338623, 0.006022016048431397, 0.006033408164978027, 0.006006847858428955, 0.006000768184661865, 0.006018879890441894, 0.006021503925323486, 0.006026815891265869, 0.006024735927581787, 0.006010848045349121, 0.006025055885314942, 0.006031392097473144, 0.006015679836273193, 0.13460809326171874, 0.006400352001190186, 0.006250815868377686, 0.006111487865447998, 0.006227359771728515, 0.006592991828918457, 0.006094175815582276, 0.0060934720039367675, 0.0060293121337890625, 0.006074368000030517, 0.006043039798736573, 0.0060680961608886716, 0.006044447898864746, 0.006106912136077881, 0.006094687938690185, 0.006062623977661133, 0.006073567867279053, 0.006206111907958984, 0.006085536003112793, 0.006056896209716797, 0.006002751827239991, 0.0066468482017517086, 0.0060835199356079105, 0.006047743797302246, 0.006070591926574707, 0.006024479866027832, 0.006057983875274659, 0.006029439926147461, 0.006025504112243652, 0.0060308480262756346, 0.0060380158424377444, 0.006164480209350586, 0.006458847999572754, 0.006049439907073974, 0.006046656131744385, 0.006047647953033447, 0.006104479789733887, 0.006039999961853028, 0.006076416015625, 0.006025119781494141, 0.006102399826049805, 0.006040736198425293, 0.00606169605255127, 0.006070400238037109, 0.006063712120056152, 0.0060700798034667965, 0.006072319984436035, 0.006050687789916992, 0.006040480136871338, 0.0064778242111206055, 0.0060044159889221195, 0.006025536060333252, 0.006008992195129395, 0.006017216205596924, 0.005998047828674316, 0.006008224010467529, 0.006004608154296875, 0.006003967761993408, 0.006053055763244629, 0.006037983894348144, 0.005990399837493897, 0.006000639915466309, 0.005988351821899414, 0.0059978880882263184, 0.006010623931884765, 0.006190271854400635, 0.006034463882446289, 0.0060013761520385745, 0.006008831977844238, 0.0060026879310607914, 0.006000448226928711, 0.005994847774505615, 0.006032735824584961, 0.006033088207244873, 0.006031487941741943, 0.006056863784790039, 0.006039199829101563, 0.0060499200820922855, 0.006039391994476318, 0.006051360130310058, 0.006115647792816162, 0.00608460807800293, 0.006047423839569092, 0.006105728149414062, 0.006060031890869141, 0.006072319984436035, 0.006000639915466309, 0.006039552211761475, 0.005994495868682862, 0.0060273919105529785, 0.005991775989532471, 0.0060217599868774415, 0.005942495822906494, 0.006015679836273193, 0.0059714560508728025, 0.006035967826843262, 0.005968224048614502, 0.006024864196777344, 0.005974016189575195, 0.006024191856384278, 0.005983232021331787, 
0.006025023937225342, 0.005984799861907959, 0.006027967929840088, 0.005964928150177002, 0.0060191359519958494, 0.005975840091705323, 0.006018335819244385, 0.0059788479804992675, 0.0060356159210205075, 0.006006303787231445, 0.006035488128662109, 0.005922016143798828, 0.0060284161567687984, 0.005995808124542236, 0.006059967994689942, 0.006006591796875, 0.006032351970672607, 0.005997920036315918, 0.006032832145690918, 0.00597654390335083, 0.006039968013763428, 0.006023168087005615, 0.006066304206848144, 0.005996575832366943, 0.006047584056854248, 0.00601087999343872, 0.006059072017669678, 0.006016128063201905, 0.006059840202331543, 0.0059978880882263184, 0.006027679920196533, 0.005976480007171631, 0.006112192153930664, 0.005982463836669922, 0.006077119827270508, 0.0060028800964355465, 0.006074463844299317, 0.006029024124145508, 0.0060457921028137206, 0.0060024957656860355, 0.006057568073272705, 0.0059725441932678225, 0.006039487838745117, 0.006002079963684082, 0.006025536060333252, 0.006041855812072754, 0.0060330238342285155, 0.006031775951385498, 0.006018496036529541, 0.006019775867462158, 0.00602950382232666, 0.006023039817810058, 0.006023231983184814, 0.00602239990234375, 0.006009344100952148, 0.006025216102600098, 0.006011903762817383, 0.006032383918762207, 0.0060247998237609865, 0.006019680023193359, 0.006044703960418701, 0.006041920185089112, 0.006080128192901611, 0.006142144203186035, 0.006082911968231201, 0.006215519905090332, 0.006085375785827637, 0.006138912200927734, 0.0060720000267028805, 0.006120128154754639, 0.0060381760597229, 0.006090400218963623, 0.006025152206420898, 0.006047840118408203, 0.005937344074249268, 0.006043488025665283, 0.006442624092102051, 0.006046207904815673, 0.005977439880371094, 0.0063515520095825196, 0.006012928009033203, 0.006061344146728515, 0.00602185583114624, 0.006025407791137695, 0.006037407875061035, 0.006045599937438965, 0.006060287952423095, 0.006029056072235107, 0.006027455806732178, 0.005993855953216553, 0.006053343772888183, 0.005995584011077881, 0.00602239990234375, 0.005988831996917725, 0.0060356478691101076, 0.006141952037811279, 0.006049791812896729, 0.0060395197868347164, 0.006074399948120117, 0.00600867223739624, 0.006012671947479248, 0.00600435209274292, 0.006038303852081299, 0.005993696212768554, 0.0060191359519958494, 0.006013984203338623, 0.00601087999343872, 0.006078144073486328, 0.006019392013549805, 0.005990079879760742, 0.006015168190002441, 0.006061888217926025, 0.006080512046813965, 0.006014976024627685, 0.006063648223876953, 0.006015456199645996, 0.006060031890869141, 0.006034815788269043, 0.00604966402053833, 0.006008992195129395, 0.006079071998596191, 0.006018335819244385, 0.006060768127441407, 0.00603545618057251, 0.006051839828491211, 0.0060067839622497555, 0.00603545618057251, 0.006014431953430176, 0.006359583854675293, 0.006002431869506836, 0.006048255920410156, 0.005998335838317871, 0.006025216102600098, 0.006014976024627685, 0.006016672134399414, 0.005994175910949707, 0.006001247882843018, 0.005904575824737549, 0.0060293121337890625, 0.006008863925933838, 0.006044832229614258, 0.006040095806121826, 0.006037439823150635, 0.005996863842010498, 0.006060192108154297, 0.005989823818206787, 0.006066944122314453, 0.005993919849395752, 0.0060433921813964845, 0.005974656105041504, 0.0060293121337890625, 0.0059999680519104, 0.006023839950561523, 0.005990399837493897, 0.0060293121337890625, 0.0060000958442687985, 0.006054431915283203, 0.005988351821899414, 0.006021024227142334, 0.005972064018249511, 0.0060284481048583985, 
0.005981023788452148, 0.006015007972717285, 0.0059550080299377445, 0.006013023853302002, 0.005982399940490723, 0.006025472164154053, 0.005988416194915772, 0.0060349440574645995, 0.005988800048828125, 0.006039552211761475, 0.005995872020721435, 0.006025184154510498, 0.005984127998352051, 0.006009376049041748, 0.005976352214813232, 0.006002592086791992, 0.006006879806518555, 0.006030367851257324, 0.005970143795013428, 0.006011680126190185, 0.0059818878173828124, 0.006203135967254639, 0.006019872188568115, 0.006004479885101319, 0.0060067839622497555, 0.005994495868682862, 0.0059944639205932616, 0.0059818878173828124, 0.0060070080757141115, 0.005986591815948486, 0.006050911903381348, 0.006017183780670166, 0.006035935878753662, 0.0060087041854858395, 0.006002943992614746, 0.0059901118278503414, 0.006006080150604248, 0.0060037441253662106, 0.006030623912811279, 0.005932576179504394, 0.005972576141357422, 0.006006720066070556, 0.0059903359413146975, 0.006014976024627685, 0.005973152160644531, 0.0060096001625061036, 0.00662883186340332, 0.0065953278541564945, 0.006104063987731933, 0.005990911960601806, 0.006007487773895264, 0.00599616003036499, 0.006006624221801758, 0.006029056072235107, 0.006010848045349121, 0.0059686717987060545, 0.0060020160675048825, 0.006012671947479248, 0.006039840221405029, 0.005984127998352051, 0.006046080112457276, 0.005994592189788818, 0.006205440044403076, 0.006103040218353272, 0.006238560199737549, 0.006147712230682373, 0.0060989117622375485, 0.006002336025238037, 0.006039904117584228, 0.006029727935791015, 0.0060423359870910645, 0.006619840145111084, 0.006107391834259034, 0.008075519561767578, 0.008646080017089844, 0.00631331205368042, 0.006026400089263916, 0.0064299840927124026, 0.00609500789642334, 0.006093183994293213, 0.006107103824615478, 0.006045663833618164, 0.006072639942169189, 0.0061847038269042965, 0.006046783924102783, 0.006035935878753662, 0.006050240039825439, 0.006008863925933838, 0.0060356159210205075, 0.006020959854125977, 0.006031360149383545, 0.006008831977844238, 0.006029600143432617, 0.005993343830108642, 0.006011040210723877, 0.0059987521171569825, 0.006154784202575684, 0.006100992202758789, 0.006059967994689942, 0.006008895874023437, 0.006053855895996094, 0.005974368095397949, 0.005967904090881348, 0.005986271858215332, 0.006020448207855225, 0.006032032012939453, 0.006039552211761475, 0.006078464031219482, 0.006050848007202149, 0.0059913921356201175, 0.006037248134613037, 0.006078559875488282, 0.006023071765899658, 0.005988736152648926, 0.006027135848999023, 0.006000639915466309, 0.006131711959838867, 0.0061430401802062985, 0.0061511039733886715, 0.0060999999046325686, 0.006128608226776123, 0.006176767826080322, 0.0062111678123474125, 0.006267136096954346, 0.006277279853820801, 0.0062873601913452145, 0.0062353601455688475, 0.006318367958068847, 0.006363647937774658, 0.006342656135559082, 0.006432703971862793, 0.006369408130645752, 0.00640121603012085, 0.006419199943542481, 0.006496255874633789, 0.006500351905822754, 0.006360576152801513, 0.006324160099029541, 0.006280992031097412, 0.006351808071136475, 0.006434271812438965, 0.007294496059417724, 0.006849376201629639, 0.006461120128631591, 0.006289728164672851, 0.006336480140686035, 0.006237504005432129, 0.006426464080810547, 0.006410816192626953, 0.006308032035827637, 0.006418560028076172, 0.00640556812286377, 0.006435743808746338, 0.006366879940032959, 0.006272128105163574, 0.0072549118995666505, 0.006603040218353271, 0.008226495742797851, 0.0064553279876708985, 0.006469567775726318, 
0.007096735954284668, 0.006987391948699951, 0.006436863899230957, 0.006545472145080567, 0.006453216075897217, 0.006474976062774658, 0.006400800228118896, 0.006377471923828125, 0.006461440086364746, 0.00642252779006958, 0.007579616069793701, 0.007379295825958252, 0.006391488075256348, 0.006297152042388916, 0.006455904006958008, 0.006498144149780273, 0.006442719936370849, 0.006467264175415039, 0.0064637441635131835, 0.0065623679161071775, 0.006588191986083985, 0.00653107213973999, 0.006551551818847656, 0.0064163517951965334, 0.006336703777313233, 0.006471519947052002, 0.006496223926544189, 0.006520864009857178, 0.006589568138122558, 0.006447999954223633, 0.006536799907684326, 0.006601471900939942, 0.006477344036102295, 0.006523231983184815, 0.006542367935180664, 0.0064167361259460445, 0.006314720153808594, 0.006270143985748291, 0.006226431846618652, 0.006168575763702393, 0.006268256187438965, 0.0062707839012146, 0.00616048002243042, 0.006199456214904785, 0.006255199909210205, 0.0062568001747131344, 0.0061699519157409665, 0.006173183917999267, 0.006117216110229492, 0.006146399974822998, 0.006111040115356446, 0.006223584175109863, 0.006322336196899414, 0.006295775890350342, 0.006180223941802979, 0.006134304046630859, 0.0061584959030151366, 0.006131135940551758, 0.0061485118865966795, 0.006176032066345215, 0.006343423843383789, 0.006506624221801757, 0.006375264167785644, 0.006520832061767578, 0.00638976001739502, 0.0063975038528442385, 0.006340320110321045, 0.0063656320571899416, 0.006494080066680908, 0.006326272010803223, 0.006305471897125244, 0.006267199993133545, 0.007161664009094238, 0.006427840232849121, 0.006776095867156983, 0.006501503944396973, 0.0063719358444213866, 0.006277120113372803, 0.006276288032531738, 0.006294400215148926, 0.006477759838104248, 0.006414463996887207, 0.006272064208984375, 0.006374176025390625, 0.007100063800811768, 0.007599775791168213, 0.00793673610687256, 0.01093222427368164, 0.009020895957946777, 0.006556191921234131, 0.006602335929870606, 0.006707615852355957, 0.006596960067749023, 0.006588064193725586, 0.006441088199615479, 0.006330463886260986, 0.006405983924865723, 0.00644704008102417, 0.006452415943145752, 0.0064039998054504395, 0.006321023941040039, 0.00630790376663208, 0.006237823963165284, 0.006210048198699952, 0.0062503361701965335, 0.006232063770294189, 0.006152095794677734, 0.006262176036834717, 0.0061385598182678225, 0.00611737585067749, 0.006264832019805908, 0.0064488000869750975, 0.006392159938812256, 0.006303743839263916, 0.006300064086914062, 0.006239840030670166, 0.006170623779296875, 0.006254208087921142, 0.0062102718353271484, 0.006489759922027588, 0.006326272010803223, 0.00642790412902832, 0.006408959865570068, 0.006462656021118164, 0.006482751846313477, 0.006286911964416504, 0.006293504238128662, 0.00637500810623169, 0.006529695987701416, 0.006488255977630615, 0.006388927936553955, 0.006350944042205811, 0.006406367778778076, 0.006240992069244384, 0.006216671943664551, 0.0061931519508361815, 0.006274687767028809, 0.006259071826934815, 0.006401023864746094, 0.006532447814941406, 0.006480576038360596, 0.0065913920402526855, 0.006496384143829346, 0.006456736087799072, 0.0063554878234863285, 0.006415775775909424, 0.006471776008605957, 0.006382080078125, 0.006378975868225098, 0.006418015956878662, 0.006501311779022217, 0.00659449577331543, 0.006463232040405273, 0.0063851518630981445, 0.006421311855316162, 0.006507967948913574, 0.006504320144653321, 0.0065504322052001955, 0.006468992233276367, 0.006517151832580567, 0.006496384143829346, 
0.006555808067321777, 0.0064572482109069825, 0.006335455894470215, 0.006280288219451904, 0.00623529577255249, 0.006418528079986573, 0.0064824318885803225, 0.006710527896881103, 0.0065157442092895505, 0.006525728225708008, 0.00649721622467041, 0.006434879779815674, 0.006305727958679199, 0.006327648162841797, 0.0065194878578186035, 0.006331552028656006, 0.00624505615234375, 0.0061147198677062985, 0.006123648166656494, 0.006083072185516358, 0.00605017614364624, 0.006072288036346436, 0.006197311878204346, 0.006218688011169433, 0.0062657279968261715, 0.006328192234039307, 0.006159808158874512, 0.006150784015655518, 0.006104544162750244, 0.00616534423828125, 0.006151936054229736, 0.006253983974456787, 0.006404575824737549, 0.0065577921867370605, 0.00641923189163208, 0.0064143681526184085, 0.00633190393447876, 0.006176064014434815, 0.00610371208190918, 0.006107327938079834, 0.006082496166229248, 0.006076767921447754, 0.0060495038032531735, 0.006057983875274659, 0.006039552211761475, 0.006154240131378174, 0.006176608085632324, 0.0064208641052246095, 0.0064691839218139644, 0.00645904016494751, 0.006558015823364258, 0.006529280185699463, 0.006420063972473145, 0.006259391784667969, 0.0061129918098449705, 0.00609881591796875, 0.006109312057495118, 0.006121664047241211, 0.0060700798034667965, 0.006078464031219482, 0.006076416015625, 0.006154047966003418, 0.006082272052764893, 0.006113088130950928, 0.0060975680351257324, 0.006436960220336914, 0.006082687854766846, 0.006231135845184326, 0.006275040149688721, 0.00662172794342041, 0.00616809606552124, 0.0061200962066650395, 0.006137792110443115, 0.006067359924316406, 0.006123904228210449, 0.006271520137786865, 0.006175839900970459, 0.0062137598991394045, 0.006326784133911132, 0.006500127792358398, 0.006298111915588379, 0.0062211198806762695, 0.0061057920455932614, 0.006073760032653809, 0.006043327808380127, 0.006093728065490723, 0.006182911872863769, 0.006434815883636475, 0.00660646390914917, 0.006616864204406738, 0.006517343997955323, 0.006453120231628418, 0.006424704074859619, 0.0064767999649047855, 0.006257184028625488, 0.006203872203826904, 0.006132351875305176, 0.0061354880332946775, 0.006139711856842041, 0.006107647895812988, 0.006177792072296143, 0.006200352191925049, 0.006168928146362305, 0.00610265588760376, 0.006260735988616943, 0.006349055767059326, 0.006247392177581787, 0.006154208183288574, 0.006170656204223633, 0.006183135986328125, 0.0061198720932006834, 0.006094816207885742, 0.0061294717788696286, 0.006054240226745606, 0.005984255790710449, 0.006059296131134033, 0.006068960189819336, 0.0062828478813171384, 0.006393311977386474, 0.006202303886413575, 0.006137951850891114, 0.006093791961669922, 0.006112192153930664, 0.006067840099334717, 0.006068672180175781, 0.006039487838745117, 0.006049791812896729, 0.006109183788299561, 0.006158336162567139, 0.006112448215484619, 0.006148928165435791, 0.006060031890869141, 0.00615334415435791, 0.006036352157592773, 0.006076416015625, 0.0060067839622497555, 0.006168575763702393, 0.006336351871490478, 0.006443424224853516, 0.006674176216125489, 0.006612448215484619, 0.006526815891265869, 0.006574336051940918, 0.006379615783691406, 0.006813632011413574, 0.006381984233856201, 0.006333759784698486, 0.006334720134735107, 0.006195648193359375, 0.006176576137542724, 0.0061298561096191405, 0.006167935848236084, 0.0061682558059692385, 0.006112448215484619, 0.006381311893463135, 0.006100800037384033, 0.006135072231292725, 0.006058752059936523, 0.006279327869415283, 0.006194431781768799, 0.006255008220672607, 
0.006265408039093018, 0.006239744186401367, 0.006199935913085938, 0.0062278399467468265, 0.006672383785247803, 0.006293087959289551, 0.006207935810089112, 0.006103199958801269, 0.006076223850250244, 0.006059904098510743, 0.006072447776794433, 0.006088223934173584, 0.006074848175048828, 0.006135072231292725, 0.0062696638107299805, 0.006098048210144043, 0.006072703838348389, 0.006300159931182861, 0.006272223949432373, 0.006267871856689453, 0.006112095832824707, 0.006123583793640136, 0.006109792232513428, 0.006084256172180176, 0.006135744094848633, 0.006310624122619629, 0.006227968215942382, 0.00610211181640625, 0.0061244478225708, 0.0060677118301391605, 0.0062490878105163575, 0.006113152027130127, 0.006108352184295654, 0.006087488174438477, 0.006205344200134277, 0.0061641278266906735, 0.006077951908111572, 0.006089024066925049, 0.006186912059783936, 0.006509119987487793, 0.006520959854125976, 0.006706367969512939, 0.006514848232269287, 0.0065133762359619145, 0.0065001602172851565, 0.006525023937225342, 0.006398015975952148, 0.006459296226501465, 0.006460639953613281, 0.006357888221740723, 0.0061296639442443845, 0.007016448020935059, 0.006104415893554688, 0.006049600124359131, 0.0061528959274292, 0.006176928043365479, 0.006127744197845459, 0.006080383777618408, 0.006154240131378174, 0.006057119846343994, 0.006086880207061767, 0.006482687950134278, 0.006608575820922852, 0.006342336177825928, 0.006291423797607422, 0.006192512035369873, 0.0061380801200866695, 0.00609878396987915, 0.006097824096679688, 0.006465536117553711, 0.006184447765350342, 0.006642176151275635, 0.006643712043762207, 0.00702784013748169, 0.006234655857086182, 0.006237728118896484, 0.006212416172027588, 0.006115039825439453, 0.0061073598861694335, 0.006238272190093994, 0.006359039783477783, 0.006252863883972168, 0.0062399358749389644, 0.006213632106781006, 0.006135807991027832, 0.006031360149383545, 0.006083775997161865, 0.006013855934143066, 0.006070432186126709, 0.006104832172393799, 0.0061272640228271485, 0.006277599811553955, 0.006473760128021241, 0.006649280071258545, 0.006566304206848145, 0.0065474557876586915, 0.006662144184112549, 0.0064880638122558594, 0.006492159843444824, 0.006387360095977783, 0.006408544063568116, 0.006401663780212403, 0.00625497579574585, 0.006241312026977539, 0.006215936183929444, 0.006148223876953125, 0.006130271911621093, 0.00615334415435791, 0.006221824169158936, 0.0061829757690429685, 0.006468607902526856, 0.006379327774047852, 0.006371103763580323, 0.006285344123840332, 0.006443359851837158, 0.00645308780670166, 0.006191103935241699, 0.006174176216125488, 0.0062280001640319825, 0.006210048198699952, 0.006131455898284912, 0.0060941438674926756, 0.006087520122528076, 0.006091040134429931, 0.006388895988464356, 0.006177216053009033, 0.0061485118865966795, 0.006147488117218018, 0.0061077442169189455, 0.006045951843261719, 0.006102208137512207, 0.006050047874450684, 0.006144320011138916, 0.006065279960632324, 0.006391776084899902, 0.006463520050048828, 0.006179168224334717, 0.0061038718223571774, 0.006057695865631103, 0.007448575973510742, 0.006615039825439453, 0.006524928092956543, 0.006469791889190674, 0.006346047878265381, 0.006359583854675293, 0.006360415935516357, 0.006226208209991455, 0.006223231792449951, 0.006218751907348633, 0.006231647968292236, 0.006201759815216064, 0.006200448036193847, 0.0062984957695007325, 0.006501408100128174, 0.006487008094787598, 0.006365280151367187, 0.00637440013885498, 0.006375967979431152, 0.0062317438125610355, 0.006161215782165527, 0.006304992198944092, 
0.006300672054290772, 0.006076064109802246, 0.006080351829528809, 0.006205088138580323, 0.006193664073944092, 0.006127520084381104, 0.0060416641235351565, 0.006061728000640869, 0.006063680171966553, 0.0060629119873046875, 0.006087935924530029, 0.006111775875091553, 0.006072735786437989, 0.006075424194335937, 0.0060731201171875, 0.006123104095458984, 0.006285727977752686, 0.006279200077056885, 0.006221759796142578, 0.00616860818862915, 0.00613321590423584, 0.00610368013381958, 0.0061439042091369625, 0.00611030387878418, 0.006330848217010498, 0.006373824119567871, 0.006097343921661377, 0.006153791904449463, 0.006136032104492187, 0.006080927848815918, 0.00617033576965332, 0.006146336078643799, 0.006146048069000244, 0.006212800025939941, 0.00638047981262207, 0.00659065580368042, 0.006678207874298096, 0.0064973440170288084, 0.006542272090911865, 0.006432767868041992, 0.006524831771850586, 0.006401631832122803, 0.006229663848876953, 0.006212448120117188, 0.0061972479820251464, 0.006210944175720215, 0.006104991912841797, 0.006191711902618408, 0.006058303833007812, 0.006109119892120362, 0.0060778560638427735, 0.006087135791778564, 0.0060661759376525876, 0.006053120136260986, 0.006087264060974121, 0.0061413440704345705, 0.0062841281890869145, 0.006690271854400635, 0.006207968235015869, 0.0060867519378662105, 0.006112224102020264, 0.006665120124816895, 0.006275231838226318, 0.006131552219390869, 0.006283008098602295, 0.0064486398696899415, 0.006271743774414062, 0.006205664157867431, 0.006102431774139405, 0.006053664207458496, 0.006090784072875976, 0.006127647876739502, 0.006102911949157715, 0.006084512233734131, 0.006092576026916504, 0.006070240020751953, 0.006095104217529297, 0.006076863765716553, 0.006162752151489257, 0.006151999950408936, 0.006135200023651123, 0.00617139196395874, 0.006156320095062256, 0.006209760189056396, 0.006161824226379394, 0.006101376056671143, 0.006082560062408447, 0.006031167984008789, 0.006154431819915772, 0.006596511840820313, 0.006645631790161133, 0.00661897611618042, 0.006531455993652344, 0.006479872226715088, 0.006502399921417237, 0.006409952163696289, 0.006206751823425293, 0.006187615871429443, 0.006135647773742676, 0.006125376224517822, 0.006087488174438477, 0.0060787200927734375, 0.006067903995513916, 0.006123839855194092, 0.006231584072113037, 0.006135968208312989, 0.0061972479820251464, 0.006081984043121338, 0.006275455951690673, 0.006317376136779785, 0.006249216079711914, 0.006205567836761475, 0.006313151836395263, 0.006208320140838623, 0.006252543926239014, 0.006211008071899414, 0.006193727970123291, 0.006162687778472901, 0.006251584053039551, 0.006346687793731689, 0.006273983955383301, 0.006160607814788818, 0.006113887786865235, 0.0061262078285217285, 0.006095488071441651, 0.006094207763671875, 0.006105247974395752, 0.006105311870574951, 0.006050848007202149, 0.006054848194122314, 0.006176832199096679, 0.006213215827941895, 0.006156447887420654, 0.006074111938476562, 0.006124000072479248, 0.0060928001403808595, 0.006365375995635986, 0.006374944210052491, 0.006408864021301269, 0.006452320098876953, 0.006420608043670654, 0.006445343971252441, 0.006368735790252686, 0.006482592105865478, 0.006516287803649903, 0.006552031993865967, 0.006465184211730957, 0.006398176193237305, 0.006324319839477539, 0.006303391933441162, 0.006209887981414795, 0.006223872184753418, 0.006067903995513916, 0.006144224166870117, 0.006108767986297607, 0.006095456123352051, 0.006031328201293945, 0.006125311851501464, 0.00611568021774292, 0.00609503984451294, 0.0060680961608886716, 
0.006067999839782715, 0.006047455787658691, 0.0060472960472106934, 0.0062490878105163575, 0.0063816637992858884, 0.006437215805053711, 0.006772384166717529, 0.006270815849304199, 0.006250847816467285, 0.006183743953704834, 0.006149472236633301, 0.006065408229827881, 0.006071872234344482, 0.006055871963500977, 0.006099584102630615, 0.006093088150024414, 0.006077919960021973, 0.006112927913665771, 0.00625651216506958, 0.006165472030639649, 0.006129695892333985, 0.006229472160339355, 0.006430463790893555, 0.006228352069854736, 0.006097311973571777, 0.00613753604888916, 0.006028895854949951, 0.006044095993041993, 0.006213151931762696, 0.006350719928741455, 0.006577023983001709, 0.006512639999389648, 0.006475776195526123, 0.006382847785949707, 0.0064048638343811035, 0.0063777599334716795, 0.0062665920257568355, 0.006198272228240966, 0.006156544208526612, 0.0061010241508483885, 0.006074719905853272, 0.0060993280410766605, 0.00603545618057251, 0.00606822395324707, 0.006012864112854004, 0.006061888217926025, 0.006170783996582031, 0.00615231990814209, 0.006137311935424805, 0.006031871795654297, 0.005994495868682862, 0.006024352073669434, 0.005987167835235596, 0.006041600227355957, 0.005903744220733643, 0.0061348161697387694, 0.006002816200256347, 0.006050848007202149, 0.006035232067108155, 0.006019680023193359, 0.0060022082328796385, 0.006032095909118652, 0.006002943992614746, 0.00606547212600708, 0.006032127857208252, 0.006024960041046142, 0.005975615978240967, 0.0060850558280944825, 0.005996543884277344, 0.006037248134613037, 0.0060265278816223145, 0.006044640064239502, 0.0060992960929870605, 0.006086592197418213, 0.0060804481506347655, 0.0060486397743225095, 0.00604694414138794, 0.006032224178314209, 0.006052095890045166, 0.006120031833648682, 0.006045695781707764, 0.006078464031219482, 0.006091104030609131, 0.006117343902587891, 0.0060778560638427735, 0.006018496036529541, 0.006077280044555664, 0.006033696174621582, 0.006072256088256836, 0.006001728057861328, 0.006049568176269531, 0.005992640018463135, 0.006036384105682373, 0.005982048034667968, 0.006033440113067627, 0.005969791889190674, 0.006023136138916016, 0.005961855888366699, 0.006044703960418701, 0.005959904193878174, 0.006022240161895752, 0.00596451187133789, 0.006032639980316162, 0.006049471855163574, 0.006015103816986084, 0.005971903800964355, 0.006170464038848877, 0.005976160049438476, 0.006021120071411133, 0.0059658241271972655, 0.0060059518814086915, 0.006050496101379394, 0.006008959770202636, 0.005986303806304932, 0.006027200222015381, 0.005959551811218261, 0.006033696174621582, 0.005905888080596924, 0.006017568111419677, 0.005994495868682862, 0.006045536041259765, 0.006000800132751465, 0.006096288204193116, 0.006001183986663819, 0.0060338878631591795, 0.005995840072631836, 0.0060234560966491695, 0.005973440170288086, 0.006050303936004638, 0.005967936038970947, 0.005995935916900634, 0.00598195219039917, 0.006017951965332031, 0.005972064018249511, 0.006114655971527099, 0.0060236802101135255, 0.006030655860900879, 0.005995200157165527, 0.006014463901519776, 0.0059704318046569825, 0.006041600227355957, 0.005998015880584717, 0.006042175769805909, 0.005992288112640381, 0.0061133761405944825, 0.005988480091094971, 0.006043583869934082, 0.0059985918998718265, 0.006031199932098389, 0.006002399921417236, 0.006049248218536377, 0.005995488166809082, 0.006051199913024902, 0.005988480091094971, 0.0060375680923461916, 0.005986688137054444, 0.0060249919891357424, 0.005988639831542969, 0.006033311843872071, 0.005996032238006592, 
0.006048351764678955, 0.005982207775115967, 0.006012928009033203, 0.0060061440467834475, 0.006029952049255371, 0.006018784046173096, 0.0060289278030395506, 0.005990816116333008, 0.0060011520385742185, 0.005996640205383301, 0.0061231679916381836, 0.006024223804473877, 0.0060152320861816405, 0.005994527816772461, 0.006003647804260254, 0.006039360046386719, 0.006018815994262695, 0.006011072158813476, 0.0060026879310607914, 0.006031007766723633, 0.005943295955657959, 0.005970240116119385, 0.006029088020324707, 0.0060239357948303224, 0.006035391807556152, 0.006115520000457763, 0.0071840319633483885, 0.006700928211212158, 0.0065998082160949705, 0.0060999679565429685, 0.006330368041992188, 0.006032639980316162, 0.006044415950775146, 0.0060293121337890625, 0.006172671794891358, 0.00601087999343872, 0.00601907205581665, 0.006028575897216797, 0.006023903846740722, 0.0060067839622497555, 0.006038943767547608, 0.005997151851654053, 0.006131711959838867, 0.006080512046813965, 0.006074560165405273, 0.00600816011428833, 0.006243135929107666, 0.006094207763671875, 0.006076704025268555, 0.006145855903625488, 0.006097055912017822, 0.006002719879150391, 0.005992288112640381, 0.0059987521171569825, 0.006045567989349365, 0.006017151832580566, 0.006012800216674804, 0.006021183967590332, 0.006014592170715332, 0.006024672031402588, 0.006140895843505859, 0.006111231803894043, 0.006024608135223389, 0.0060442562103271485, 0.0059985918998718265, 0.0060293121337890625, 0.006002304077148438, 0.006037888050079346, 0.006023104190826416, 0.006027455806732178, 0.005992320060729981, 0.006012928009033203, 0.0059881601333618166, 0.006004928112030029, 0.006031072139739991, 0.006007071971893311, 0.006027647972106934, 0.006020768165588379, 0.006010655879974365, 0.006033599853515625, 0.006000671863555908, 0.006027232170104981, 0.0060028800964355465, 0.005905471801757813, 0.006014976024627685, 0.006006752014160156, 0.0060152320861816405, 0.006010848045349121, 0.005979231834411621, 0.006002655982971191, 0.0060068159103393555, 0.006006432056427002, 0.005971968173980713, 0.005990399837493897, 0.0059699201583862304, 0.005994336128234863, 0.0059967041015625, 0.005990399837493897, 0.005987328052520752, 0.00598905611038208, 0.006008384227752686, 0.00603007984161377, 0.005997920036315918, 0.006051743984222412, 0.006001408100128174, 0.0060160961151123045, 0.006025983810424805, 0.006017183780670166, 0.0062722558975219726, 0.006034175872802735, 0.006045951843261719, 0.0060433921813964845, 0.006062079906463623, 0.006017024040222168, 0.006012928009033203, 0.006036736011505127, 0.0060136961936950685, 0.006039552211761475, 0.0060026879310607914, 0.0060538239479064945, 0.0060226240158081056, 0.0060730881690979005, 0.005990464210510254, 0.00603113603591919, 0.005992447853088379, 0.006066239833831787, 0.005987967967987061, 0.00602675199508667, 0.00598745584487915, 0.006049471855163574, 0.00599622392654419, 0.006043968200683594, 0.005975103855133056, 0.006052800178527832, 0.006008255958557129, 0.006063007831573487, 0.006012576103210449, 0.006053760051727295, 0.006025152206420898, 0.006047935962677002, 0.00598137617111206, 0.006074495792388916, 0.006021984100341797, 0.0060412797927856445, 0.0059712638854980465, 0.006149055957794189, 0.005900191783905029, 0.006160223960876465, 0.006052095890045166, 0.0062399678230285645, 0.00597760009765625, 0.0060050878524780275, 0.005982656002044678, 0.006023519992828369, 0.005987679958343506, 0.006024576187133789, 0.005983168125152588, 0.006008959770202636, 0.0060026879310607914, 0.006006400108337403, 
0.006016543865203857, 0.0060095682144165035, 0.0060022401809692386, 0.00601087999343872, 0.006011360168457032, 0.006052127838134766, 0.006020800113677978, 0.0060536317825317385, 0.005986559867858887, 0.006050111770629883, 0.006053120136260986, 0.006058688163757324, 0.0060208640098571775, 0.006021120071411133, 0.006045919895172119, 0.006027040004730225, 0.006078080177307129, 0.006035295963287354, 0.00604963207244873, 0.006009535789489746, 0.0059985918998718265, 0.006000639915466309, 0.006006015777587891, 0.00598419189453125, 0.005994688034057618, 0.005989183902740478, 0.006010687828063964, 0.005974016189575195, 0.0060026879310607914, 0.006020736217498779, 0.0059920639991760255, 0.006000448226928711, 0.006005631923675537, 0.006025023937225342, 0.006014431953430176, 0.0060280637741088865, 0.006014976024627685, 0.006000639915466309, 0.006008575916290284, 0.006023295879364014, 0.006002592086791992, 0.005980703830718994, 0.006016640186309814, 0.005986368179321289, 0.006043295860290527, 0.006008895874023437, 0.006035744190216064, 0.006047743797302246, 0.006021247863769531, 0.005968448162078858, 0.005998655796051026, 0.006021120071411133, 0.00604310417175293, 0.006029856204986572, 0.006037312030792236, 0.006013440132141113, 0.006028704166412354, 0.006029600143432617, 0.006012928009033203, 0.005988351821899414, 0.006281216144561768, 0.006037504196166992, 0.006060256004333496, 0.00616425609588623, 0.006041600227355957, 0.006042655944824219, 0.006013919830322265, 0.00601087999343872, 0.005994495868682862, 0.006010240077972412, 0.006013152122497559, 0.0060360321998596195, 0.006053728103637695, 0.00606547212600708, 0.006066880226135254, 0.006039872169494629, 0.006051743984222412, 0.006024064064025879, 0.006064159870147705, 0.005995039939880371, 0.0060698561668396, 0.005993216037750244, 0.006078464031219482, 0.006000448226928711, 0.00604588794708252, 0.0059983677864074705, 0.006064288139343262, 0.005977536201477051, 0.006081151962280274, 0.006021120071411133, 0.006055007934570313, 0.006003615856170655, 0.006047743797302246, 0.006013023853302002, 0.00602668809890747, 0.005974495887756347, 0.00602623987197876, 0.005962751865386963, 0.006041696071624756, 0.006028768062591553, 0.00608464002609253, 0.006036223888397217, 0.00601039981842041, 0.006041728019714356, 0.0060067839622497555, 0.006062079906463623, 0.006061279773712158, 0.006136608123779297, 0.006170623779296875, 0.006188672065734863, 0.006123807907104492, 0.006647903919219971]",tokens/s,160.0108992884219,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,937.885696,6533.61152,0.0,6138.363904,6060.931072,s,1,7.035689453125,7.035689453125,0.0,7.035689453125,7.035689453125,7.035689453125,7.035689453125,[7.035689453125],,kWh,5.086469033324193e-06,5.537158699413926e-07,3.1925025540113783e-06,8.832687457276963e-06,,MB,1390.432256,6558.777344,0.0,6150.946816,5419.87328,s,10,0.6056985969543457,0.06056985969543457,0.0015528432284002626,0.06032302474975586,0.062413082122802735,0.06241665267944336,0.062419509124755856,"[0.06235702514648438, 0.059317855834960936, 0.05869996643066406, 0.061936065673828124, 0.06242022323608398, 0.0595272331237793, 0.061089248657226564, 0.05955680084228516, 0.05838188934326172, 0.06241228866577148]",tokens/s,4226.524566628572,kWh,1.9661314734339733e-06,2.167680550845188e-07,1.299260920093977e-06,3.4821604486124688e-06,tokens/kWh,73517577.31382193,MB,1446.33856,6560.874496,0.0,6150.946816,5419.87584,s,10,16.310414306640624,1.6310414306640624,0.007857273558815292,1.630450927734375,1.6352144287109376,1.6433138793945312,1.6497934399414063,"[1.6296009521484376, 1.632656494140625, 1.6313009033203125, 1.626613525390625, 1.6276422119140626, 1.651413330078125, 1.62528271484375, 1.632633544921875, 1.63341455078125, 1.6198560791015626]",tokens/s,38.62562827380183,kWh,4.754033525364933e-05,5.242493640114444e-06,3.1553376473105234e-05,8.433620536686902e-05,tokens/kWh,747010.1331444203,,s,630,16.308376537323003,0.02588631196400476,0.0005603952857475632,0.025773807525634768,0.026104809570312502,0.026561150169372556,0.028418554210662847,"[0.026761247634887696, 0.025899007797241212, 0.025796031951904295, 0.025646015167236327, 0.02582271957397461, 0.025593599319458007, 0.02562124824523926, 0.025591808319091795, 0.025661439895629884, 0.025679391860961916, 0.02564963150024414, 0.025873632431030275, 0.025754112243652344, 0.025663679122924804, 0.025847423553466798, 0.025910816192626952, 0.026387392044067384, 0.025753568649291993, 0.025798688888549803, 0.025856000900268555, 0.025659391403198242, 0.025829376220703124, 0.02573107147216797, 0.025748863220214843, 0.02572051239013672, 0.025639423370361326, 0.025651552200317382, 0.02569862365722656, 0.025590944290161132, 0.025659679412841797, 0.025890687942504882, 0.026005983352661133, 0.02581235122680664, 0.025647743225097656, 0.02572697639465332, 0.0263306884765625, 0.02582147216796875, 0.028475551605224608, 0.027189247131347655, 0.025907199859619142, 0.025810943603515626, 0.025844959259033202, 0.025785120010375976, 0.025624576568603515, 0.02571878433227539, 0.02575974464416504, 0.025745407104492187, 0.025589216232299806, 0.02559791946411133, 0.025820928573608397, 0.025658016204833985, 0.02581110382080078, 0.02595430374145508, 0.02594108772277832, 0.025994144439697265, 0.025890815734863282, 0.025941055297851564, 0.025873056411743166, 0.025796895980834962, 0.02564659118652344, 0.025741823196411134, 0.025831424713134765, 0.025722272872924806, 0.02695577621459961, 0.026255392074584962, 0.025720800399780273, 0.02569830322265625, 0.025835519790649415, 0.025695295333862306, 0.025649152755737304, 0.02566649627685547, 0.025943359375, 0.025598655700683592, 0.025786367416381836, 0.025655296325683592, 0.02576326370239258, 0.026518144607543946, 0.02723219108581543, 0.025999359130859375, 0.02576323127746582, 0.025657791137695313, 0.025644384384155273, 0.025676576614379883, 0.02558569526672363, 0.025667583465576172, 0.025600000381469725, 0.02581443214416504, 0.025594463348388673, 0.025675775527954102, 0.025591072082519532, 0.025532831192016603, 
0.025628992080688476, 0.02576710319519043, 0.025920320510864257, 0.02571059226989746, 0.025806432723999025, 0.02591375923156738, 0.025968639373779297, 0.02587238311767578, 0.02576959991455078, 0.02565977668762207, 0.02571878433227539, 0.025646112442016603, 0.02573411178588867, 0.025817087173461914, 0.026357343673706055, 0.027492767333984376, 0.025887840270996092, 0.025899328231811524, 0.025964832305908202, 0.02675129508972168, 0.025935871124267578, 0.026015743255615235, 0.026070463180541993, 0.02601136016845703, 0.025936351776123048, 0.025812480926513674, 0.025815616607666014, 0.025905471801757812, 0.025974687576293946, 0.026613567352294924, 0.025942304611206054, 0.025845504760742186, 0.025817344665527344, 0.0258143367767334, 0.025870527267456055, 0.026587488174438477, 0.026187744140625, 0.02590924835205078, 0.025836736679077148, 0.025981760025024413, 0.025927104949951173, 0.025804960250854492, 0.025837984085083008, 0.02573721694946289, 0.025978208541870117, 0.02577680015563965, 0.02582921600341797, 0.02565068817138672, 0.025825920104980468, 0.025739295959472656, 0.026052608489990234, 0.025800703048706054, 0.025808351516723633, 0.02593382453918457, 0.026368703842163086, 0.026242015838623046, 0.026067840576171876, 0.025847808837890625, 0.0259150390625, 0.02590716743469238, 0.02585433578491211, 0.02575699234008789, 0.025784448623657228, 0.025862688064575194, 0.02585798454284668, 0.025804895401000977, 0.025823232650756835, 0.025859872817993165, 0.02573334312438965, 0.025825216293334962, 0.02600761604309082, 0.025675743103027344, 0.0256778564453125, 0.025679424285888673, 0.025831872940063477, 0.025784320831298828, 0.025693727493286134, 0.025782751083374023, 0.025773855209350587, 0.025915615081787108, 0.025812671661376952, 0.025789888381958007, 0.02568828773498535, 0.025942527770996093, 0.0258721923828125, 0.02585798454284668, 0.025770175933837892, 0.025987295150756835, 0.02570444869995117, 0.02631475257873535, 0.02597068786621094, 0.025827327728271485, 0.025911167144775392, 0.026345312118530275, 0.026116384506225585, 0.025849855422973633, 0.025878047943115233, 0.025907680511474608, 0.028279008865356444, 0.026528959274291993, 0.025936511993408202, 0.025825376510620116, 0.025804672241210937, 0.025771392822265624, 0.02577289581298828, 0.02570425605773926, 0.026038463592529298, 0.025833471298217774, 0.02579622459411621, 0.025760128021240235, 0.02574131202697754, 0.02568806457519531, 0.025673248291015624, 0.02565577507019043, 0.02575119972229004, 0.025564895629882813, 0.02580284881591797, 0.02628278350830078, 0.025714431762695313, 0.02577561569213867, 0.025673824310302733, 0.025536991119384764, 0.02564908790588379, 0.02570649528503418, 0.025583423614501954, 0.025843904495239257, 0.025651199340820312, 0.025843711853027345, 0.02575564765930176, 0.025828351974487306, 0.025981952667236328, 0.026003328323364258, 0.0258602237701416, 0.025855903625488282, 0.02574550437927246, 0.026738687515258788, 0.025875968933105467, 0.025707008361816407, 0.025812959671020506, 0.025775680541992186, 0.025623008728027342, 0.025610240936279297, 0.0256342716217041, 0.025727039337158204, 0.0256844482421875, 0.02572697639465332, 0.025657344818115234, 0.02552422332763672, 0.02563849639892578, 0.02570044708251953, 0.025831743240356444, 0.025855007171630858, 0.025682111740112305, 0.02565795135498047, 0.025635007858276368, 0.025707679748535155, 0.025604768753051756, 0.02579475212097168, 0.025775936126708983, 0.025614528656005858, 0.02559939193725586, 0.029030303955078125, 0.02681660842895508, 0.025785823822021485, 
0.025708480834960936, 0.025768543243408205, 0.026019840240478515, 0.025710527420043944, 0.025810079574584963, 0.02575820732116699, 0.02577859115600586, 0.025757183074951173, 0.0256680965423584, 0.02570854377746582, 0.025673728942871094, 0.02575564765930176, 0.02571459197998047, 0.02569948768615723, 0.025748319625854492, 0.025703712463378905, 0.02564998435974121, 0.02568806457519531, 0.025624160766601563, 0.025713056564331056, 0.02575152015686035, 0.0256975040435791, 0.025624736785888672, 0.02569203186035156, 0.02572163200378418, 0.02574505615234375, 0.02573347282409668, 0.025707647323608397, 0.025828224182128906, 0.025772031784057618, 0.025751136779785157, 0.025663904190063477, 0.025645055770874024, 0.025616384506225585, 0.025505151748657227, 0.025653600692749023, 0.025688352584838866, 0.025643232345581055, 0.02559542465209961, 0.025557247161865235, 0.025655263900756835, 0.02610383987426758, 0.025819135665893556, 0.02567945671081543, 0.02575811195373535, 0.025756959915161134, 0.02577686309814453, 0.025835391998291015, 0.025903232574462892, 0.02588467216491699, 0.025943935394287108, 0.025978975296020508, 0.02597875213623047, 0.025851039886474608, 0.026079296112060547, 0.02585696029663086, 0.02634752082824707, 0.025845760345458983, 0.02582524871826172, 0.025673759460449218, 0.027419904708862304, 0.026456640243530272, 0.029658912658691406, 0.033287742614746096, 0.025963359832763672, 0.025866239547729493, 0.025812992095947264, 0.025924896240234373, 0.025891199111938476, 0.02589116859436035, 0.025910816192626952, 0.025946592330932616, 0.02602774429321289, 0.025907615661621093, 0.025890687942504882, 0.026009599685668947, 0.025995264053344725, 0.02617344093322754, 0.02593289566040039, 0.026001632690429686, 0.02590348815917969, 0.02598659133911133, 0.02593984031677246, 0.02617568016052246, 0.025852800369262696, 0.026074880599975585, 0.026192031860351562, 0.02609516716003418, 0.025919071197509767, 0.026303264617919923, 0.025910879135131838, 0.02595452880859375, 0.026005216598510742, 0.025911775588989258, 0.02585958480834961, 0.025759807586669924, 0.02577401542663574, 0.025813503265380858, 0.025905311584472655, 0.025902175903320314, 0.02596112060546875, 0.02577414321899414, 0.026183712005615235, 0.027121631622314454, 0.02671414375305176, 0.026097343444824218, 0.026015359878540038, 0.026006208419799805, 0.026009599685668947, 0.026030080795288086, 0.025993215560913087, 0.02593494415283203, 0.025942111968994142, 0.026016576766967774, 0.025831424713134765, 0.026243072509765625, 0.0261529598236084, 0.026228511810302734, 0.026038496017456055, 0.025965984344482423, 0.025948768615722657, 0.025964544296264647, 0.025825279235839844, 0.027570335388183594, 0.02634364891052246, 0.0259420166015625, 0.025726848602294922, 0.025602176666259767, 0.02578816032409668, 0.02556732749938965, 0.0256964168548584, 0.025691200256347656, 0.02566649627685547, 0.0256777286529541, 0.025745216369628905, 0.025667871475219727, 0.0255467529296875, 0.025671167373657225, 0.026078720092773438, 0.025705408096313477, 0.025714431762695313, 0.025773759841918945, 0.025870975494384767, 0.02558278465270996, 0.025689983367919923, 0.025712799072265625, 0.025790464401245116, 0.026155391693115235, 0.02577449607849121, 0.025806848526000976, 0.025990976333618163, 0.025657535552978516, 0.026042367935180662, 0.02568953514099121, 0.028641855239868164, 0.025993215560913087, 0.025828960418701172, 0.02581340789794922, 0.02572287940979004, 0.025713823318481446, 0.02559052848815918, 0.02564476776123047, 0.025615936279296876, 0.025720928192138674, 
0.025592159271240235, 0.02565772819519043, 0.025626047134399414, 0.02564358329772949, 0.025604095458984375, 0.025636863708496094, 0.025540607452392578, 0.025647296905517578, 0.02563206481933594, 0.025626688003540038, 0.025641504287719726, 0.02571049690246582, 0.025652767181396485, 0.025725120544433593, 0.025644384384155273, 0.025639711380004884, 0.02561039924621582, 0.025606143951416017, 0.025581087112426758, 0.025539039611816406, 0.025573568344116213, 0.0257139835357666, 0.02674892807006836, 0.02621254348754883, 0.025882623672485353, 0.025707712173461916, 0.025820095062255858, 0.025776351928710937, 0.02576144027709961, 0.02751487922668457, 0.02734489631652832, 0.02592767906188965, 0.025847808837890625, 0.025804128646850586, 0.025812639236450195, 0.025672704696655273, 0.025765888214111327, 0.02571468734741211, 0.025985023498535157, 0.026113536834716795, 0.025715295791625976, 0.02627984046936035, 0.025776128768920898, 0.026064895629882814, 0.02574742317199707, 0.02571062469482422, 0.02564659118652344, 0.025709056854248048, 0.025789823532104492, 0.025796768188476562, 0.025757280349731446, 0.02568822479248047, 0.025651071548461916, 0.025582048416137697, 0.025522144317626953, 0.02562499237060547, 0.025620479583740235, 0.025646240234375, 0.025753856658935547, 0.02728611183166504, 0.02575155258178711, 0.02574131202697754, 0.025806848526000976, 0.02587238311767578, 0.02569011116027832, 0.025899007797241212, 0.02579055976867676, 0.025703903198242187, 0.02667359924316406, 0.026089471817016603, 0.025851776123046875, 0.025792640686035158, 0.025734367370605468, 0.02591231918334961, 0.025867456436157225, 0.025853952407836913, 0.025743967056274415, 0.02589286422729492, 0.02594611167907715, 0.02574051284790039, 0.025909088134765626, 0.02602867126464844, 0.025892896652221678, 0.02575929641723633, 0.025710527420043944, 0.027250816345214843, 0.02632499122619629, 0.025780223846435548, 0.025792512893676758, 0.02576348876953125, 0.025796575546264647, 0.025596223831176757, 0.02568806457519531, 0.025662559509277344, 0.025921920776367187, 0.02574928092956543, 0.025690559387207032, 0.02563315200805664, 0.025726879119873047, 0.025792543411254882, 0.025647167205810548, 0.025610240936279297, 0.030455808639526367, 0.02824950408935547, 0.025702016830444336, 0.025641088485717774, 0.02613260841369629, 0.025750240325927733, 0.025599552154541017, 0.025686559677124025, 0.025869279861450194, 0.025710975646972656, 0.025628671646118165, 0.025606719970703126, 0.025959680557250977, 0.025577215194702147, 0.02556368064880371, 0.025501184463500977, 0.025527263641357423, 0.025686016082763673, 0.025556863784790038, 0.025515487670898437, 0.025490079879760742, 0.025602048873901367, 0.025575679779052736, 0.025595808029174806, 0.025859935760498047, 0.025927040100097658, 0.025573663711547852, 0.025592191696166992, 0.025646816253662108, 0.025767488479614256, 0.025639583587646484, 0.02591299247741699, 0.025700735092163085, 0.02594963264465332, 0.02954911994934082, 0.025819040298461913, 0.026179679870605467, 0.02572492790222168, 0.025798656463623046, 0.025642047882080077, 0.025665695190429688, 0.025563936233520507, 0.025814048767089842, 0.025676767349243165, 0.02587648010253906, 0.02572697639465332, 0.0266309757232666, 0.025948160171508788, 0.025800703048706054, 0.02576383972167969, 0.02564873504638672, 0.025694623947143554, 0.02571628761291504, 0.025571775436401368, 0.025638303756713866, 0.025614944458007813, 0.025531551361083985, 0.02556399917602539, 0.025526208877563475, 0.025694271087646485, 0.02562656021118164, 0.02567788887023926, 
0.025781280517578126, 0.025852895736694335, 0.025759584426879884, 0.025792671203613282, 0.025593856811523437, 0.025997312545776367, 0.025657344818115234, 0.025600000381469725, 0.02551318359375, 0.027882112503051757, 0.026881471633911132, 0.02572591972351074, 0.025689376831054687, 0.0257741756439209, 0.025555328369140626, 0.025547903060913087, 0.025512832641601563, 0.025624576568603515, 0.025636512756347655, 0.02558297538757324, 0.025685216903686522, 0.025587039947509764, 0.02561004829406738, 0.025503679275512694, 0.0254716796875, 0.025524192810058594, 0.025499040603637696, 0.02545724868774414, 0.025796768188476562, 0.025653087615966796, 0.025585472106933595, 0.02559609603881836, 0.02554265594482422, 0.02547302436828613, 0.025458688735961913, 0.02546272087097168, 0.025501760482788086, 0.025663488388061522, 0.02609766387939453, 0.02554265594482422, 0.025667583465576172, 0.02557494354248047, 0.02563279914855957, 0.025575872421264648, 0.025826623916625976, 0.025549503326416017, 0.025497087478637694]",tokens/s,38.63045463527259,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 843, in __init__ self.transformer = FalconModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 650, in __init__ self.h = nn.ModuleList([FalconDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 650, in self.h = nn.ModuleList([FalconDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 420, in __init__ self.mlp = FalconMLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/tiiuae/falcon-40b/05ab2ee8d6b593bdbab17d728de5c028a7a94d83/modeling_falcon.py"", line 405, in __init__ self.dense_4h_to_h = FalconLinear(4 * hidden_size, hidden_size, bias=config.bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 512.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 214.12 MiB is free. Process 199688 has 14.53 GiB memory in use. Of the allocated memory 14.41 GiB is allocated by PyTorch, and 1.37 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 990, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 565, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 140, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 137377 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,783.077376,741.277696,0.0,346.03008,335.0016,s,1,7.1831591796875,7.1831591796875,0.0,7.1831591796875,7.1831591796875,7.1831591796875,7.1831591796875,[7.1831591796875],,kWh,2.1506190416668383e-06,2.298053859902211e-07,9.297229660032436e-07,3.310147393660303e-06,,MB,1261.17888,766.44352,0.0,358.612992,302.626816,s,20,0.19743769645690915,0.00987188482284546,8.395238262485647e-05,0.009871455669403076,0.00996808614730835,0.009996689987182616,0.009999389114379881,"[0.009857151985168457, 0.009795424461364746, 0.010000063896179199, 0.009937472343444824, 0.009855808258056641, 0.00994262409210205, 0.009822815895080566, 0.009822815895080566, 0.009700672149658203, 0.009939167976379394, 0.009741279602050781, 0.009799103736877441, 0.009879103660583496, 0.009752287864685058, 0.009863807678222656, 0.009996512413024902, 0.009964927673339844, 0.009907072067260742, 0.009955648422241212, 0.009903936386108399]",tokens/s,25932.231239931636,kWh,2.8870181205077586e-07,3.1838646855313936e-08,1.8717621304450572e-07,5.077166719505956e-07,tokens/kWh,504218226.7059976,MB,1293.98784,781.123584,0.0,373.293056,302.629376,s,20,10.330168548583986,0.5165084274291993,0.003396619121302001,0.5161029663085938,0.5204233520507813,0.5224993988037109,0.5252641864013672,"[0.5118157653808594, 0.5133551635742187, 0.5160929565429687, 0.5223175048828125, 0.5186146850585938, 0.5161129760742188, 0.51690771484375, 0.5147996826171874, 0.5172576904296875, 0.5148217163085937, 0.5136166381835937, 0.5132215576171875, 0.5151278686523437, 0.5128375854492188, 0.5134371948242188, 0.5171526489257813, 0.520212890625, 0.5178597412109375, 0.5186511840820313, 
0.5259553833007813]",tokens/s,121.97284043082871,kWh,1.5008283589199291e-05,1.6551660401387799e-06,6.507662476155672e-06,2.3171112105493734e-05,tokens/kWh,2718902.73169336,,s,1260,10.3203361325264,0.008190742962322537,0.00019018185760913555,0.008159103870391846,0.008300131320953369,0.008367008209228515,0.008885352382659912,"[0.00794646406173706, 0.008128959655761719, 0.008122048377990722, 0.008148927688598633, 0.008202591896057128, 0.008093952178955078, 0.008147135734558105, 0.008068991661071776, 0.008118240356445313, 0.008122336387634278, 0.008097663879394531, 0.00809382438659668, 0.008226816177368163, 0.008079423904418945, 0.00810912036895752, 0.008106880187988282, 0.008300064086914062, 0.008103936195373536, 0.008030688285827637, 0.008107071876525878, 0.008067904472351074, 0.008036895751953125, 0.008132191658020019, 0.008101887702941894, 0.008134464263916015, 0.008083616256713867, 0.008179519653320312, 0.008159071922302245, 0.00824937629699707, 0.0081146240234375, 0.00808233642578125, 0.00808233642578125, 0.008110143661499024, 0.008203519821166991, 0.008125344276428222, 0.008116095542907716, 0.008127967834472656, 0.008077055931091308, 0.008128479957580566, 0.008120160102844238, 0.008101056098937989, 0.008288031578063964, 0.008064127922058106, 0.008075360298156739, 0.008097599983215333, 0.008082304000854492, 0.008194144248962403, 0.008251423835754395, 0.008104191780090333, 0.008167136192321777, 0.008089504241943359, 0.008068191528320312, 0.008176639556884765, 0.008091648101806641, 0.008044544219970704, 0.008093695640563964, 0.008052736282348634, 0.008035391807556152, 0.008055744171142578, 0.008101663589477538, 0.008113632202148438, 0.00813372802734375, 0.008083456039428711, 0.007856160163879395, 0.008132351875305175, 0.008200448036193847, 0.008126463890075684, 0.00809926414489746, 0.008097920417785644, 0.008117919921875, 0.008098591804504394, 0.008100959777832031, 0.008205216407775879, 0.008159232139587403, 0.008223775863647462, 0.008190943717956542, 0.008208383560180664, 0.008170816421508789, 0.008160032272338868, 0.008234368324279786, 0.008118080139160157, 0.008096511840820313, 0.0081627836227417, 0.008112992286682129, 0.008107551574707031, 0.008142975807189942, 0.00810598373413086, 0.008150272369384766, 0.008154080390930176, 0.00808732795715332, 0.008069120407104492, 0.008093695640563964, 0.008193056106567382, 0.008137120246887206, 0.008137280464172364, 0.00807862377166748, 0.008106143951416015, 0.008151647567749023, 0.008253408432006835, 0.008187647819519042, 0.008149248123168945, 0.008333375930786133, 0.008132896423339843, 0.008117792129516601, 0.008134431838989258, 0.00815171241760254, 0.008257216453552246, 0.008139967918395996, 0.008119071960449219, 0.008341535568237305, 0.008085503578186035, 0.008093055725097656, 0.008141440391540527, 0.00806931209564209, 0.008165439605712891, 0.008195648193359375, 0.008087871551513671, 0.008101023674011231, 0.008125151634216309, 0.008219936370849609, 0.008080160140991211, 0.008191679954528809, 0.008135231971740722, 0.008076736450195313, 0.008130016326904297, 0.008083488464355469, 0.008167584419250489, 0.008138591766357423, 0.008068991661071776, 0.00812831974029541, 0.008054431915283202, 0.008204095840454102, 0.008196928024291993, 0.008165599822998047, 0.008144767761230468, 0.008120384216308594, 0.008132479667663573, 0.008114303588867187, 0.008142720222473145, 0.008284159660339355, 0.00811638355255127, 0.008111968040466308, 0.00821452808380127, 0.008116479873657226, 0.008105728149414062, 0.008194047927856446, 0.008309087753295898, 
0.008160927772521972, 0.008124223709106445, 0.008103296279907227, 0.008118304252624511, 0.008176416397094726, 0.008345024108886719, 0.008151647567749023, 0.008200160026550293, 0.00814031982421875, 0.00812662410736084, 0.00821894359588623, 0.008286208152770995, 0.008145983695983888, 0.008172479629516602, 0.00819382381439209, 0.008130720138549805, 0.008105728149414062, 0.00817743968963623, 0.008273951530456543, 0.008274432182312011, 0.008359935760498047, 0.008197152137756347, 0.008203200340270996, 0.008158880233764649, 0.008138463973999024, 0.00834217643737793, 0.008089792251586915, 0.00819548797607422, 0.00828048038482666, 0.008153087615966797, 0.008204287528991699, 0.008253696441650391, 0.008181504249572754, 0.008136927604675293, 0.008255328178405762, 0.008270815849304199, 0.008385824203491212, 0.008215840339660645, 0.008234496116638184, 0.008274496078491211, 0.008082816123962402, 0.008112959861755371, 0.00994099235534668, 0.011926591873168945, 0.009403583526611328, 0.008259327888488769, 0.008363327980041503, 0.008405695915222168, 0.00828006362915039, 0.008289728164672851, 0.008211071968078614, 0.008210368156433105, 0.008147968292236327, 0.008147168159484863, 0.008205375671386718, 0.00812003231048584, 0.008249567985534669, 0.008154335975646973, 0.008271967887878418, 0.008149344444274903, 0.00809932804107666, 0.008172160148620605, 0.008118271827697754, 0.008161312103271484, 0.008087136268615723, 0.008131168365478515, 0.008189087867736816, 0.008104543685913086, 0.008103391647338868, 0.008155584335327148, 0.008167072296142578, 0.008073087692260741, 0.008155743598937988, 0.008259584426879883, 0.00815225601196289, 0.008102720260620117, 0.008176896095275879, 0.00819916820526123, 0.00819760036468506, 0.00812880039215088, 0.008099840164184571, 0.008220671653747558, 0.008216575622558593, 0.008283424377441406, 0.008190688133239745, 0.008181856155395508, 0.008159135818481445, 0.008159071922302245, 0.0082227201461792, 0.00823136043548584, 0.008206048011779786, 0.00819542407989502, 0.008114815711975098, 0.008071519851684571, 0.008111807823181152, 0.0081112003326416, 0.0082706880569458, 0.00808563232421875, 0.008212448120117188, 0.008142975807189942, 0.008183775901794433, 0.008103808403015137, 0.008086976051330566, 0.008139328002929687, 0.008131679534912109, 0.007939199924468994, 0.008128160476684571, 0.008181759834289551, 0.00814851188659668, 0.008191935539245606, 0.00830508804321289, 0.008115679740905761, 0.00851417636871338, 0.008316608428955078, 0.008819007873535156, 0.008431648254394531, 0.008493311882019043, 0.00859712028503418, 0.008259391784667969, 0.008220352172851563, 0.008301152229309081, 0.008224767684936523, 0.008210687637329102, 0.008133695602416993, 0.008108736038208008, 0.008181759834289551, 0.008134655952453614, 0.008118271827697754, 0.008278016090393067, 0.008245247840881348, 0.00828166389465332, 0.008251839637756348, 0.008217696189880372, 0.00816380786895752, 0.008165920257568359, 0.008265824317932128, 0.008136608123779298, 0.008218655586242676, 0.00812335968017578, 0.008367008209228515, 0.008194047927856446, 0.008187904357910156, 0.008156224250793457, 0.008078271865844727, 0.008212479591369629, 0.008130656242370606, 0.008103839874267578, 0.008141119956970215, 0.00807919979095459, 0.008143839836120605, 0.008198528289794922, 0.008102399826049805, 0.008435680389404296, 0.008178815841674805, 0.008106911659240722, 0.008163328170776368, 0.0081112003326416, 0.008127679824829101, 0.00813206386566162, 0.008216575622558593, 0.008134592056274413, 0.008157504081726075, 0.008260895729064941, 
0.008165663719177246, 0.008144991874694824, 0.008139103889465331, 0.00808521556854248, 0.009013536453247071, 0.007968224048614502, 0.008237919807434081, 0.008161279678344726, 0.008169471740722656, 0.008152223587036133, 0.00813088035583496, 0.008181504249572754, 0.00818051242828369, 0.00813862419128418, 0.00814022445678711, 0.00814355182647705, 0.008240832328796386, 0.008249664306640625, 0.008275967597961426, 0.008155136108398438, 0.008225855827331542, 0.008223232269287109, 0.00823136043548584, 0.008154911994934082, 0.008185215950012207, 0.008144927978515624, 0.008145088195800782, 0.00813702392578125, 0.00815459156036377, 0.008090399742126464, 0.00809171199798584, 0.008154111862182617, 0.008127488136291505, 0.0081463041305542, 0.008129152297973632, 0.008264896392822265, 0.008249919891357422, 0.008173824310302735, 0.008258848190307618, 0.008180607795715332, 0.008201951980590821, 0.008261088371276856, 0.008298239707946777, 0.00843603229522705, 0.008454367637634278, 0.008401280403137206, 0.008222111701965332, 0.00816598415374756, 0.008132320404052734, 0.008126751899719238, 0.008109312057495117, 0.008196864128112793, 0.008224767684936523, 0.008130559921264649, 0.008241151809692383, 0.008263392448425293, 0.008181280136108398, 0.008172287940979003, 0.008196191787719726, 0.008167327880859375, 0.00809779167175293, 0.008092896461486816, 0.008110207557678223, 0.008129183769226073, 0.008114175796508789, 0.008134655952453614, 0.008075263977050781, 0.00831283187866211, 0.007988895893096924, 0.008105888366699218, 0.008097279548645019, 0.008049599647521973, 0.008073216438293456, 0.008116224288940429, 0.008101887702941894, 0.008095616340637208, 0.00813811206817627, 0.008143487930297852, 0.008046719551086426, 0.008083392143249511, 0.008069375991821288, 0.008095392227172852, 0.008089407920837403, 0.008146623611450195, 0.008157855987548828, 0.008094719886779785, 0.008080512046813966, 0.008099743843078613, 0.008179776191711426, 0.008093664169311524, 0.008035712242126465, 0.00812499237060547, 0.008061023712158203, 0.008083295822143555, 0.008079423904418945, 0.008116224288940429, 0.008153120040893554, 0.008138496398925782, 0.008089471817016601, 0.008097599983215333, 0.008108063697814942, 0.008158047676086426, 0.008073280334472657, 0.008043680191040039, 0.008117728233337403, 0.00817251205444336, 0.008089695930480957, 0.008094623565673828, 0.00809436798095703, 0.008118623733520508, 0.008115360260009766, 0.008419648170471191, 0.008129055976867677, 0.00813270378112793, 0.008137791633605958, 0.008094880104064941, 0.008691391944885253, 0.008753120422363281, 0.008431424140930175, 0.009867487907409668, 0.009356736183166504, 0.008361663818359375, 0.008215423583984374, 0.008211584091186523, 0.0082543363571167, 0.008183775901794433, 0.008128512382507324, 0.008138784408569336, 0.008279775619506836, 0.008146976470947266, 0.008183135986328124, 0.007829792022705078, 0.008107744216918945, 0.008090751647949219, 0.008106687545776366, 0.008095423698425292, 0.008276479721069336, 0.00811961555480957, 0.008164031982421875, 0.008165375709533691, 0.008138848304748534, 0.008126367568969726, 0.008325119972229005, 0.008171520233154296, 0.008884223937988281, 0.008349791526794433, 0.008189120292663575, 0.008147135734558105, 0.00812063980102539, 0.00811843204498291, 0.008095168113708496, 0.008124832153320313, 0.008079584121704102, 0.008079360008239746, 0.008087712287902832, 0.008115455627441407, 0.008180319786071777, 0.008062975883483887, 0.008099840164184571, 0.008216447830200195, 0.008113792419433594, 0.008143360137939454, 
0.008240960121154784, 0.008300736427307128, 0.008318976402282715, 0.008310943603515626, 0.008257375717163087, 0.008304032325744629, 0.008254048347473144, 0.008138655662536621, 0.008187968254089356, 0.008166720390319824, 0.008183839797973633, 0.008092351913452148, 0.008157183647155761, 0.008157119750976563, 0.008242591857910157, 0.008127360343933106, 0.008118047714233398, 0.008155136108398438, 0.008091391563415528, 0.008081664085388183, 0.00809779167175293, 0.008165375709533691, 0.00809779167175293, 0.008269824028015137, 0.008118271827697754, 0.008146783828735352, 0.008089311599731445, 0.008081855773925782, 0.008158880233764649, 0.00807868766784668, 0.008109215736389161, 0.00806223964691162, 0.0077814397811889644, 0.008178624153137207, 0.008092927932739257, 0.008061696052551269, 0.008144288063049317, 0.008139360427856445, 0.008179871559143067, 0.00810915184020996, 0.008185952186584473, 0.008110176086425782, 0.008135199546813965, 0.00806710433959961, 0.008140128135681152, 0.008060928344726562, 0.008096575736999512, 0.008220864295959472, 0.008133760452270508, 0.00805737590789795, 0.008125823974609375, 0.008120479583740235, 0.008143327713012695, 0.008206368446350098, 0.008142815589904786, 0.008134655952453614, 0.008144895553588867, 0.008140512466430664, 0.00891113567352295, 0.008296575546264649, 0.008255104064941406, 0.008202495574951172, 0.008134143829345703, 0.008109663963317871, 0.008098624229431152, 0.00811843204498291, 0.008073151588439942, 0.008248671531677246, 0.00809055995941162, 0.00808944034576416, 0.008097663879394531, 0.008101792335510253, 0.008142144203186034, 0.008089792251586915, 0.00816598415374756, 0.00808140754699707, 0.008089599609375, 0.008092831611633301, 0.008135519981384277, 0.008097536087036132, 0.008360383987426757, 0.008140928268432617, 0.008077055931091308, 0.008075200080871581, 0.00860159969329834, 0.008695808410644532, 0.009971327781677246, 0.008360416412353515, 0.00823465633392334, 0.00819760036468506, 0.008165920257568359, 0.008159168243408203, 0.008188223838806153, 0.008511712074279785, 0.008251168251037598, 0.007903584003448486, 0.00817091178894043, 0.008149503707885742, 0.008101984024047852, 0.008177663803100586, 0.008558591842651368, 0.00810591983795166, 0.008093600273132323, 0.00813206386566162, 0.008172575950622558, 0.008129823684692382, 0.008208767890930176, 0.008093312263488769, 0.0081079683303833, 0.00810643196105957, 0.008196096420288086, 0.00810211181640625, 0.0081397123336792, 0.008103967666625977, 0.00809055995941162, 0.008207776069641114, 0.008085984230041504, 0.008171520233154296, 0.008130304336547852, 0.008392864227294922, 0.008165472030639649, 0.00813161563873291, 0.008219615936279297, 0.008109248161315918, 0.008117055892944336, 0.008152576446533203, 0.008276479721069336, 0.008154272079467773, 0.008137568473815918, 0.008144031524658204, 0.008181856155395508, 0.009714431762695312, 0.008151391983032226, 0.008105119705200196, 0.00816323184967041, 0.008134688377380371, 0.008126720428466797, 0.008114144325256348, 0.008113632202148438, 0.008126527786254882, 0.008063808441162109, 0.00808944034576416, 0.00809385585784912, 0.008137056350708008, 0.008074912071228027, 0.00808140754699707, 0.008306528091430664, 0.008112288475036622, 0.008039711952209472, 0.008065983772277832, 0.008078559875488282, 0.008137280464172364, 0.008136704444885253, 0.008171008110046387, 0.008081791877746582, 0.008093952178955078, 0.008126527786254882, 0.008095552444458008, 0.007819168090820313, 0.008144991874694824, 0.008095647811889648, 0.008095680236816406, 0.008118240356445313, 
0.008115903854370117, 0.008167743682861328, 0.008155232429504394, 0.008132096290588378, 0.008104255676269531, 0.008074815750122071, 0.008100480079650878, 0.008034208297729491, 0.008095935821533203, 0.008148991584777832, 0.00824284839630127, 0.00823087978363037, 0.008157247543334962, 0.008086751937866211, 0.008085920333862304, 0.008176223754882812, 0.008206399917602539, 0.008072704315185546, 0.00811257553100586, 0.008187904357910156, 0.008112128257751466, 0.008097408294677734, 0.008169343948364258, 0.008153599739074707, 0.008119872093200683, 0.00866988754272461, 0.008177408218383789, 0.008092831611633301, 0.00817574405670166, 0.00817635154724121, 0.008070176124572753, 0.008180800437927247, 0.008139967918395996, 0.008136704444885253, 0.008077312469482421, 0.008100000381469727, 0.008235103607177734, 0.008133088111877441, 0.008117376327514648, 0.008119232177734375, 0.008078271865844727, 0.008059552192687988, 0.008095647811889648, 0.008174015998840331, 0.0080664644241333, 0.008104543685913086, 0.008089856147766113, 0.008091327667236328, 0.008095840454101562, 0.008140671730041503, 0.008121824264526368, 0.00811631965637207, 0.00824345588684082, 0.008196319580078125, 0.008109567642211914, 0.00815779209136963, 0.008689632415771484, 0.008278047561645508, 0.007864319801330566, 0.008334688186645508, 0.008092320442199707, 0.008175616264343261, 0.008251520156860351, 0.008131967544555665, 0.00809932804107666, 0.008111424446105957, 0.008125823974609375, 0.008073087692260741, 0.00817193603515625, 0.008095135688781738, 0.008044768333435058, 0.008124544143676757, 0.008101152420043946, 0.008111424446105957, 0.008113856315612794, 0.008196096420288086, 0.008118271827697754, 0.008065024375915527, 0.008091648101806641, 0.008068703651428222, 0.008045280456542969, 0.008064703941345215, 0.008257535934448243, 0.008151103973388672, 0.008119680404663087, 0.008129088401794434, 0.008155136108398438, 0.008350784301757813, 0.008260767936706543, 0.008183327674865723, 0.008189727783203125, 0.008216447830200195, 0.008151647567749023, 0.008118271827697754, 0.008142175674438477, 0.008139328002929687, 0.008114175796508789, 0.00807539176940918, 0.008086527824401855, 0.008082112312316895, 0.008071776390075683, 0.008072896003723145, 0.008330368041992187, 0.008063072204589844, 0.008073760032653808, 0.00813696002960205, 0.008111295700073242, 0.008059712409973144, 0.008073023796081543, 0.008159775733947754, 0.008211104393005372, 0.008248607635498046, 0.008153984069824219, 0.008135519981384277, 0.00812179183959961, 0.008184384346008301, 0.008164416313171387, 0.008092608451843262, 0.008204192161560058, 0.008234399795532227, 0.008202943801879883, 0.00801587200164795, 0.008171520233154296, 0.008196096420288086, 0.008142016410827636, 0.008143679618835449, 0.008171520233154296, 0.008101887702941894, 0.008161503791809082, 0.008195872306823731, 0.008269824028015137, 0.00816486358642578, 0.008118783950805664, 0.0081364803314209, 0.008335583686828614, 0.008240287780761718, 0.008189824104309082, 0.00818070411682129, 0.008179295539855956, 0.008155903816223144, 0.008093119621276856, 0.008173791885375977, 0.008126496315002441, 0.008084511756896973, 0.008356800079345703, 0.00832915210723877, 0.008209952354431152, 0.008180255889892579, 0.008140800476074218, 0.008132960319519043, 0.008132160186767578, 0.00814236831665039, 0.008171263694763184, 0.008152000427246094, 0.008177151679992676, 0.008098176002502441, 0.008128128051757812, 0.00811251163482666, 0.008298080444335937, 0.008149663925170898, 0.00815078353881836, 0.008200287818908691, 
0.008074751853942871, 0.008103615760803222, 0.00808409595489502, 0.008210816383361817, 0.008193056106567382, 0.008126784324645996, 0.008174112319946289, 0.008118111610412598, 0.008133695602416993, 0.008133567810058594, 0.008134655952453614, 0.008207551956176758, 0.008137855529785156, 0.008199968338012696, 0.008140704154968263, 0.008138367652893067, 0.008110783576965331, 0.008328895568847656, 0.00821190357208252, 0.008241727828979492, 0.008230912208557128, 0.00818992042541504, 0.007829631805419922, 0.008067296028137208, 0.008066431999206543, 0.008141632080078126, 0.00807692813873291, 0.008071167945861817, 0.008101152420043946, 0.008238143920898437, 0.00830787181854248, 0.00813321590423584, 0.008236512184143067, 0.008151488304138184, 0.008116031646728515, 0.008188096046447754, 0.008152128219604492, 0.008178624153137207, 0.008339167594909668, 0.008206879615783692, 0.008153984069824219, 0.008149375915527344, 0.008094207763671875, 0.008245023727416992, 0.008144479751586914, 0.008220288276672363, 0.008123711585998535, 0.008155936241149903, 0.008086432456970214, 0.008142656326293945, 0.0081179838180542, 0.008098272323608399, 0.008255711555480957, 0.008130496025085449, 0.008068927764892578, 0.008071200370788574, 0.008116224288940429, 0.008065024375915527, 0.008136704444885253, 0.008318976402282715, 0.008113151550292968, 0.008069184303283692, 0.008149248123168945, 0.008098496437072753, 0.008066559791564941, 0.008097727775573731, 0.008102432250976562, 0.008226559638977051, 0.008298720359802246, 0.008134719848632812, 0.00815824031829834, 0.008106975555419922, 0.008160863876342773, 0.008103872299194336, 0.008104288101196289, 0.008104288101196289, 0.00809347152709961, 0.008084639549255371, 0.00808233642578125, 0.008130847930908203, 0.008107680320739746, 0.008025216102600099, 0.008071776390075683, 0.008098079681396484, 0.008042495727539062, 0.007831552028656007, 0.008070719718933106, 0.008094143867492676, 0.008211935997009277, 0.00818057632446289, 0.00834233570098877, 0.00893945598602295, 0.008761728286743164, 0.008237792015075683, 0.008124095916748047, 0.00812662410736084, 0.008118271827697754, 0.00814089584350586, 0.008191328048706054, 0.008096096038818359, 0.008095104217529297, 0.008121248245239257, 0.008148927688598633, 0.008210783958435058, 0.008196895599365234, 0.008104127883911133, 0.00814793586730957, 0.008126463890075684, 0.008070336341857911, 0.00806924819946289, 0.008069536209106446, 0.008119551658630372, 0.008048895835876466, 0.008095647811889648, 0.008132191658020019, 0.008103103637695312, 0.008122271537780761, 0.008091551780700684, 0.008134655952453614, 0.008101887702941894, 0.008116000175476075, 0.008109600067138671, 0.008105952262878418, 0.008092479705810547, 0.008081472396850586, 0.008130751609802245, 0.008121439933776856, 0.008114751815795898, 0.008154656410217285, 0.008110912322998046, 0.008064607620239257, 0.008062111854553222, 0.008125120162963868, 0.008073439598083496, 0.008110015869140626, 0.008078944206237794, 0.008057344436645507, 0.008224415779113769, 0.008323295593261719, 0.008085887908935547, 0.00816316795349121, 0.008129759788513184, 0.008141471862792968, 0.00805897617340088, 0.008101792335510253, 0.008147168159484863, 0.008070976257324218, 0.008038528442382812, 0.007814144134521485, 0.008150879859924316, 0.008106143951416015, 0.008223872184753418, 0.00811235237121582, 0.008067584037780762, 0.008062944412231446, 0.008090911865234375, 0.008121055603027344, 0.008212639808654785, 0.00817471981048584, 0.008117471694946289, 0.00821350383758545, 0.008098239898681641, 
0.008134655952453614, 0.008513407707214355, 0.00836137580871582, 0.008190367698669434, 0.008239328384399414, 0.008211039543151855, 0.008226271629333495, 0.0081943359375, 0.008179712295532226, 0.00818115234375, 0.008172320365905763, 0.008269951820373536, 0.008277824401855469, 0.008191647529602051, 0.008188063621520997, 0.00821782398223877, 0.008252096176147462, 0.008229023933410644, 0.008215968132019042, 0.008182368278503417, 0.008204287528991699, 0.008224639892578125, 0.008218048095703125, 0.008273695945739746, 0.008232159614562988, 0.008152768135070801, 0.00818188762664795, 0.008124287605285645, 0.008160896301269531, 0.008182144165039063, 0.008172672271728515, 0.008256383895874023, 0.008379903793334961, 0.008236607551574708, 0.008259712219238281, 0.008162112236022949, 0.00818995189666748, 0.008219840049743653, 0.008190879821777344, 0.008161184310913085, 0.008269248008728027, 0.008157247543334962, 0.008205887794494628, 0.008204704284667968, 0.00821241569519043, 0.00825312042236328, 0.008242079734802246, 0.00849715232849121, 0.008265727996826172, 0.008022239685058594, 0.008184063911437988, 0.00820143985748291, 0.008212703704833985, 0.008193920135498047, 0.008258208274841309, 0.008221887588500976, 0.008189824104309082, 0.008217184066772461, 0.0082456636428833, 0.008306112289428711, 0.00847696018218994, 0.008458144187927246, 0.008284255981445313, 0.008237055778503418, 0.0082575044631958, 0.008197407722473144, 0.008367008209228515, 0.0081693115234375, 0.008239104270935058, 0.008250911712646485, 0.008374496459960938, 0.00825920009613037, 0.008245887756347657, 0.008225088119506835, 0.008226655960083007, 0.008228832244873047, 0.008236639976501465, 0.008191264152526856, 0.008250368118286134, 0.008310784339904785, 0.008220671653747558, 0.00826691246032715, 0.008258272171020507, 0.008241279602050781, 0.008233023643493651, 0.00820627212524414, 0.008228863716125488, 0.008214655876159669, 0.008201536178588867, 0.00827564811706543, 0.008247360229492188, 0.008276800155639648, 0.008240703582763673, 0.008177984237670898, 0.008342687606811523, 0.008573920249938964, 0.008197952270507812, 0.008200384140014649, 0.00823526382446289, 0.008213919639587402, 0.008219231605529785, 0.008193792343139649, 0.00826524829864502, 0.008282591819763183, 0.008317952156066894, 0.008270848274230956, 0.008328448295593262, 0.008241920471191407, 0.008157312393188476, 0.008182944297790528, 0.008228832244873047, 0.00821939182281494, 0.007919616222381591, 0.00820633602142334, 0.008203935623168946, 0.008251744270324707, 0.008179200172424317, 0.008185855865478516, 0.008247391700744629, 0.008196096420288086, 0.008301247596740722, 0.008267104148864746, 0.008262240409851074, 0.008275936126708985, 0.008189696311950684, 0.008216544151306153, 0.008246463775634766, 0.008319904327392578, 0.008259584426879883, 0.008261216163635255, 0.008167840003967286, 0.008177663803100586, 0.00819814395904541, 0.008187711715698242, 0.00817580795288086, 0.008267487525939941, 0.008225055694580078, 0.00828825569152832, 0.0082227201461792, 0.008155136108398438, 0.008250592231750488, 0.008165663719177246, 0.008169983863830567, 0.008232959747314453, 0.008187904357910156, 0.008159456253051757, 0.00820201587677002, 0.008142848014831543, 0.008263680458068847, 0.008415231704711914, 0.008171008110046387, 0.00820684814453125, 0.008194047927856446, 0.008134655952453614, 0.00821232032775879, 0.008180992126464844, 0.008208288192749023, 0.008303199768066406, 0.00820083236694336, 0.008179488182067872, 0.008230175971984863, 0.00818239974975586, 0.008183391571044921, 
0.008180224418640136, 0.008238752365112305, 0.008149024009704589, 0.008377984046936035, 0.008235456466674805, 0.008244704246520997, 0.0082194242477417, 0.008187904357910156, 0.008177663803100586, 0.008146944046020508, 0.008165120124816894, 0.008192352294921875, 0.007951615810394287, 0.0082042236328125, 0.008186047554016113, 0.008194239616394042, 0.008190400123596191, 0.008538432121276855, 0.008302271842956543, 0.008273056030273437, 0.008238240242004395, 0.008261311531066894, 0.008261631965637208, 0.008240480422973633, 0.00824182415008545, 0.008146944046020508, 0.008191519737243653, 0.008206815719604492, 0.008216575622558593, 0.008167424201965333, 0.00817750358581543, 0.008162591934204102, 0.008178079605102539, 0.008192319869995118, 0.008169631958007812, 0.00818995189666748, 0.008230912208557128, 0.008240415573120117, 0.00832966423034668, 0.008288831710815429, 0.008226592063903809, 0.008183296203613282, 0.008222496032714844, 0.008225567817687988, 0.008191871643066406, 0.008248576164245605, 0.008237728118896484, 0.008182944297790528, 0.008295359611511231, 0.008237024307250977, 0.008167455673217774, 0.00849824047088623, 0.008292415618896484, 0.008179679870605468, 0.008225119590759278, 0.008239680290222168, 0.008158528327941894, 0.008139455795288086, 0.008267680168151855, 0.008185088157653808, 0.008196928024291993, 0.00834335994720459, 0.00823136043548584, 0.008173343658447265, 0.008167424201965333, 0.008159232139587403, 0.008205504417419433, 0.008210687637329102, 0.00819871997833252, 0.008225919723510743, 0.00823203182220459, 0.008271519660949706, 0.008267744064331055, 0.008284095764160157, 0.008213024139404296, 0.007942143917083741, 0.008192288398742675, 0.008230496406555175, 0.008286335945129394, 0.008400896072387695, 0.00845142364501953, 0.008417920112609863, 0.008377856254577636, 0.008413663864135742, 0.008394816398620605, 0.008316927909851075, 0.008356160163879394, 0.00832639980316162, 0.00829689598083496, 0.008635552406311035, 0.00888697624206543, 0.00822492790222168, 0.008241503715515137, 0.008276960372924805, 0.008337120056152343, 0.0083538236618042, 0.008307647705078126, 0.008228863716125488, 0.008382687568664551, 0.008320128440856934, 0.008270496368408204, 0.008234880447387695, 0.008388447761535644, 0.008423168182373047, 0.008247424125671386, 0.00825590419769287, 0.008261119842529297, 0.00907315158843994, 0.008292032241821288, 0.00822544002532959, 0.00832271957397461, 0.008311039924621582, 0.008208224296569824, 0.008355744361877441, 0.008235008239746093, 0.008254879951477051, 0.008202848434448242, 0.008315135955810547, 0.008265472412109376, 0.008322943687438964, 0.008255616188049316, 0.008236639976501465, 0.008382880210876464, 0.008327008247375488, 0.008298656463623048, 0.008335359573364258, 0.008224767684936523, 0.008308544158935546, 0.008208576202392579, 0.008265407562255859, 0.008260095596313476, 0.008293888092041016, 0.008313152313232422, 0.008429568290710449, 0.008482815742492676, 0.008990240097045899, 0.008329567909240722, 0.008439935684204102]",tokens/s,122.08904669576442,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 788, in __init__ self.ffn = DbrxFFN(config=config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 765, in __init__ self.experts = DbrxExperts( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 704, in __init__ self.mlp = DbrxExpertGLU( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 682, in __init__ self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 97550 has 13.57 GiB memory in use. 
Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,781.959168,1133.44512,0.0,738.197504,715.772928,s,1,7.1385732421875,7.1385732421875,0.0,7.1385732421875,7.1385732421875,7.1385732421875,7.1385732421875,[7.1385732421875],,kWh,3.0771318083149403e-06,3.3236041878850374e-07,9.727785559923707e-07,4.382270783095815e-06,,MB,1231.417344,1175.38816,0.0,767.557632,723.637248,s,11,0.1881715850830078,0.017106507734818897,0.00021070095363948937,0.01708678436279297,0.017403936386108397,0.01743947219848633,0.017467900848388673,"[0.01709846305847168, 0.017403936386108397, 0.01708678436279297, 0.01703126335144043, 0.017475008010864258, 0.017203168869018556, 0.01681648063659668, 0.016903776168823242, 0.01689788818359375, 0.017326175689697267, 0.016928640365600586]",tokens/s,14965.064989795255,kWh,5.02287555421702e-07,5.539292336322952e-08,3.3192462966265027e-07,8.896051084475817e-07,tokens/kWh,287768131.6901794,MB,1264.017408,1215.234048,0.0,807.40352,735.775744,s,11,10.490889038085937,0.95371718528054,0.003998828024129619,0.9527057495117187,0.9571828002929688,0.9605534057617188,0.9632498901367188,"[0.954315185546875, 0.952645751953125, 0.9509779052734375, 0.9478515625, 0.9639240112304688, 0.9527057495117187, 0.954770751953125, 0.9539635009765625, 0.9502321166992187, 0.9523197021484375, 0.9571828002929688]",tokens/s,66.0573186394542,kWh,2.773411667791219e-05,3.058641454160931e-06,1.3192076088519415e-05,4.398483422059255e-05,tokens/kWh,1432311.8664956805,,s,693,10.485052336692819,0.015129945651793377,0.00030938632956023314,0.015063072204589844,0.015295583724975586,0.015474534225463866,0.016191924057006837,"[0.015158143997192383, 0.015160767555236817, 0.01509228801727295, 0.015135040283203125, 0.014992447853088378, 0.014920543670654297, 0.015034144401550292, 0.015200160026550292, 0.015022496223449706, 0.015022111892700194, 0.015036288261413575, 0.01500870418548584, 0.014977888107299804, 0.015134719848632813, 0.015341504096984863, 0.01516988754272461, 0.01514367961883545, 0.015064224243164063, 0.015179583549499512, 0.016235584259033202, 0.01530735969543457, 0.015391072273254395, 0.015140064239501953, 0.015057696342468261, 0.01496678352355957, 0.014981120109558106, 0.015073280334472656, 0.015036479949951172, 0.015378368377685548, 0.015126527786254883, 0.015291872024536133, 0.01531334400177002, 0.015284543991088867, 0.015159071922302246, 0.015224800109863282, 0.015071264266967773, 0.015159232139587402, 0.015118399620056152, 0.01498646354675293, 0.015084256172180175, 
0.015111455917358399, 0.015245183944702149, 0.015029151916503907, 0.014937631607055664, 0.015008128166198731, 0.014991135597229003, 0.015051263809204102, 0.015073087692260742, 0.015049823760986328, 0.01505743980407715, 0.015050559997558595, 0.015046879768371582, 0.015024191856384277, 0.0150797119140625, 0.015725631713867188, 0.015634655952453613, 0.015102047920227051, 0.015006015777587891, 0.015083328247070312, 0.01526364803314209, 0.015073439598083497, 0.014975520133972168, 0.015009440422058106, 0.014962528228759766, 0.015093791961669921, 0.015105664253234863, 0.015136287689208985, 0.014959424018859864, 0.014966303825378419, 0.015112832069396972, 0.01507532787322998, 0.014938112258911132, 0.015006879806518554, 0.015076160430908203, 0.014997823715209961, 0.01490937614440918, 0.015044735908508302, 0.015056703567504883, 0.015103839874267578, 0.015089664459228515, 0.014984736442565918, 0.0151146240234375, 0.01510211181640625, 0.015132063865661622, 0.01497993564605713, 0.015122112274169921, 0.015433728218078613, 0.015218688011169433, 0.015245087623596191, 0.01515503978729248, 0.015607551574707031, 0.015252096176147461, 0.015094079971313477, 0.015288224220275879, 0.014999327659606933, 0.01501968002319336, 0.01507545566558838, 0.015075839996337891, 0.015001312255859375, 0.01502723217010498, 0.015166432380676269, 0.015186240196228027, 0.015152383804321289, 0.015102016448974609, 0.015140895843505859, 0.015071583747863769, 0.015187935829162598, 0.015126720428466797, 0.01509552001953125, 0.015155136108398437, 0.015072511672973634, 0.015124896049499511, 0.015076895713806153, 0.01502883243560791, 0.01503446388244629, 0.01502239990234375, 0.015048447608947755, 0.015021568298339843, 0.015082240104675293, 0.015122112274169921, 0.015811967849731445, 0.015090847969055176, 0.01508899211883545, 0.015129023551940918, 0.01518950366973877, 0.015226752281188964, 0.014960320472717285, 0.015089759826660156, 0.015531935691833497, 0.015046751976013184, 0.015132672309875488, 0.014978912353515625, 0.015029888153076171, 0.01502467155456543, 0.015062432289123535, 0.015083168029785156, 0.015211008071899413, 0.015057503700256348, 0.015066975593566895, 0.015089344024658204, 0.015026495933532715, 0.015007200241088868, 0.01496448040008545, 0.015014687538146972, 0.015034687995910645, 0.015158975601196289, 0.0150217924118042, 0.01512224006652832, 0.015038368225097656, 0.014950336456298828, 0.014967488288879395, 0.0150447998046875, 0.015021951675415039, 0.015038335800170898, 0.014992959976196289, 0.015290719985961914, 0.01522697639465332, 0.015116607666015625, 0.01514463996887207, 0.014979328155517578, 0.01504640007019043, 0.01498691177368164, 0.014930144309997558, 0.0151778564453125, 0.015038463592529297, 0.015075008392333984, 0.015065535545349121, 0.015020159721374512, 0.015164992332458497, 0.015216863632202149, 0.014995264053344727, 0.015009440422058106, 0.0149900484085083, 0.015177408218383788, 0.015132767677307129, 0.014956864356994629, 0.014988672256469727, 0.015058303833007812, 0.015107328414916993, 0.014926527976989746, 0.015174655914306641, 0.01507532787322998, 0.015087936401367187, 0.014959903717041015, 0.015038880348205566, 0.015396160125732422, 0.015750176429748537, 0.015191328048706055, 0.015200608253479004, 0.01505401611328125, 0.015035200119018554, 0.014995327949523926, 0.015188096046447754, 0.015133760452270507, 0.015010656356811524, 0.014949760437011718, 0.014971424102783203, 0.014989503860473633, 0.015011839866638184, 0.014919679641723632, 0.014944576263427734, 0.015118111610412597, 0.014960543632507324, 
0.014980480194091797, 0.015100543975830077, 0.014975263595581054, 0.015017696380615234, 0.014954496383666992, 0.01500879955291748, 0.015092703819274902, 0.015019968032836914, 0.014981184005737305, 0.015007743835449219, 0.01500175952911377, 0.015008607864379882, 0.014920703887939453, 0.014931072235107421, 0.015004544258117676, 0.015037983894348144, 0.015071136474609375, 0.015029952049255372, 0.015057791709899902, 0.01517520046234131, 0.015235136032104492, 0.01502239990234375, 0.015151071548461913, 0.015108160018920899, 0.01506719970703125, 0.014941760063171387, 0.015056960105895996, 0.015033727645874023, 0.0150632963180542, 0.014949119567871094, 0.014994879722595215, 0.015018464088439942, 0.015048800468444824, 0.014954208374023437, 0.014987551689147949, 0.01499135971069336, 0.015134079933166504, 0.01496777629852295, 0.015124159812927246, 0.015187935829162598, 0.015341567993164062, 0.01505292797088623, 0.01504038429260254, 0.015024127960205079, 0.015031488418579101, 0.014987168312072753, 0.014924351692199706, 0.015072832107543946, 0.015156000137329102, 0.015111552238464356, 0.015336192131042481, 0.018846527099609375, 0.01590771198272705, 0.015322912216186523, 0.01526416015625, 0.015532992362976073, 0.015196319580078124, 0.015106783866882325, 0.015070655822753905, 0.014969568252563476, 0.015183775901794434, 0.015121439933776855, 0.015270751953125, 0.01498528003692627, 0.015074624061584472, 0.01505292797088623, 0.015036160469055176, 0.015031295776367188, 0.014999263763427735, 0.01508672046661377, 0.015059935569763183, 0.015126208305358886, 0.014973440170288087, 0.015047616004943847, 0.015268735885620117, 0.015235072135925292, 0.015028223991394044, 0.015026176452636719, 0.014979071617126465, 0.015036064147949219, 0.01498470401763916, 0.015106911659240723, 0.015032447814941407, 0.015034367561340332, 0.015125568389892579, 0.015033151626586914, 0.015166687965393066, 0.015235872268676757, 0.015298879623413086, 0.015065055847167969, 0.015210528373718262, 0.0152194242477417, 0.015260767936706544, 0.015280223846435547, 0.015265215873718261, 0.0151309757232666, 0.015201279640197754, 0.01510268783569336, 0.016183584213256837, 0.017989248275756837, 0.015349311828613281, 0.016188127517700195, 0.015286368370056152, 0.015290495872497558, 0.015273983955383302, 0.015079296112060547, 0.015240351676940917, 0.015108256340026856, 0.01512441635131836, 0.015109888076782227, 0.015053824424743652, 0.015056639671325684, 0.015355903625488282, 0.015106047630310059, 0.015050751686096191, 0.015042559623718262, 0.015039648056030273, 0.0149617280960083, 0.015052576065063477, 0.015336864471435547, 0.015056480407714843, 0.014959199905395508, 0.014969183921813964, 0.015028096199035645, 0.014983360290527343, 0.015001919746398926, 0.01495740795135498, 0.015024991989135742, 0.01502012825012207, 0.014964544296264648, 0.01609942436218262, 0.017424671173095704, 0.015140576362609863, 0.015079744338989258, 0.015117759704589845, 0.01500595188140869, 0.015021727561950684, 0.014897279739379883, 0.014952896118164062, 0.015015711784362793, 0.014997504234313964, 0.01502444839477539, 0.015017151832580566, 0.015024160385131836, 0.01498793601989746, 0.015074655532836915, 0.015004128456115722, 0.014982624053955078, 0.01498483180999756, 0.015077343940734863, 0.014970047950744629, 0.015113823890686036, 0.015030431747436523, 0.015230303764343261, 0.015073951721191406, 0.015059167861938477, 0.015106111526489259, 0.015062751770019532, 0.014985535621643066, 0.01507868766784668, 0.015122879981994629, 0.01504252815246582, 0.015091584205627442, 
0.014992959976196289, 0.015177760124206543, 0.015064736366271973, 0.01505743980407715, 0.015019935607910156, 0.014987551689147949, 0.015114463806152344, 0.01512831974029541, 0.015167424201965332, 0.015070976257324219, 0.01564022445678711, 0.0149717435836792, 0.014871968269348144, 0.015302751541137695, 0.014946368217468262, 0.015386719703674317, 0.015258079528808593, 0.015245311737060547, 0.01581593608856201, 0.01515392017364502, 0.0156428804397583, 0.015529696464538573, 0.015111904144287109, 0.015018272399902343, 0.014999584197998047, 0.01589241600036621, 0.017346176147460937, 0.015237567901611328, 0.015093759536743164, 0.015070719718933106, 0.015086079597473144, 0.015087615966796876, 0.014990495681762696, 0.014895456314086914, 0.01496348762512207, 0.015092896461486817, 0.0150513277053833, 0.014929920196533204, 0.014997759819030761, 0.015157183647155762, 0.015621567726135254, 0.01497270393371582, 0.015337375640869141, 0.01507744026184082, 0.015164159774780273, 0.015091584205627442, 0.014905344009399414, 0.014995455741882324, 0.015071231842041016, 0.014993408203125, 0.014974559783935547, 0.01495248031616211, 0.015015616416931152, 0.015062911987304688, 0.014969951629638672, 0.015029984474182129, 0.015023776054382324, 0.015036767959594726, 0.015193568229675294, 0.015089823722839355, 0.015018367767333984, 0.01510755157470703, 0.014996352195739747, 0.014943903923034667, 0.01504252815246582, 0.015036447525024414, 0.015050592422485352, 0.014999711990356445, 0.015107392311096191, 0.015049440383911133, 0.01500598430633545, 0.014998527526855468, 0.01503711986541748, 0.015071231842041016, 0.015017951965332031, 0.014860159873962402, 0.014988544464111329, 0.015012351989746094, 0.014995840072631836, 0.015050784111022949, 0.014884927749633788, 0.01545206356048584, 0.015120160102844239, 0.01510217571258545, 0.014987039566040038, 0.015136159896850587, 0.015187935829162598, 0.01518671989440918, 0.015072511672973634, 0.014933055877685546, 0.014982912063598633, 0.01503446388244629, 0.01504041576385498, 0.014971263885498047, 0.015017087936401368, 0.018297119140625, 0.016107744216918945, 0.015261568069458008, 0.015062496185302735, 0.015348383903503418, 0.015134559631347657, 0.014968735694885254, 0.014964991569519043, 0.01491744041442871, 0.014974176406860351, 0.015064352035522461, 0.014953280448913574, 0.015034527778625488, 0.015057279586791992, 0.015142751693725585, 0.015040800094604493, 0.014948575973510743, 0.015054847717285156, 0.015031968116760254, 0.015012191772460938, 0.015034367561340332, 0.015007743835449219, 0.014980287551879882, 0.014967616081237794, 0.015019488334655762, 0.014973471641540528, 0.014945792198181153, 0.014966303825378419, 0.014969504356384278, 0.016465248107910155, 0.01526028823852539, 0.015140192031860351, 0.015043807983398437, 0.015116064071655274, 0.015012191772460938, 0.015058272361755371, 0.015126848220825195, 0.014986751556396484, 0.014960576057434082, 0.014989184379577638, 0.015018783569335937, 0.01502400016784668, 0.014950400352478027, 0.01490614414215088, 0.01502780818939209, 0.014961055755615234, 0.014944255828857422, 0.014925824165344239, 0.01512054443359375, 0.015136544227600097, 0.015052864074707031, 0.015073280334472656, 0.01524499225616455, 0.015365983963012695, 0.015182080268859864, 0.01537660789489746, 0.015980607986450197, 0.015167424201965332, 0.015011775970458984, 0.015079584121704102, 0.015127679824829102, 0.015067808151245117, 0.015021247863769531, 0.01500051212310791, 0.015017151832580566, 0.01496121597290039, 0.014946559906005859, 0.014946240425109864, 
0.014993151664733887, 0.014938431739807129, 0.014942079544067384, 0.014958751678466798, 0.01495043182373047, 0.01584067153930664, 0.015036416053771973, 0.015024288177490235, 0.015097984313964844, 0.014974720001220703, 0.01496729564666748, 0.014927871704101562, 0.015056351661682128, 0.014959136009216308, 0.015060223579406739, 0.014977567672729493, 0.015091936111450195, 0.015051967620849609, 0.015002431869506836, 0.01490124797821045, 0.0151244478225708, 0.01507753562927246, 0.015044192314147949, 0.014940447807312011, 0.014970687866210938, 0.014979488372802734, 0.01499283218383789, 0.015190431594848633, 0.014960672378540039, 0.01506492805480957, 0.015106111526489259, 0.015054207801818847, 0.015010175704956054, 0.015075008392333984, 0.015091551780700683, 0.015540960311889649, 0.015063039779663086, 0.015032511711120605, 0.014864640235900879, 0.015216287612915038, 0.015099295616149902, 0.015143872261047363, 0.014981120109558106, 0.014970879554748535, 0.01502950382232666, 0.015453184127807617, 0.015087488174438477, 0.014909631729125976, 0.015046336174011231, 0.015370240211486816, 0.015325311660766602, 0.01508243179321289, 0.015219679832458496, 0.015545503616333008, 0.015127615928649902, 0.015063072204589844, 0.015267552375793457, 0.01499123191833496, 0.015025664329528808, 0.01515715217590332, 0.015071231842041016, 0.014971615791320801, 0.015034175872802735, 0.015526080131530761, 0.01506067180633545, 0.014993791580200195, 0.015001631736755372, 0.015046719551086427, 0.0150218563079834, 0.014996576309204101, 0.014922719955444336, 0.01501625633239746, 0.015104703903198242, 0.015238143920898438, 0.01552995204925537, 0.015030207633972168, 0.01503651237487793, 0.015067135810852051, 0.014987263679504394, 0.015202303886413575, 0.015114591598510743, 0.015004480361938476, 0.014936896324157714, 0.0148439359664917, 0.014987199783325196, 0.015220543861389161, 0.015188575744628906, 0.015169183731079101, 0.015122528076171876, 0.015046784400939942, 0.015072832107543946, 0.014919903755187989, 0.014984319686889649, 0.015230015754699707, 0.01514249610900879, 0.015036640167236328, 0.014948543548583984, 0.015249216079711914, 0.015224831581115723, 0.015085536003112793, 0.015410367965698241, 0.014960639953613282, 0.01502780818939209, 0.014942624092102052, 0.014995552062988282, 0.015161312103271485, 0.014961600303649902, 0.015021120071411133, 0.015071167945861816, 0.015091520309448242, 0.015522239685058593, 0.015174752235412597, 0.015205023765563964, 0.01501363182067871, 0.01512399959564209, 0.015503135681152344, 0.015286239624023437, 0.01520259189605713, 0.015810367584228515, 0.015500127792358398, 0.015138848304748535, 0.015119872093200683, 0.014989279747009277, 0.01505452823638916, 0.015160096168518067, 0.015396927833557129, 0.015189632415771485, 0.015327615737915039, 0.01502940845489502, 0.015215456008911133, 0.01518182373046875, 0.015362048149108886, 0.015303775787353516, 0.015137791633605957, 0.015156479835510253, 0.015063455581665039, 0.014944031715393066, 0.014954943656921387, 0.015364095687866211, 0.015216511726379394, 0.015173855781555176, 0.015066559791564942, 0.01515283203125, 0.015141695976257323, 0.01529651165008545, 0.015349760055541992, 0.015170720100402832, 0.015184736251831055, 0.015280256271362305, 0.015310720443725586, 0.015138112068176269, 0.015362751960754395, 0.01528831958770752, 0.015168800354003906, 0.01513327980041504, 0.015122400283813477, 0.015044095993041993, 0.015026847839355468, 0.015164735794067383, 0.015180480003356934, 0.015066816329956054, 0.01514521598815918, 0.015288384437561035, 
0.01545747184753418]",tokens/s,66.09409068706525,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1133, in __init__ self.model = StableLmModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in __init__ [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 688, in __init__ self.self_attn = ATTENTION_CLASSES[config._attn_implementation](config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 339, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.use_qkv_bias) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 14.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 188915 has 14.73 GiB memory in use. Of the allocated memory 14.54 GiB is allocated by PyTorch, and 78.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 743, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 175, in __init__ self.dense = nn.Linear(config.hidden_size, config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 40354 has 14.73 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 20.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,788.025344,763.232256,0.0,360.710144,345.493504,s,1,7.619962890625,7.619962890625,0.0,7.619962890625,7.619962890625,7.619962890625,7.619962890625,[7.619962890625],,kWh,2.6073006166749715e-06,2.8020535747552444e-07,9.266674080021797e-07,3.8141733821526756e-06,,MB,1263.714304,777.91232,0.0,362.807296,319.011328,s,16,0.1841737594604492,0.011510859966278077,0.00016140034903026017,0.011498496055603026,0.011576640129089356,0.011705935955047608,0.011980611324310303,"[0.012049280166625976, 0.011497344017028809, 0.011522015571594239, 0.011561792373657226, 0.011499648094177245, 0.011540351867675781, 0.0113887357711792, 0.011591487884521485, 0.011560735702514648, 0.011391679763793945, 0.011485055923461914, 0.011479071617126465, 0.011540512084960937, 0.011428704261779785, 0.011348031997680664, 
0.011289312362670898]",tokens/s,22239.86745994401,kWh,3.3735045183527463e-07,3.720347788553693e-08,2.0272591683697911e-07,5.772798465577907e-07,tokens/kWh,443459097.9166847,MB,1310.347264,800.980992,0.0,385.875968,319.013888,s,16,10.096327514648438,0.6310204696655274,0.004425911968887541,0.6310072021484375,0.6364345397949218,0.6379564666748047,0.6384537445068359,"[0.6311331787109375, 0.6351198120117187, 0.6385780639648437, 0.6348186645507813, 0.637749267578125, 0.6337777709960938, 0.6323617553710937, 0.6305470581054687, 0.62755322265625, 0.6308812255859375, 0.6278810424804687, 0.6343887329101563, 0.6278984375, 0.6255131225585937, 0.6241744995117188, 0.62395166015625]",tokens/s,99.83828263668399,kWh,1.77994479001267e-05,1.962668706794743e-06,6.918423570927964e-06,2.6680540177849403e-05,tokens/kWh,2361271.5327369412,,s,1008,10.08594080924988,0.010005893659970115,0.000223938622697202,0.009965423583984376,0.010182994937896728,0.010313630247116089,0.010913224344253538,"[0.009529376029968261, 0.009764415740966797, 0.0097903995513916, 0.009844639778137206, 0.009898112297058106, 0.009811039924621581, 0.009919391632080079, 0.009960864067077637, 0.009849280357360839, 0.00980720043182373, 0.009836735725402833, 0.009878144264221192, 0.009819328308105469, 0.00996828842163086, 0.010442272186279297, 0.009947744369506836, 0.010367008209228515, 0.009928704261779785, 0.009872703552246094, 0.009905055999755859, 0.009899616241455078, 0.009999615669250488, 0.009956512451171875, 0.009948960304260254, 0.009977631568908692, 0.010032575607299804, 0.010009407997131348, 0.010315903663635254, 0.010025952339172363, 0.010894207954406739, 0.010057727813720703, 0.01042841625213623, 0.009993503570556641, 0.009918560028076171, 0.010089119911193848, 0.009948960304260254, 0.009914560317993164, 0.009806079864501953, 0.010036992073059083, 0.009904128074645996, 0.009895487785339356, 0.009855423927307129, 0.00994099235534668, 0.009833696365356446, 0.009874208450317383, 0.010147135734558105, 0.00991097640991211, 0.009965567588806153, 0.00985200023651123, 0.010617504119873047, 0.009996543884277344, 0.00990822410583496, 0.010024959564208985, 0.010274815559387206, 0.010100288391113281, 0.010094112396240234, 0.010154911994934082, 0.010163935661315918, 0.00998588752746582, 0.010151871681213379, 0.010093055725097656, 0.010160127639770507, 0.010167584419250489, 0.009739999771118163, 0.009949312210083008, 0.010057439804077148, 0.009949440002441407, 0.009948575973510742, 0.00996611213684082, 0.010065983772277833, 0.010002431869506835, 0.009971712112426758, 0.009996224403381347, 0.010043583869934081, 0.009977824211120605, 0.009992159843444825, 0.010200448036193847, 0.01007091236114502, 0.010085056304931641, 0.009945952415466308, 0.009933119773864746, 0.009962431907653809, 0.010113344192504883, 0.010678272247314453, 0.01129315185546875, 0.010340512275695801, 0.010364671707153321, 0.010086463928222657, 0.010159328460693359, 0.01012831974029541, 0.010059231758117676, 0.010031871795654297, 0.010049407958984374, 0.009942943572998048, 0.009959263801574707, 0.009986240386962891, 0.009985119819641113, 0.009960543632507325, 0.010032480239868163, 0.010027008056640625, 0.00994268798828125, 0.00987980842590332, 0.009963104248046875, 0.009920736312866212, 0.010063808441162109, 0.009939104080200196, 0.01020531177520752, 0.010016927719116211, 0.010098239898681641, 0.009978336334228516, 0.010188608169555665, 0.010086112022399903, 0.010019264221191405, 0.010071519851684571, 0.010070591926574708, 0.009979840278625488, 0.01018329620361328, 
0.010104031562805175, 0.010062368392944335, 0.01012076759338379, 0.010107199668884277, 0.010013919830322265, 0.01033289623260498, 0.01010086441040039, 0.010051648139953613, 0.009942208290100098, 0.009578495979309083, 0.010000384330749512, 0.009870368003845215, 0.00995849609375, 0.00994495964050293, 0.009971712112426758, 0.010000384330749512, 0.01003110408782959, 0.01008566379547119, 0.010144576072692871, 0.010188703536987304, 0.010121055603027344, 0.010070176124572753, 0.010174464225769043, 0.010127360343933106, 0.010146047592163086, 0.010219167709350586, 0.01012668800354004, 0.010341119766235352, 0.0101396484375, 0.010157855987548828, 0.010146016120910645, 0.010169407844543458, 0.010138303756713866, 0.010236160278320312, 0.010088447570800782, 0.010014975547790527, 0.011062623977661133, 0.011092384338378907, 0.01029081630706787, 0.01015174388885498, 0.010262911796569824, 0.010071264266967774, 0.0101278076171875, 0.010082847595214843, 0.010162176132202149, 0.010164128303527833, 0.00999766445159912, 0.010072832107543945, 0.01002086353302002, 0.010084351539611817, 0.00996339225769043, 0.010039423942565918, 0.009973952293395996, 0.01012876796722412, 0.009951680183410645, 0.01001251220703125, 0.009998496055603028, 0.01007158374786377, 0.009940671920776366, 0.010093343734741212, 0.010245599746704102, 0.010105152130126954, 0.010096863746643066, 0.010144031524658204, 0.010469087600708008, 0.010114848136901855, 0.010088128089904785, 0.010085920333862304, 0.01024841594696045, 0.010167072296142578, 0.01009670352935791, 0.01003923225402832, 0.009795231819152832, 0.010009599685668944, 0.009997183799743653, 0.010052032470703125, 0.010264255523681641, 0.0102391357421875, 0.009993056297302245, 0.010064224243164062, 0.010060832023620606, 0.010101375579833984, 0.010013952255249023, 0.00999507236480713, 0.010028863906860351, 0.010071552276611329, 0.010142335891723633, 0.009957568168640137, 0.009983807563781739, 0.009900064468383788, 0.009897631645202637, 0.009916031837463379, 0.01040617561340332, 0.009943455696105956, 0.009953344345092773, 0.009998080253601074, 0.0099716157913208, 0.010031200408935547, 0.009926848411560059, 0.010090496063232422, 0.010268671989440918, 0.009965279579162597, 0.009984288215637207, 0.010082559585571289, 0.010004223823547364, 0.00990822410583496, 0.010000384330749512, 0.01004758358001709, 0.01169603157043457, 0.009961471557617188, 0.009914079666137695, 0.010037471771240234, 0.0099999361038208, 0.009942912101745605, 0.009943679809570312, 0.010092543601989747, 0.010009696006774902, 0.00999619197845459, 0.010042624473571777, 0.01008614444732666, 0.010041343688964843, 0.010069567680358887, 0.010182975769042969, 0.010162431716918946, 0.010073568344116211, 0.010192831993103028, 0.010495936393737793, 0.010058367729187011, 0.010003487586975097, 0.009960320472717285, 0.009906175613403321, 0.009939264297485352, 0.010081376075744629, 0.01012387180328369, 0.010083488464355469, 0.009495200157165528, 0.010324288368225097, 0.010005632400512695, 0.009892000198364257, 0.009907808303833008, 0.01006265640258789, 0.009938176155090333, 0.010057472229003906, 0.009850048065185546, 0.00986911964416504, 0.009963520050048828, 0.01010854434967041, 0.010031423568725585, 0.010039168357849121, 0.010044639587402343, 0.010127327919006347, 0.010061856269836425, 0.00997043228149414, 0.010107104301452636, 0.010188384056091309, 0.010058208465576172, 0.00993609619140625, 0.009976544380187989, 0.010057727813720703, 0.010290495872497559, 0.010079968452453613, 0.010847200393676757, 0.010309184074401855, 
0.010473567962646485, 0.010090432167053222, 0.010125632286071778, 0.010026464462280274, 0.010090847969055176, 0.010105216026306152, 0.010111136436462402, 0.010398752212524414, 0.009986175537109374, 0.010019071578979492, 0.01005827236175537, 0.010334015846252442, 0.010059776306152344, 0.01044275188446045, 0.010119168281555176, 0.010143744468688964, 0.010103967666625977, 0.010109951972961426, 0.010233728408813477, 0.011589407920837402, 0.011030112266540527, 0.010156352043151856, 0.009950528144836426, 0.0100765438079834, 0.010003199577331543, 0.010088288307189941, 0.010008319854736328, 0.009910528182983398, 0.009867103576660156, 0.009942912101745605, 0.009984288215637207, 0.010135135650634765, 0.009838175773620606, 0.009935456275939942, 0.00993712043762207, 0.009560799598693848, 0.00995680046081543, 0.009869919776916505, 0.010178815841674805, 0.010002592086791991, 0.010022944450378417, 0.009959232330322266, 0.010049087524414062, 0.009951680183410645, 0.009976896286010742, 0.010136863708496095, 0.010089632034301757, 0.010084863662719726, 0.010067968368530274, 0.010085920333862304, 0.009960960388183594, 0.010060031890869141, 0.01000227165222168, 0.010080927848815919, 0.010172415733337402, 0.009957887649536134, 0.009938655853271484, 0.010098688125610352, 0.010089887619018554, 0.009927519798278809, 0.010106304168701171, 0.010183039665222169, 0.010107904434204102, 0.010077343940734864, 0.009905952453613281, 0.010039615631103515, 0.01009996795654297, 0.010112480163574218, 0.010244095802307129, 0.010376128196716308, 0.010122976303100585, 0.010070336341857911, 0.010385408401489257, 0.010464415550231934, 0.010232671737670899, 0.010061823844909668, 0.01009225559234619, 0.009954624176025391, 0.009984031677246093, 0.009903039932250977, 0.010012672424316407, 0.010046719551086426, 0.009942912101745605, 0.009943039894104003, 0.009984095573425293, 0.009992544174194335, 0.010049920082092285, 0.01002672004699707, 0.010162143707275391, 0.00993727970123291, 0.009924320220947266, 0.009916704177856445, 0.0099552640914917, 0.009948927879333496, 0.009902400016784668, 0.009962623596191406, 0.010693599700927734, 0.009916543960571289, 0.010495776176452637, 0.009982175827026367, 0.009792863845825195, 0.009865887641906738, 0.010061504364013672, 0.009871232032775878, 0.010185407638549806, 0.00990998363494873, 0.009996352195739746, 0.01000972843170166, 0.010320416450500488, 0.01041859245300293, 0.009998751640319823, 0.009963135719299316, 0.010053600311279297, 0.009984031677246093, 0.009942303657531739, 0.009997023582458495, 0.009888992309570313, 0.009980352401733398, 0.010015135765075683, 0.009977439880371093, 0.009885600090026855, 0.00989247989654541, 0.009905055999755859, 0.009888640403747559, 0.010055168151855469, 0.009877120018005371, 0.010182880401611328, 0.01009721565246582, 0.010165760040283203, 0.009937439918518066, 0.009990079879760742, 0.010137887954711914, 0.010011520385742188, 0.010066911697387696, 0.009984000205993653, 0.009930208206176757, 0.009907744407653808, 0.00989094352722168, 0.010076031684875489, 0.009940095901489258, 0.009812864303588867, 0.009885696411132813, 0.009920639991760254, 0.009866144180297852, 0.00981430435180664, 0.009914752006530761, 0.009816384315490723, 0.00983670425415039, 0.00989577579498291, 0.010173824310302735, 0.010309408187866211, 0.010914655685424804, 0.010779840469360352, 0.009964351654052734, 0.00991964817047119, 0.010136416435241699, 0.01011894416809082, 0.010096863746643066, 0.010071488380432128, 0.00993132781982422, 0.010010368347167968, 0.009608927726745606, 
0.010035776138305664, 0.009907967567443847, 0.009963232040405274, 0.009967616081237793, 0.009937151908874512, 0.010069952011108399, 0.01011894416809082, 0.009977375984191895, 0.009883584022521973, 0.009931424140930175, 0.009967776298522949, 0.0098754243850708, 0.010016575813293457, 0.009985983848571777, 0.009934880256652833, 0.010028800010681153, 0.009994912147521972, 0.009918399810791016, 0.009859199523925782, 0.009940447807312012, 0.00997920036315918, 0.009933823585510254, 0.009930751800537109, 0.010242048263549805, 0.00999129581451416, 0.010002495765686036, 0.00998691177368164, 0.01012492847442627, 0.009987423896789551, 0.010134048461914062, 0.010065983772277833, 0.009961888313293457, 0.010855775833129883, 0.009941887855529786, 0.010003328323364258, 0.00998083209991455, 0.009891839981079101, 0.010008319854736328, 0.009967679977416993, 0.010033344268798828, 0.010125311851501465, 0.010043392181396485, 0.01005731201171875, 0.00998851203918457, 0.009898143768310547, 0.010161279678344726, 0.010072928428649902, 0.009917728424072266, 0.010009056091308593, 0.009992351531982422, 0.00994918441772461, 0.010043295860290527, 0.010262592315673828, 0.009953280448913575, 0.009971712112426758, 0.009918463706970216, 0.009893888473510743, 0.009959744453430177, 0.009940799713134765, 0.009929951667785644, 0.009896063804626464, 0.00999295997619629, 0.009554047584533692, 0.00990559959411621, 0.009937376022338866, 0.009862912178039552, 0.009863648414611816, 0.009895872116088868, 0.009887007713317872, 0.009957216262817382, 0.009847359657287598, 0.009993663787841797, 0.00991420841217041, 0.009941375732421875, 0.009848447799682617, 0.009864319801330566, 0.009885343551635742, 0.010469568252563477, 0.010022496223449707, 0.009918720245361327, 0.009871520042419434, 0.010188799858093261, 0.010004608154296876, 0.009907487869262696, 0.009947999954223633, 0.009981439590454102, 0.009842111587524413, 0.009884480476379395, 0.009909567832946777, 0.009917375564575195, 0.009832192420959472, 0.009784576416015624, 0.009855008125305176, 0.009855711936950683, 0.010076160430908204, 0.010219103813171386, 0.009986047744750976, 0.009877216339111329, 0.010027744293212891, 0.010153951644897461, 0.010076160430908204, 0.010022560119628906, 0.010039423942565918, 0.009981344223022461, 0.010037504196166992, 0.009842880249023437, 0.009951616287231445, 0.009918463706970216, 0.009910271644592286, 0.009871359825134277, 0.00995257568359375, 0.009923040390014648, 0.009937376022338866, 0.009903871536254882, 0.00988265609741211, 0.009907168388366699, 0.01022060775756836, 0.010062368392944335, 0.009974176406860352, 0.009854623794555664, 0.009877056121826171, 0.010088255882263183, 0.009950176239013672, 0.010030240058898925, 0.009997280120849609, 0.009631487846374511, 0.009945152282714843, 0.01056761646270752, 0.010280960083007813, 0.009954879760742188, 0.010234368324279786, 0.01050220775604248, 0.00992454433441162, 0.009937055587768555, 0.010059552192687988, 0.009871328353881836, 0.00995740795135498, 0.009881600379943848, 0.010249792098999023, 0.009887743949890136, 0.010156319618225098, 0.009908032417297364, 0.009853504180908202, 0.010331711769104003, 0.009879776000976562, 0.009979904174804688, 0.009977855682373048, 0.009902144432067871, 0.009884832382202148, 0.009914239883422852, 0.009887711524963379, 0.00994540786743164, 0.009952992439270019, 0.009913248062133789, 0.010188575744628905, 0.009953503608703613, 0.009928704261779785, 0.009926655769348144, 0.009997728347778321, 0.010056320190429688, 0.009959360122680665, 0.01000761604309082, 
0.009978848457336426, 0.010000351905822755, 0.010217503547668457, 0.010496159553527833, 0.010145024299621582, 0.011110336303710937, 0.009994912147521972, 0.01002076816558838, 0.009924768447875976, 0.010016703605651855, 0.009934847831726074, 0.009848832130432129, 0.009995807647705078, 0.009829888343811035, 0.009829343795776367, 0.009877504348754883, 0.009816191673278808, 0.00994707202911377, 0.009823807716369629, 0.009759103775024414, 0.009830400466918946, 0.009727583885192872, 0.010010687828063965, 0.009929471969604493, 0.009866847991943359, 0.00992255973815918, 0.009642687797546387, 0.009930720329284668, 0.010016768455505372, 0.010149663925170898, 0.009922783851623535, 0.010401215553283692, 0.010080544471740723, 0.00990563201904297, 0.009954015731811523, 0.010061920166015625, 0.00994927978515625, 0.009899935722351073, 0.009985440254211426, 0.009951168060302735, 0.009971455574035644, 0.010152864456176757, 0.010135744094848633, 0.009958304405212403, 0.009937631607055663, 0.009928895950317382, 0.009885536193847657, 0.00992198371887207, 0.00999292755126953, 0.009879551887512206, 0.010047360420227051, 0.010219648361206054, 0.01009663963317871, 0.010048640251159669, 0.010103551864624024, 0.00994876766204834, 0.010123807907104493, 0.010002431869506835, 0.010061823844909668, 0.009949024200439454, 0.010024479866027832, 0.009878144264221192, 0.009867520332336425, 0.009889535903930664, 0.009849151611328126, 0.009823328018188476, 0.009858655929565429, 0.009990752220153809, 0.009893471717834473, 0.009780192375183105, 0.009808863639831543, 0.009888544082641601, 0.009777152061462402, 0.009861215591430664, 0.0100065279006958, 0.00989356803894043, 0.009914688110351563, 0.009898240089416505, 0.009833984375, 0.009871295928955078, 0.009955648422241212, 0.009895903587341309, 0.009783519744873046, 0.009803263664245606, 0.009859423637390136, 0.01039151954650879, 0.009920384407043457, 0.009944416046142579, 0.00985315227508545, 0.009573696136474609, 0.009976672172546388, 0.010022368431091309, 0.01005401611328125, 0.009975808143615723, 0.009941087722778321, 0.009959327697753905, 0.009904288291931152, 0.009910240173339843, 0.00994217586517334, 0.009851903915405273, 0.010005439758300781, 0.010124064445495606, 0.010059295654296875, 0.010093024253845215, 0.009883584022521973, 0.009887807846069336, 0.009768959999084472, 0.00982630443572998, 0.00987887954711914, 0.010170816421508789, 0.009999872207641602, 0.009906911849975586, 0.009961119651794434, 0.010074463844299317, 0.010065919876098632, 0.00996889591217041, 0.009980799674987793, 0.00984870433807373, 0.009838591575622559, 0.009853023529052735, 0.009800959587097167, 0.009800127983093262, 0.009830623626708985, 0.009843968391418456, 0.009785247802734374, 0.009870240211486817, 0.009797568321228028, 0.009825535774230957, 0.010023296356201172, 0.010037759780883788, 0.009889663696289063, 0.010011839866638183, 0.010316672325134278, 0.010127296447753906, 0.010002016067504883, 0.010183327674865723, 0.010075712203979492, 0.010022784233093262, 0.01004700756072998, 0.009870207786560059, 0.010215423583984374, 0.009962688446044922, 0.010074848175048829, 0.009875455856323241, 0.01002905559539795, 0.009944095611572265, 0.010188960075378417, 0.009991104125976563, 0.010023839950561523, 0.01221628761291504, 0.012401087760925293, 0.011346240043640137, 0.009723135948181152, 0.010199808120727539, 0.010112256050109862, 0.010240768432617188, 0.010004480361938477, 0.010172415733337402, 0.009974016189575195, 0.009946911811828614, 0.00983631992340088, 0.009932991981506347, 
0.009852255821228027, 0.009957792282104493, 0.009848959922790527, 0.009873536109924317, 0.009871359825134277, 0.009959168434143066, 0.01020479965209961, 0.009845376014709473, 0.009971455574035644, 0.009928064346313476, 0.00997651195526123, 0.009918656349182129, 0.009963232040405274, 0.00994262409210205, 0.009933024406433106, 0.00990236759185791, 0.01021132755279541, 0.009963839530944825, 0.010104703903198241, 0.00992460823059082, 0.009861120223999023, 0.01002086353302002, 0.010041343688964843, 0.00984444808959961, 0.009838944435119628, 0.009973247528076172, 0.009941439628601074, 0.009881600379943848, 0.009953503608703613, 0.009940768241882324, 0.00998521614074707, 0.009888799667358398, 0.009940768241882324, 0.009879551887512206, 0.009953280448913575, 0.009992064476013184, 0.009973888397216796, 0.010004480361938477, 0.009875200271606445, 0.009857472419738769, 0.009852831840515136, 0.009822239875793457, 0.009869183540344238, 0.009950912475585938, 0.009914688110351563, 0.009948736190795898, 0.010011327743530274, 0.010006208419799805, 0.01009385585784912, 0.010096511840820312, 0.009865280151367187, 0.00983846378326416, 0.010000991821289062, 0.009547776222229003, 0.009905887603759765, 0.010008831977844239, 0.010020416259765625, 0.009941087722778321, 0.009896320343017578, 0.009896063804626464, 0.009889568328857422, 0.009885824203491211, 0.009824224472045898, 0.009938272476196289, 0.009869983673095703, 0.009942879676818847, 0.009825568199157714, 0.009909119606018067, 0.01021951961517334, 0.010182751655578613, 0.009910176277160645, 0.010018688201904297, 0.009943167686462402, 0.010053119659423827, 0.009986559867858886, 0.010205183982849121, 0.009953280448913575, 0.009985376358032226, 0.00986723232269287, 0.010265024185180664, 0.010022527694702148, 0.009867775917053222, 0.009817407608032227, 0.009990495681762695, 0.009947615623474121, 0.009973759651184083, 0.009852959632873535, 0.00999830436706543, 0.00978876781463623, 0.009797856330871582, 0.009797151565551759, 0.009954527854919433, 0.009809375762939453, 0.009850079536437989, 0.01003212833404541, 0.009897503852844238, 0.009892319679260254, 0.010584063529968261, 0.009887680053710938, 0.009795104026794434, 0.009836607933044434, 0.009855456352233886, 0.009963520050048828, 0.009828351974487304, 0.00990822410583496, 0.00977660846710205, 0.00992899227142334, 0.009830719947814941, 0.00976905632019043, 0.00970531177520752, 0.009720928192138672, 0.010123519897460938, 0.00984335994720459, 0.009802944183349609, 0.009781120300292968, 0.00977785587310791, 0.009383808135986329, 0.00982204818725586, 0.0099136962890625, 0.009916959762573242, 0.009855423927307129, 0.009770976066589356, 0.010278911590576171, 0.009891903877258301, 0.009945023536682128, 0.009875455856323241, 0.009934847831726074, 0.00985267162322998, 0.009864512443542481, 0.009789376258850098, 0.009769984245300293, 0.00988310432434082, 0.009828831672668457, 0.00980793571472168, 0.009752096176147461, 0.00980016040802002, 0.00998316764831543, 0.009986944198608398, 0.009905216217041015, 0.00989465618133545, 0.009873056411743164, 0.009878335952758789, 0.009916064262390136, 0.009861120223999023, 0.009846752166748046, 0.009794848442077637, 0.009824352264404297, 0.009855072021484374, 0.009793503761291504, 0.009785951614379883, 0.009832639694213868, 0.009817567825317383, 0.00989628791809082, 0.009869312286376953, 0.009803872108459472, 0.00986300754547119, 0.009824192047119141, 0.010106176376342773, 0.009924672126770019, 0.009827391624450683, 0.009885312080383301, 0.009817631721496582, 0.010437248229980468, 
0.00993280029296875, 0.009918560028076171, 0.009810784339904784, 0.00986342430114746, 0.009892576217651368, 0.009887616157531738, 0.010270336151123046, 0.010117728233337403, 0.009867520332336425, 0.009961183547973634, 0.009840576171875, 0.009822208404541016, 0.01019603157043457, 0.010150848388671875, 0.009918304443359375, 0.01003536033630371, 0.00953609561920166, 0.009877663612365722, 0.009777152061462402, 0.009821760177612304, 0.009821920394897462, 0.00979206371307373, 0.009857343673706056, 0.009883487701416015, 0.009980192184448242, 0.009843520164489746, 0.009874560356140137, 0.010023903846740723, 0.009870143890380859, 0.009879103660583496, 0.00984217643737793, 0.00983955192565918, 0.009801183700561523, 0.00998249626159668, 0.009926655769348144, 0.009862303733825684, 0.009892831802368164, 0.009901472091674805, 0.009904831886291503, 0.00983836841583252, 0.009863167762756348, 0.009852928161621094, 0.009828384399414063, 0.009844703674316406, 0.009859071731567384, 0.009795488357543946, 0.009868800163269043, 0.009791584014892578, 0.01004412841796875, 0.010258048057556153, 0.010027039527893066, 0.01054700756072998, 0.009802047729492187, 0.009783424377441407, 0.009807744026184083, 0.009852416038513183, 0.009936479568481446, 0.009802847862243653, 0.009832575798034668, 0.009823936462402344, 0.009872511863708496, 0.009859968185424804, 0.009862367630004882, 0.010005215644836425, 0.009883711814880371, 0.010018207550048829, 0.009822815895080566, 0.009999808311462402, 0.009912896156311034, 0.009751775741577148, 0.009845727920532227, 0.009806943893432616, 0.009845184326171876, 0.010100255966186524, 0.0099683837890625, 0.009930751800537109, 0.009879551887512206, 0.010093952178955078, 0.009949824333190919]",tokens/s,99.94109811506699,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.287936,14689.435648,0.0,14294.188032,14284.158464,s,1,7.4896943359375,7.4896943359375,0.0,7.4896943359375,7.4896943359375,7.4896943359375,7.4896943359375,[7.4896943359375],,kWh,1.4267168679187611e-05,1.560477292521457e-06,7.197227980010323e-06,2.3024873951719392e-05,,MB,1159.585792,14995.61984,0.0,14587.789312,14512.892416,s,10,2.1156253356933594,0.21156253356933594,0.0047781127013005464,0.21339472198486328,0.21463478698730468,0.2157583038330078,0.2166571173095703,"[0.198820068359375, 0.21002482604980469, 0.20845228576660157, 0.21333978271484375, 0.21344966125488282, 0.21438511657714843, 0.2128421173095703, 0.21688182067871092, 0.2136397705078125, 0.21378988647460936]",tokens/s,1210.0441211444486,kWh,6.3592148394028245e-06,7.012992785076774e-07,4.21556496665201e-06,1.1276079084562512e-05,tokens/kWh,22702926.973124564,MB,1182.142464,15100.47744,0.0,14692.646912,14646.153216,s,10,43.51311914062501,4.351311914062501,0.006574783603022592,4.351512939453125,4.359264599609375,4.360290502929688,4.361111225585938,"[4.33798828125, 4.3467451171875, 4.3467138671875, 4.35394970703125, 4.35903662109375, 
4.35664697265625, 4.36131640625, 4.350025390625, 4.3476962890625, 4.35300048828125]",tokens/s,14.47839209053196,kWh,0.00012701867365351295,1.4010532308286438e-05,8.433970032194869e-05,0.00022536890628374807,tokens/kWh,279541.6680980853,,s,630,43.509153587341366,0.0690621485513354,0.0005463833746282887,0.06900400161743164,0.06953564147949219,0.06970079917907715,0.07210129539489747,"[0.07258521270751953, 0.06911984252929687, 0.0683496322631836, 0.06819667053222657, 0.0684606704711914, 0.0682718734741211, 0.06827760314941406, 0.0684728012084961, 0.06824038696289063, 0.06830226898193359, 0.06852806091308594, 0.06832998657226562, 0.06824969482421875, 0.06847283172607421, 0.06864662170410156, 0.06875552368164063, 0.06874960327148437, 0.06874476623535156, 0.06857766723632812, 0.06848863983154296, 0.06853004455566407, 0.06851612854003906, 0.06860022735595703, 0.06840697479248047, 0.06888070678710938, 0.06847923278808593, 0.06859478759765625, 0.06850012969970704, 0.06882118225097657, 0.06887612915039062, 0.06887369537353516, 0.06878873443603516, 0.06865853118896484, 0.0689179229736328, 0.06872064208984376, 0.06883676910400391, 0.06883388519287109, 0.06876096343994141, 0.06901209259033203, 0.06890393829345703, 0.06879676818847656, 0.06892726135253906, 0.06908956909179688, 0.06896256256103515, 0.06917769622802734, 0.06915641784667968, 0.06898531341552734, 0.06902777862548828, 0.06900534057617187, 0.06899001312255859, 0.0690060806274414, 0.06899321746826172, 0.06906620788574219, 0.06912464141845703, 0.0690802230834961, 0.06911430358886719, 0.06930409240722656, 0.06935616302490234, 0.069212158203125, 0.06920588684082031, 0.06909964752197266, 0.06915299224853516, 0.0694188461303711, 0.07203292846679688, 0.06919577789306641, 0.06860800170898437, 0.06844585418701171, 0.06844214630126953, 0.06841999816894531, 0.06843897247314454, 0.06843244934082031, 0.06820735931396485, 0.06861775970458985, 0.06832550048828125, 0.06852352142333984, 0.06844822692871094, 0.06868860626220703, 0.06915782165527344, 0.06906355285644532, 0.06898073577880859, 0.06910140991210938, 0.06886147308349609, 0.06884153747558594, 0.0686370849609375, 0.06852828979492187, 0.06879424285888672, 0.06846227264404296, 0.06878572845458984, 0.06905846405029296, 0.06874018859863282, 0.06907459259033204, 0.06926486206054687, 0.06904096221923828, 0.06891065979003906, 0.06924256134033203, 0.06876982116699219, 0.0689662094116211, 0.06892361450195313, 0.06891500854492187, 0.06879235076904297, 0.06863481903076171, 0.06878585815429687, 0.06879875183105469, 0.06882982635498047, 0.06886547088623046, 0.06926102447509766, 0.06913715362548828, 0.06922988891601563, 0.06908329772949219, 0.06908089447021484, 0.06925804901123046, 0.06916710662841796, 0.06939238739013671, 0.06890086364746094, 0.06916915130615234, 0.06901302337646484, 0.06894854736328125, 0.06946192169189454, 0.0690708465576172, 0.06947020721435547, 0.06950911712646485, 0.0691937255859375, 0.06998226928710938, 0.06947833251953126, 0.0695902099609375, 0.06928272247314453, 0.07193395233154297, 0.06917743682861328, 0.06852294158935547, 0.06883599853515625, 0.06840780639648437, 0.06867254638671876, 0.06846121978759766, 0.06855897521972656, 0.068523681640625, 0.06881260681152344, 0.06859961700439453, 0.06848313903808594, 0.06858812713623047, 0.06872998046875, 0.06949903869628907, 0.06934796905517578, 0.06880659484863282, 0.06860822296142578, 0.06840729522705079, 0.06855270385742188, 0.0685277099609375, 0.068470947265625, 0.06841545867919922, 0.06864530944824218, 0.06864185333251953, 0.06855500793457031, 
0.06872930908203125, 0.06871363067626954, 0.068901123046875, 0.06947235107421874, 0.06902559661865235, 0.06902352142333984, 0.06885270690917969, 0.06883737945556641, 0.06871670532226562, 0.06896419525146484, 0.06873257446289062, 0.06912556457519531, 0.07008528137207032, 0.06873078155517579, 0.06881734466552734, 0.06898854064941407, 0.06909503936767578, 0.06944847869873047, 0.06961724853515625, 0.06898876953125, 0.06894992065429688, 0.06912873840332032, 0.06905606079101563, 0.069421630859375, 0.069119873046875, 0.06924057769775391, 0.06933324432373048, 0.06898489379882812, 0.0689697265625, 0.06883510589599609, 0.0690206069946289, 0.06953705596923829, 0.06995833587646484, 0.06930850982666016, 0.06919158172607422, 0.06910157012939454, 0.06945315551757812, 0.07210189056396485, 0.06947225952148438, 0.06875958251953125, 0.06831919860839844, 0.06831839752197266, 0.06841222381591797, 0.06851939392089844, 0.06883372497558594, 0.06869551849365234, 0.068880126953125, 0.06829740905761719, 0.06849350738525391, 0.0689392318725586, 0.06862681579589844, 0.06912220764160157, 0.06913433837890624, 0.06914252471923828, 0.06882099151611328, 0.06854783630371093, 0.06867775726318359, 0.06864768218994141, 0.06876338958740234, 0.06894127655029297, 0.06849932861328124, 0.06849520111083984, 0.06862726593017578, 0.06875337219238281, 0.06927110290527344, 0.06924931335449219, 0.06924697875976563, 0.06953794860839843, 0.06936914825439452, 0.06947235107421874, 0.06915312194824219, 0.06874940490722656, 0.06896044921875, 0.06890444946289062, 0.0693623046875, 0.07014940643310547, 0.06887702178955078, 0.06887833404541016, 0.06893977355957032, 0.06923375701904297, 0.06997062683105469, 0.06942313385009766, 0.06927788543701172, 0.06965657806396484, 0.06939260864257812, 0.06901023864746093, 0.06932784271240235, 0.06913027191162109, 0.06928598022460937, 0.06939225769042968, 0.06895961761474609, 0.06912882995605468, 0.06899712371826172, 0.06923673248291015, 0.06984060668945312, 0.0696568603515625, 0.06935552215576171, 0.06938777923583984, 0.06957011413574218, 0.06936396789550782, 0.07218790435791016, 0.06934937286376953, 0.06856841278076171, 0.06848310089111329, 0.0684303970336914, 0.06856636810302734, 0.06867449951171875, 0.06885968017578124, 0.06856716918945313, 0.06872013092041016, 0.06890534210205078, 0.06892134094238281, 0.0688046417236328, 0.06892518615722656, 0.06926716613769532, 0.06948095703125, 0.06938361358642578, 0.06893753814697266, 0.06864498901367187, 0.06864959716796876, 0.06873017883300782, 0.06867574310302735, 0.06936547088623046, 0.06878899383544922, 0.0687022705078125, 0.06856729888916016, 0.06907469177246094, 0.06948047637939453, 0.06935343933105469, 0.06959017944335938, 0.06951760101318359, 0.06901529693603516, 0.06941104125976562, 0.0688213119506836, 0.068847900390625, 0.06878617858886718, 0.069010498046875, 0.06945465850830078, 0.06910979461669922, 0.06894831848144531, 0.06884281921386719, 0.06949078369140625, 0.06950252532958984, 0.06938873291015625, 0.0694029769897461, 0.0692384033203125, 0.06931251525878906, 0.06961190032958985, 0.06958080291748046, 0.06939238739013671, 0.06950508880615235, 0.06946364593505859, 0.06921619415283203, 0.06900777435302734, 0.06914646148681641, 0.06961138916015625, 0.06953548431396485, 0.07015888214111328, 0.06960332489013672, 0.06912614440917969, 0.06954105377197266, 0.06985779571533203, 0.06950739288330078, 0.07209983825683594, 0.06916896057128906, 0.06849513244628906, 0.06857068634033203, 0.0685184326171875, 0.06871059417724609, 0.06898665618896484, 0.06856694030761719, 
0.06848966217041015, 0.06862643432617188, 0.06918962860107422, 0.06866102600097657, 0.068609375, 0.0689438705444336, 0.06970681762695312, 0.06940643310546875, 0.06928336334228516, 0.06889859008789062, 0.06880131530761718, 0.06851583862304687, 0.0686913604736328, 0.0691226577758789, 0.06872268676757813, 0.06880255889892578, 0.06857500457763673, 0.06872406768798828, 0.06894297790527344, 0.06898252868652344, 0.06934102630615234, 0.06986972808837891, 0.06929612731933593, 0.06933708953857422, 0.06877318572998047, 0.06916780853271484, 0.06877184295654297, 0.06877597045898437, 0.06901942443847656, 0.06880480194091797, 0.06905840301513672, 0.06901190185546875, 0.06912790679931641, 0.06932892608642578, 0.06959718322753906, 0.06979373168945313, 0.06948067474365234, 0.06926233673095702, 0.06967174530029296, 0.06935942077636718, 0.06891334533691407, 0.06893977355957032, 0.06942720031738281, 0.06931199645996093, 0.06901811218261719, 0.06908236694335937, 0.06926988983154297, 0.06952566528320313, 0.06967874908447266, 0.06972473907470703, 0.0694205093383789, 0.06924931335449219, 0.06936716461181641, 0.06951213073730468, 0.07014988708496094, 0.07210371398925781, 0.06939670562744141, 0.06869606781005859, 0.06883328247070312, 0.06846380615234375, 0.06862726593017578, 0.06890716552734374, 0.0687511978149414, 0.06866051483154297, 0.06872959899902344, 0.0688333740234375, 0.06909939575195312, 0.06861561584472656, 0.06872940826416016, 0.06956646728515625, 0.06904994964599609, 0.06926284790039063, 0.06876060485839844, 0.06875945281982422, 0.06877696228027344, 0.06909836578369141, 0.06889686584472657, 0.06882713317871093, 0.06906674957275391, 0.06891725158691406, 0.06896422576904297, 0.06926051330566406, 0.0696278076171875, 0.0691435546875, 0.06939852905273437, 0.06967203521728516, 0.06924150085449218, 0.0690660171508789, 0.06904729461669921, 0.06904624176025391, 0.06887382507324219, 0.06931670379638671, 0.06922016143798829, 0.06904637145996094, 0.06950691223144531, 0.06945645141601563, 0.0693759994506836, 0.06954598236083985, 0.06931046295166016, 0.06948834991455079, 0.0696404800415039, 0.06928998565673829, 0.06938832092285156, 0.06949600219726562, 0.06947702026367188, 0.06942428588867187, 0.06933193969726563, 0.06949478149414062, 0.0694307861328125, 0.06919379425048829, 0.06925299072265625, 0.06945439910888672, 0.06962995147705078, 0.0695767059326172, 0.06966668701171876, 0.0696562271118164, 0.06934575653076172, 0.0691443862915039, 0.07213875579833984, 0.06927769470214844, 0.06873827362060547, 0.06869478607177734, 0.06858675384521484, 0.068552734375, 0.0691136932373047, 0.06899203491210938, 0.06880863952636719, 0.06870748901367188, 0.06850745391845703, 0.06854550170898438, 0.06864806365966797, 0.0687809295654297, 0.06926937866210937, 0.0690458526611328, 0.06880441284179688, 0.0688486099243164, 0.0687891845703125, 0.06859449768066406, 0.06857933044433594, 0.0688222427368164, 0.06890364837646484, 0.06865312194824219, 0.06865715026855469, 0.06855455780029297, 0.06863276672363282, 0.0689656982421875, 0.06932921600341797, 0.0691756820678711, 0.06864691162109375, 0.06900505828857421, 0.06882329559326172, 0.06893567657470703, 0.06884259033203124, 0.06910620880126953, 0.06886243438720703, 0.06883932495117187, 0.06887606048583984, 0.06911385345458984, 0.06906082916259766, 0.06924854278564453, 0.06930867004394531, 0.06958512115478516, 0.06919577789306641, 0.06897869110107421, 0.06917113494873046, 0.06912598419189453, 0.06891487884521484, 0.06890147399902344, 0.06912185668945313, 0.06906594848632812, 0.06913046264648437, 
0.06896742248535156, 0.06918131256103516, 0.0694879379272461, 0.0697798080444336, 0.0697838363647461, 0.06913148498535156, 0.0690749740600586, 0.0689988784790039, 0.06952384185791016, 0.07013209533691406, 0.07213459014892579, 0.0691500473022461, 0.06855958557128906, 0.06860800170898437, 0.06835318756103516, 0.06856585693359375, 0.06845439910888672, 0.06848102569580078, 0.06839705657958985, 0.06851299285888672, 0.06839785766601562, 0.06854783630371093, 0.06849120330810547, 0.06880662536621093, 0.06949542236328125, 0.06931407928466797, 0.06893949127197266, 0.0685902099609375, 0.06840278625488282, 0.06834867095947265, 0.06881011199951172, 0.06868409729003906, 0.06859715270996093, 0.06859993743896485, 0.06864771270751953, 0.06851789093017578, 0.06871449279785156, 0.06875299072265625, 0.06898902130126953, 0.06978797149658203, 0.06911318206787109, 0.06900726318359375, 0.06903679656982421, 0.06877932739257812, 0.06884528350830078, 0.06915376281738281, 0.06966995239257813, 0.06906771087646485, 0.06887811279296875, 0.06893180847167969, 0.06900294494628906, 0.06915309143066406, 0.06917446136474609, 0.06947840118408204, 0.06940534210205078, 0.06937385559082031, 0.06910182189941406, 0.06899097442626953, 0.06900940704345702, 0.06920716857910156, 0.06940354919433593, 0.06946412658691406, 0.06892329406738282, 0.06885158538818359, 0.06882726287841796, 0.06902547454833985, 0.06933126068115235, 0.06947433471679687, 0.06938950347900391, 0.06922217559814453, 0.06967005157470703, 0.0694557113647461, 0.06923849487304687, 0.07238857269287109, 0.06957615661621094, 0.06867407989501953, 0.06846876525878906, 0.06876156616210938, 0.06865446472167969, 0.06875523376464844, 0.06870425415039062, 0.06840013122558594, 0.06836128234863281, 0.06920829010009766, 0.06871711730957031, 0.06846463775634766, 0.06882508850097656, 0.0694939193725586, 0.06966480255126953, 0.06904710388183594, 0.0686612777709961, 0.0684849624633789, 0.06853644561767579, 0.06894127655029297, 0.06853817749023437, 0.06861692810058594, 0.0684933090209961, 0.06860514831542969, 0.06875398254394531, 0.06891248321533203, 0.06882601928710938, 0.06928173065185547, 0.06976675415039063, 0.06939894104003906, 0.06906473541259765, 0.06881056213378907, 0.06919574737548828, 0.06888169860839843, 0.06892845153808594, 0.06891519927978515, 0.06879567718505859, 0.06899942779541016, 0.06894544219970702, 0.06923769378662109, 0.069165283203125, 0.06920521545410156, 0.0697636489868164, 0.06970780944824219, 0.06938985443115234, 0.06917369842529297, 0.06975897979736329, 0.06891292572021485, 0.06925539398193359, 0.06910157012939454, 0.06902579498291016, 0.06904627227783203, 0.06911590576171875, 0.06919497680664062, 0.06912489318847656, 0.06933299255371093, 0.06981017303466797, 0.06969344329833985, 0.06923878479003906, 0.06908889770507813, 0.06931289672851562, 0.06946931457519531]",tokens/s,14.479711694122551,, float16-eager,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.181824,1326.383104,0.0,931.135488,917.648384,s,1,7.267576171875,7.267576171875,0.0,7.267576171875,7.267576171875,7.267576171875,7.267576171875,[7.267576171875],,kWh,9.731543208325394e-06,1.0588403921696833e-06,4.187781127998336e-06,1.4978164728493414e-05,,MB,1164.394496,1458.50368,0.0,1050.673152,1018.330112,s,10,0.2378742084503174,0.023787420845031736,0.00025684186075807625,0.023689487457275393,0.02418208408355713,0.024227458477020264,0.02426375799179077,"[0.024172000885009766, 0.023683807373046876, 0.02393600082397461, 0.023694143295288086, 0.023820640563964844, 0.023541439056396486, 0.023403263092041014, 0.0242728328704834, 0.023684831619262697, 0.02366524887084961]",tokens/s,10761.99061965427,kWh,6.842642028688759e-07,7.546128079848815e-08,4.5220111637936463e-07,1.2119266000467288e-06,tokens/kWh,211233914.65302378,MB,1198.395392,1475.280896,0.0,1067.450368,1032.767488,s,10,13.457886840820311,1.3457886840820312,0.017177029152594957,1.3504623413085937,1.359035498046875,1.360448779296875,1.361579404296875,"[1.35027099609375, 1.34962744140625, 1.358721435546875, 1.3506536865234375, 1.3226339111328125, 1.3043353271484375, 1.3483697509765624, 1.361862060546875, 1.3536444091796875, 1.357767822265625]",tokens/s,46.812698564910725,kWh,3.787052177546552e-05,4.176658680802899e-06,1.7827452396220437e-05,5.987463285248884e-05,tokens/kWh,1052198.5187819193,,s,630,13.451185438156136,0.021351087997073217,0.00045205151538563394,0.02138086414337158,0.021686100006103517,0.02196893768310547,0.022835756683349608,"[0.02106595230102539, 0.021346879959106446, 0.02127872085571289, 0.02129100799560547, 0.021303295135498047, 0.021448991775512696, 0.021298912048339842, 0.021391008377075197, 0.02154489517211914, 0.021416351318359374, 0.02126185607910156, 0.021357023239135742, 0.02118572807312012, 0.021412128448486327, 0.021381664276123046, 0.021553279876708985, 0.021544704437255858, 0.021479551315307616, 0.021571104049682616, 0.021453279495239258, 0.021335487365722657, 0.02129158401489258, 0.021364288330078127, 0.0213623046875, 0.021316415786743165, 0.021583871841430666, 0.021978784561157226, 0.024228288650512696, 0.021284576416015624, 0.0214420166015625, 0.021383007049560546, 0.021314111709594727, 0.021451072692871095, 0.021391199111938475, 0.021065536499023436, 0.021246015548706056, 0.02138947105407715, 0.021878751754760742, 0.022534303665161133, 0.02166988754272461, 0.02131385612487793, 0.021712575912475586, 0.021139455795288087, 0.021212736129760743, 0.021174623489379884, 0.021401472091674804, 0.021395679473876952, 0.021402751922607422, 0.021304191589355467, 0.021182464599609374, 0.02115488052368164, 0.021255104064941407, 0.02123980712890625, 0.02138051223754883, 0.021162080764770507, 0.021642847061157225, 0.021600288391113283, 0.021261184692382813, 0.020927999496459963, 0.021076480865478517, 0.021190656661987304, 0.021192607879638673, 0.021180511474609375, 0.021114208221435546, 0.02127052879333496, 0.021258079528808593, 0.021300031661987306, 0.021374975204467773, 0.02131113624572754, 0.021439903259277342, 0.02142201614379883, 0.02123673629760742, 0.0211661434173584, 0.021147136688232423, 0.021078208923339843, 0.021301504135131835, 0.021383167266845703, 0.02197248077392578, 0.021680992126464845, 0.021490495681762697, 0.021133344650268556, 0.02122960090637207, 0.02188934326171875, 0.021295040130615235, 0.022047264099121094, 0.021900320053100587, 0.021379903793334962, 0.021665824890136718, 0.02129462432861328, 0.02153926467895508, 
0.021493120193481444, 0.021322719573974608, 0.021364255905151366, 0.021317920684814452, 0.021214496612548827, 0.021318368911743164, 0.021243904113769533, 0.021315584182739256, 0.02121660804748535, 0.021426847457885742, 0.021370880126953123, 0.02154297637939453, 0.021323423385620117, 0.02129897689819336, 0.021254623413085937, 0.021262208938598634, 0.021155168533325195, 0.02148137664794922, 0.021607648849487304, 0.02148137664794922, 0.021534143447875978, 0.021324127197265626, 0.022230976104736327, 0.021281919479370116, 0.021187519073486327, 0.021656736373901368, 0.02139411163330078, 0.021382495880126952, 0.021541919708251953, 0.02118771171569824, 0.021481472015380858, 0.02149443244934082, 0.021429759979248047, 0.02148953628540039, 0.021413759231567384, 0.02155388832092285, 0.021386079788208008, 0.021405023574829103, 0.02153251266479492, 0.021594816207885743, 0.022244735717773436, 0.02132044792175293, 0.021739168167114256, 0.021184864044189452, 0.021433759689331054, 0.02137763214111328, 0.02141606330871582, 0.021448575973510742, 0.021360992431640625, 0.021468832015991212, 0.021401599884033205, 0.02156870460510254, 0.02152876853942871, 0.02126630401611328, 0.021346368789672852, 0.02144291114807129, 0.021506399154663087, 0.02145894432067871, 0.021460384368896485, 0.021430879592895507, 0.022981760025024413, 0.022840192794799805, 0.021941471099853515, 0.021535295486450196, 0.021229440689086915, 0.02148918342590332, 0.021540800094604493, 0.02157043266296387, 0.021462400436401366, 0.021494047164916992, 0.021485919952392577, 0.02143846321105957, 0.02142617607116699, 0.02138710403442383, 0.0214880313873291, 0.02145635223388672, 0.02131177520751953, 0.021591903686523438, 0.021612287521362305, 0.021540256500244142, 0.021728256225585937, 0.022007104873657226, 0.021329631805419923, 0.021362752914428712, 0.021719968795776368, 0.021622047424316407, 0.0223874568939209, 0.02134422492980957, 0.021354496002197267, 0.021574687957763673, 0.021354496002197267, 0.02143491172790527, 0.021227392196655273, 0.02135856056213379, 0.022089567184448242, 0.02211408042907715, 0.021369823455810545, 0.021227519989013673, 0.021321216583251954, 0.020748287200927733, 0.021513599395751953, 0.021365503311157226, 0.02151849555969238, 0.021288671493530274, 0.021310848236083986, 0.02124982452392578, 0.02133625602722168, 0.021600927352905273, 0.021356544494628905, 0.02130534362792969, 0.02168627166748047, 0.021559295654296876, 0.02152239990234375, 0.021201183319091797, 0.021141248703002928, 0.021384672164916994, 0.02137280082702637, 0.02154697608947754, 0.02151807975769043, 0.021324735641479492, 0.0215285758972168, 0.02145715141296387, 0.02119059181213379, 0.021663040161132813, 0.02120863914489746, 0.021301984786987305, 0.021282560348510744, 0.0214552001953125, 0.021605855941772462, 0.021410655975341798, 0.021880640029907226, 0.02121436882019043, 0.022067583084106446, 0.021426624298095703, 0.021388511657714843, 0.021468255996704103, 0.021379968643188477, 0.021386016845703126, 0.02149718475341797, 0.02154159927368164, 0.021467552185058594, 0.021270111083984376, 0.021350400924682617, 0.021441919326782227, 0.021404287338256837, 0.021623968124389648, 0.021328832626342772, 0.021454751968383787, 0.021506048202514647, 0.021476736068725587, 0.02133465576171875, 0.021522335052490234, 0.021220735549926758, 0.021457696914672853, 0.02118547248840332, 0.021478431701660156, 0.02141302490234375, 0.021302080154418944, 0.021368671417236328, 0.022006111145019533, 0.021515903472900392, 0.021624319076538084, 0.020828351974487305, 0.021493343353271483, 
0.021075679779052735, 0.021252031326293944, 0.021082944869995117, 0.021491680145263672, 0.021733152389526368, 0.021462944030761717, 0.02137273597717285, 0.0213275203704834, 0.021441152572631836, 0.021456895828247072, 0.02124310493469238, 0.021551616668701173, 0.021335424423217772, 0.021697471618652344, 0.023775199890136718, 0.02130454444885254, 0.021468992233276366, 0.02093507194519043, 0.020655712127685546, 0.020620256423950194, 0.021012479782104493, 0.021261600494384764, 0.02134931182861328, 0.020764448165893554, 0.02077471923828125, 0.020713727951049806, 0.020520383834838868, 0.020586751937866212, 0.020471807479858398, 0.020424959182739257, 0.020696224212646483, 0.02059766387939453, 0.020545055389404297, 0.02031407928466797, 0.020310464859008788, 0.02068889617919922, 0.020719615936279297, 0.020942848205566408, 0.020472991943359376, 0.020658016204833984, 0.020974592208862306, 0.020745471954345705, 0.020716287612915038, 0.020602880477905275, 0.02065017509460449, 0.020565824508666994, 0.02090188789367676, 0.020809024810791017, 0.02078611183166504, 0.02086444854736328, 0.020752288818359374, 0.02066473579406738, 0.020699136734008788, 0.02066761589050293, 0.020910879135131837, 0.020783103942871094, 0.021818368911743165, 0.020677631378173827, 0.02065999984741211, 0.021082624435424805, 0.021257951736450197, 0.02172492790222168, 0.020821247100830078, 0.020880384445190428, 0.020645631790161132, 0.020590848922729492, 0.020590591430664062, 0.020568031311035156, 0.02059267234802246, 0.020527423858642577, 0.020491968154907225, 0.020426496505737305, 0.020510944366455078, 0.02045737648010254, 0.020605056762695313, 0.020516864776611327, 0.020414464950561522, 0.020477535247802735, 0.020420703887939453, 0.02044960021972656, 0.02029974365234375, 0.020534719467163086, 0.02074435234069824, 0.020668352127075195, 0.02059110450744629, 0.02031820869445801, 0.02043903923034668, 0.020508447647094728, 0.020527135848999022, 0.020744384765625, 0.020473791122436524, 0.020556928634643555, 0.0208035831451416, 0.02079840087890625, 0.020965599060058595, 0.021144832611083984, 0.02111747169494629, 0.021149696350097655, 0.020932928085327148, 0.020670143127441407, 0.02060310363769531, 0.02060652732849121, 0.020713695526123045, 0.020610624313354493, 0.02048988723754883, 0.02049056053161621, 0.020848960876464845, 0.020555936813354492, 0.020686784744262696, 0.021489728927612306, 0.020854207992553712, 0.02084876823425293, 0.020559968948364257, 0.020574560165405275, 0.02048409652709961, 0.020622528076171875, 0.02087436866760254, 0.02090166473388672, 0.02086697578430176, 0.020725759506225586, 0.02073798370361328, 0.020895807266235352, 0.02106502342224121, 0.020947647094726563, 0.02070944023132324, 0.02120649528503418, 0.021178335189819337, 0.020889408111572267, 0.021060447692871093, 0.02107792091369629, 0.020899456024169923, 0.021325824737548828, 0.020973760604858397, 0.02100764846801758, 0.02105027198791504, 0.020940799713134766, 0.021175615310668944, 0.02108220863342285, 0.021234272003173828, 0.02112870407104492, 0.021393632888793944, 0.021290943145751952, 0.021424480438232422, 0.021429311752319335, 0.021461952209472657, 0.02155897521972656, 0.02169068717956543, 0.022055103302001954, 0.02168608093261719, 0.021540864944458008, 0.02150099182128906, 0.021605152130126953, 0.021299455642700197, 0.021950111389160157, 0.021512447357177736, 0.021747711181640626, 0.021994943618774413, 0.0215695686340332, 0.021791263580322264, 0.021604352951049805, 0.021482688903808594, 0.021674816131591796, 0.02161164855957031, 0.021610847473144533, 
0.021563135147094726, 0.021501728057861328, 0.02143699264526367, 0.0212393913269043, 0.02121353530883789, 0.021434879302978514, 0.021405696868896484, 0.021284063339233397, 0.0213590087890625, 0.021309823989868165, 0.02123980712890625, 0.021471456527709962, 0.02123139190673828, 0.02129871940612793, 0.021580255508422852, 0.02162086486816406, 0.021403871536254882, 0.021153343200683593, 0.021407007217407226, 0.021504831314086915, 0.021606399536132814, 0.021475263595581055, 0.021507551193237304, 0.021364992141723632, 0.0214835205078125, 0.02177801513671875, 0.021333951950073243, 0.02133964729309082, 0.021389280319213867, 0.021443071365356444, 0.021328384399414063, 0.021381120681762695, 0.021364255905151366, 0.02137750434875488, 0.021313119888305664, 0.021379615783691405, 0.021594207763671876, 0.022573856353759764, 0.02155392074584961, 0.021964607238769532, 0.02141788864135742, 0.021598751068115235, 0.024227840423583984, 0.02147532844543457, 0.02142963218688965, 0.021602943420410158, 0.0213668155670166, 0.021561311721801757, 0.02130963134765625, 0.02147884750366211, 0.021606624603271483, 0.02142223930358887, 0.021512351989746093, 0.021364736557006835, 0.021245792388916017, 0.021326143264770506, 0.021402336120605468, 0.02168726348876953, 0.021461151123046876, 0.021763391494750976, 0.02152707290649414, 0.021442720413208008, 0.021391199111938475, 0.02149504089355469, 0.02154572868347168, 0.021428224563598632, 0.021444671630859374, 0.02137833595275879, 0.021508447647094725, 0.02149600028991699, 0.02219343948364258, 0.022824895858764647, 0.02147011184692383, 0.021545024871826173, 0.021876672744750976, 0.021566976547241212, 0.021443071365356444, 0.021389280319213867, 0.021638240814208985, 0.021348831176757812, 0.02166819190979004, 0.021849920272827148, 0.02139686393737793, 0.0225098876953125, 0.021695104598999024, 0.02182150459289551, 0.02127257537841797, 0.021372575759887696, 0.02145961570739746, 0.021460607528686525, 0.021435680389404296, 0.021480031967163086, 0.02154105567932129, 0.021358591079711914, 0.02142790412902832, 0.02150982475280762, 0.021633663177490235, 0.021597408294677736, 0.02147212791442871, 0.02136662483215332, 0.021671743392944337, 0.021250303268432618, 0.02146918487548828, 0.021140512466430665, 0.02132476806640625, 0.02144256019592285, 0.021383167266845703, 0.021319583892822267, 0.021460639953613282, 0.021373376846313477, 0.02156342315673828, 0.021513824462890626, 0.021205568313598634, 0.021806848526000976, 0.02136684799194336, 0.02162073516845703, 0.021503583908081055, 0.021327999114990233, 0.021514623641967774, 0.0216407356262207, 0.021850496292114257, 0.02136579132080078, 0.021385759353637696, 0.021430559158325195, 0.021307552337646484, 0.021348352432250976, 0.02207257652282715, 0.02133865547180176, 0.0216856632232666, 0.022333631515502928, 0.02155174446105957, 0.021573631286621094, 0.021394527435302735, 0.0214715518951416, 0.02161520004272461, 0.021547008514404296, 0.02161267280578613, 0.021473215103149413, 0.02154694366455078, 0.021431743621826174, 0.02127097511291504, 0.021207168579101564, 0.02140355110168457, 0.021423711776733398, 0.021343936920166017, 0.02140665626525879, 0.021526432037353514, 0.021302848815917968, 0.021459199905395507, 0.021001216888427734, 0.021385215759277345, 0.02149171257019043, 0.021695520401000975, 0.02148387145996094, 0.022984895706176758, 0.021383392333984376, 0.021241472244262694, 0.02143270492553711, 0.021274368286132814, 0.021237279891967772, 0.021242111206054688, 0.02128950309753418, 0.02125446319580078, 0.021380607604980468, 
0.021410144805908204, 0.021505088806152345, 0.02137343978881836, 0.021258176803588866, 0.021352960586547853, 0.02156105613708496, 0.02148944091796875, 0.021532447814941406, 0.021850847244262697, 0.021372671127319335, 0.021332223892211913, 0.022409503936767577, 0.021591487884521483, 0.02123196792602539, 0.021325183868408204, 0.02129158401489258, 0.02146713638305664, 0.02127257537841797, 0.02138252830505371, 0.02126464080810547, 0.021284543991088867, 0.021994176864624022, 0.02148316764831543, 0.021433919906616212, 0.02138995170593262, 0.02146905517578125, 0.02152272033691406, 0.021456703186035157, 0.02143244743347168, 0.02259974479675293, 0.022409183502197266, 0.022222496032714843, 0.021505760192871093, 0.02146784019470215, 0.021335744857788087, 0.021192991256713867, 0.021305376052856446, 0.021759040832519533, 0.021418912887573242, 0.021444608688354492, 0.0214936637878418, 0.021321119308471678, 0.02130400085449219, 0.021073919296264648, 0.021362464904785158, 0.021215679168701172, 0.02462211227416992, 0.021505023956298826]",tokens/s,46.836020728174546,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in 
from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: BloomForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: BloomForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: BloomForCausalLM does not support Flash Attention 2.0 yet. Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmp4ly39q4p/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 
564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: BloomForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,920.4736,2082.340864,0.0,1679.818752,1632.232448,s,1,8.5171240234375,8.5171240234375,0.0,8.5171240234375,8.5171240234375,8.5171240234375,8.5171240234375,[8.5171240234375],,kWh,3.2913518166651556e-06,3.558315727479314e-07,9.369451940011847e-07,4.584128583414272e-06,,MB,1348.104192,2099.11808,0.0,1684.013056,1430.345728,s,10,0.21164393806457518,0.02116439380645752,0.0003292233506428967,0.021089136123657228,0.02169312992095947,0.021731476497650146,0.021762153759002683,"[0.02127728080749512, 0.021046655654907227, 0.021684608459472655, 0.021052640914916994, 0.020846656799316406, 0.021273824691772462, 0.021125631332397463, 0.020745439529418944, 0.02176982307434082, 0.020821376800537108]",tokens/s,12095.787025182419,kWh,6.296137031519404e-07,6.941290204780159e-08,4.17472795663795e-07,1.116499400863537e-06,tokens/kWh,229288076.46649992,MB,1404.567552,2138.963968,0.0,1721.761792,1430.348288,s,10,12.118008911132813,1.2118008911132814,0.007968141511694971,1.2122135620117187,1.2202083618164064,1.2231542907714843,1.2255110339355468,"[1.2102088623046876, 1.215547607421875, 1.2129307861328125, 1.211496337890625, 1.2079512939453125, 1.198763916015625, 1.1995294189453125, 1.2261002197265625, 1.2159267578125, 1.2195537109375]",tokens/s,51.98873879529987,kWh,3.511343597601489e-05,3.872561781867933e-06,1.7828791801336306e-05,5.681478955921912e-05,tokens/kWh,1108866.2034791824,,s,630,12.115870748519907,0.019231540870666503,0.0003622668265235296,0.01912553596496582,0.01963649616241455,0.019827546977996827,0.020399996299743654,"[0.01935513687133789, 0.018998815536499025, 0.01894294357299805, 0.018917375564575196, 0.019390464782714844, 0.01905254364013672, 0.018964256286621094, 0.018882303237915038, 0.018835519790649412, 0.019001632690429687, 
0.0188535041809082, 0.01913702392578125, 0.018958368301391602, 0.018900960922241212, 0.018847360610961914, 0.01887171173095703, 0.018969791412353516, 0.018884384155273437, 0.018956287384033203, 0.01916124725341797, 0.019221887588500977, 0.019343839645385743, 0.019306495666503908, 0.0192225284576416, 0.01915411186218262, 0.0190001277923584, 0.018950143814086915, 0.019005439758300782, 0.0189006404876709, 0.018823360443115233, 0.018849952697753906, 0.018890752792358398, 0.019076480865478515, 0.022732831954956054, 0.019106208801269533, 0.019329216003417967, 0.019779584884643556, 0.019175424575805664, 0.01904537582397461, 0.019131391525268555, 0.01913398361206055, 0.019525503158569334, 0.019489120483398438, 0.019312896728515626, 0.01910927963256836, 0.01898067283630371, 0.01910044860839844, 0.019105344772338866, 0.019222976684570313, 0.019561792373657228, 0.02077766418457031, 0.019612768173217773, 0.019283967971801756, 0.018968799591064452, 0.01903481674194336, 0.019290016174316405, 0.019347551345825196, 0.019503103256225587, 0.01943552017211914, 0.019139968872070312, 0.01908790397644043, 0.018974143981933592, 0.019089344024658204, 0.01901158332824707, 0.018986303329467772, 0.01904915237426758, 0.018944000244140623, 0.018982912063598634, 0.018987104415893553, 0.019103647232055664, 0.0190382080078125, 0.01960313606262207, 0.019740991592407227, 0.01928131294250488, 0.01924278450012207, 0.019155519485473633, 0.019349632263183595, 0.020528448104858397, 0.01956332778930664, 0.01965875244140625, 0.019709951400756837, 0.01918979263305664, 0.019230527877807616, 0.01909916877746582, 0.018981023788452147, 0.01893529510498047, 0.018888992309570314, 0.01905939292907715, 0.01907302474975586, 0.018861888885498047, 0.019040063858032228, 0.018982944488525392, 0.01920854377746582, 0.019268863677978514, 0.019162975311279296, 0.019419296264648438, 0.019077280044555663, 0.018987199783325196, 0.0191694393157959, 0.019247360229492187, 0.019673088073730468, 0.019558399200439454, 0.019647872924804688, 0.019489408493041992, 0.019169279098510742, 0.01939993667602539, 0.01938204765319824, 0.019174367904663085, 0.019616031646728517, 0.01919152069091797, 0.01949020767211914, 0.01937046432495117, 0.01943574333190918, 0.019575807571411134, 0.01940275192260742, 0.019163455963134766, 0.019079776763916017, 0.019123807907104492, 0.019136991500854492, 0.019058080673217775, 0.019166879653930664, 0.01945484733581543, 0.01942527961730957, 0.01956153678894043, 0.01974367904663086, 0.019969568252563477, 0.019799711227416993, 0.019630239486694335, 0.019593408584594726, 0.01934441566467285, 0.019090335845947267, 0.019028032302856445, 0.0190118408203125, 0.019162687301635742, 0.019044256210327147, 0.019038431167602538, 0.018992799758911133, 0.01902838325500488, 0.019064159393310548, 0.019161760330200197, 0.01905619239807129, 0.019028415679931642, 0.019017728805541992, 0.019102783203125, 0.019116992950439452, 0.019013631820678712, 0.019140607833862306, 0.019197824478149415, 0.019058143615722656, 0.01901430320739746, 0.01923072052001953, 0.019866783142089842, 0.019993440628051758, 0.019655872344970703, 0.019645248413085938, 0.019534975051879882, 0.019257408142089844, 0.019123008728027344, 0.01914854431152344, 0.019288320541381836, 0.019083263397216797, 0.019124223709106446, 0.019170848846435547, 0.019120832443237305, 0.01930563163757324, 0.019235424041748047, 0.019181600570678713, 0.019019872665405273, 0.019089311599731446, 0.019050048828125, 0.018927040100097655, 0.01885456085205078, 0.018964832305908202, 0.0190667839050293, 
0.018908447265625, 0.019465024948120118, 0.019598432540893555, 0.019528608322143554, 0.01947235107421875, 0.019490848541259764, 0.019557567596435548, 0.019634208679199218, 0.019566688537597656, 0.019500799179077148, 0.019434431076049804, 0.019578687667846678, 0.019191200256347657, 0.019104543685913085, 0.019021823883056642, 0.019516223907470702, 0.01957683181762695, 0.01960655975341797, 0.019616256713867186, 0.019279840469360352, 0.019235328674316408, 0.019147872924804688, 0.01914950370788574, 0.019126495361328124, 0.019152767181396486, 0.019210304260253906, 0.02008684730529785, 0.020669952392578125, 0.019636032104492187, 0.019579103469848633, 0.019173311233520507, 0.019355199813842774, 0.019632959365844728, 0.01989852714538574, 0.01942323112487793, 0.019531679153442384, 0.01961747169494629, 0.01928835105895996, 0.019680479049682616, 0.018975648880004883, 0.01921567916870117, 0.019053247451782225, 0.01907302474975586, 0.019109888076782225, 0.019041439056396485, 0.019228927612304686, 0.01933555221557617, 0.019019392013549803, 0.018975103378295898, 0.018992799758911133, 0.018882720947265626, 0.01906630325317383, 0.018909503936767578, 0.01922115135192871, 0.019113983154296875, 0.01901692771911621, 0.0190097599029541, 0.018947967529296873, 0.01907151985168457, 0.018960479736328126, 0.018954303741455077, 0.020395519256591797, 0.019018079757690428, 0.01892572784423828, 0.018909183502197266, 0.018928768157958985, 0.019180416107177734, 0.01896259117126465, 0.018980703353881835, 0.01890822410583496, 0.01889580726623535, 0.01899519920349121, 0.018865695953369142, 0.01908691215515137, 0.019030527114868166, 0.018921632766723633, 0.018956544876098633, 0.018976320266723634, 0.019200639724731447, 0.019173088073730468, 0.01937436866760254, 0.019628032684326172, 0.019628032684326172, 0.019501056671142578, 0.019580928802490235, 0.01928099250793457, 0.019157600402832032, 0.019140928268432618, 0.01948646354675293, 0.01904665565490723, 0.019100831985473632, 0.01913327980041504, 0.01902774429321289, 0.01906265640258789, 0.019006080627441406, 0.0189333438873291, 0.019433120727539062, 0.019587711334228517, 0.01959462356567383, 0.01957267189025879, 0.01952409553527832, 0.019445791244506835, 0.01920204734802246, 0.019158655166625977, 0.019118463516235352, 0.019439456939697265, 0.01937014389038086, 0.019912288665771483, 0.01921455955505371, 0.01911008071899414, 0.019112960815429687, 0.019122207641601562, 0.01905094337463379, 0.019845312118530273, 0.01921776008605957, 0.019054880142211916, 0.01907084846496582, 0.018994016647338866, 0.018906463623046876, 0.018920095443725585, 0.018886207580566406, 0.018928064346313476, 0.018909183502197266, 0.018869760513305665, 0.018931711196899414, 0.018893312454223633, 0.018790304183959963, 0.018956064224243164, 0.018880096435546875, 0.018836191177368164, 0.018916576385498048, 0.01901804733276367, 0.019014112472534178, 0.019264799118041992, 0.01940671920776367, 0.018951007843017578, 0.01894528007507324, 0.018886655807495118, 0.018963199615478515, 0.0189881591796875, 0.019081151962280274, 0.019083263397216797, 0.018966527938842775, 0.019068895339965822, 0.01929360008239746, 0.01897724723815918, 0.019007648468017578, 0.018875808715820314, 0.01901628875732422, 0.019005439758300782, 0.018989343643188477, 0.019012351989746094, 0.018911935806274413, 0.018953664779663086, 0.018930528640747072, 0.018935136795043946, 0.01941747283935547, 0.018891040802001952, 0.018882560729980468, 0.018923519134521484, 0.018984960556030273, 0.01899929618835449, 0.01902902412414551, 0.018959327697753905, 
0.019054592132568358, 0.019019775390625, 0.018980512619018553, 0.019013696670532227, 0.019019296646118164, 0.019030752182006835, 0.018958368301391602, 0.018902271270751954, 0.018953983306884765, 0.018938720703125, 0.01892572784423828, 0.01892915153503418, 0.01892572784423828, 0.018895200729370117, 0.01904547119140625, 0.01888934326171875, 0.018934272766113282, 0.018941568374633788, 0.0189050235748291, 0.01891276741027832, 0.019169984817504884, 0.01895427131652832, 0.019095552444458007, 0.019818496704101563, 0.018944000244140623, 0.019005216598510743, 0.018895072937011717, 0.018892799377441406, 0.01903561592102051, 0.019456544876098634, 0.01903139114379883, 0.019279584884643555, 0.018973087310791014, 0.01898918342590332, 0.0190611515045166, 0.0188408317565918, 0.01890342330932617, 0.019023935317993165, 0.01952751922607422, 0.01935408020019531, 0.01917795181274414, 0.019148799896240236, 0.018979839324951172, 0.018932735443115235, 0.019236703872680665, 0.019104927062988282, 0.0188852481842041, 0.018985151290893554, 0.019046592712402343, 0.01884364891052246, 0.018843072891235352, 0.01884841537475586, 0.01921014404296875, 0.018964479446411133, 0.018866111755371093, 0.018815040588378906, 0.018939903259277344, 0.019161088943481445, 0.018884288787841798, 0.018956607818603515, 0.018929664611816405, 0.0189520320892334, 0.01894211196899414, 0.018963712692260742, 0.018924415588378905, 0.01887628746032715, 0.018966432571411132, 0.018829408645629882, 0.01900339126586914, 0.018905088424682616, 0.018865535736083985, 0.018872032165527342, 0.019037088394165038, 0.01904025650024414, 0.01925119972229004, 0.019170400619506835, 0.019134944915771484, 0.01900771141052246, 0.01889302444458008, 0.019074207305908204, 0.01896294403076172, 0.019361408233642578, 0.01906934356689453, 0.01967519950866699, 0.018988927841186522, 0.019128704071044923, 0.018907136917114258, 0.018960575103759765, 0.01889587211608887, 0.018883392333984374, 0.01907302474975586, 0.01903126335144043, 0.019447744369506834, 0.019419391632080077, 0.019349632263183595, 0.019319040298461914, 0.019576576232910155, 0.019065343856811523, 0.019015167236328127, 0.018903520584106444, 0.019007232666015624, 0.018927263259887694, 0.018870880126953125, 0.01904364776611328, 0.019124576568603516, 0.019149375915527345, 0.019021823883056642, 0.019017440795898437, 0.019408512115478515, 0.0189486083984375, 0.019140127182006837, 0.01889344024658203, 0.018925567626953126, 0.01907711982727051, 0.021960704803466798, 0.01918156814575195, 0.01904640007019043, 0.019058687210083008, 0.018951423645019533, 0.01902463912963867, 0.019201631546020507, 0.019136383056640626, 0.01907766342163086, 0.019310176849365233, 0.018997600555419922, 0.019044576644897462, 0.01910153579711914, 0.018997247695922852, 0.019050336837768553, 0.019204256057739257, 0.01908448028564453, 0.019800895690917968, 0.01930409622192383, 0.019849567413330077, 0.019886112213134764, 0.01974678421020508, 0.020353248596191406, 0.01967900848388672, 0.019697664260864257, 0.019853471755981445, 0.019838783264160158, 0.020022687911987306, 0.019927743911743165, 0.01976313591003418, 0.019931135177612306, 0.020286815643310547, 0.020140703201293946, 0.019847103118896484, 0.019599424362182618, 0.01964998435974121, 0.019595775604248047, 0.01969977569580078, 0.01956172752380371, 0.019555072784423828, 0.019644416809082032, 0.01982636833190918, 0.02047007942199707, 0.019578880310058593, 0.01940060806274414, 0.01937187194824219, 0.019331327438354494, 0.01917679977416992, 0.019038976669311522, 0.019109151840209962, 
0.01909756851196289, 0.01907308769226074, 0.020065984725952148, 0.01917027282714844, 0.01943756866455078, 0.01926665687561035, 0.019358495712280273, 0.019570816040039064, 0.019419103622436523, 0.01960963249206543, 0.01967103958129883, 0.019625215530395507, 0.01968204879760742, 0.019451904296875, 0.019499008178710937, 0.019449344635009767, 0.019567007064819335, 0.01938377571105957, 0.019364479064941407, 0.01965875244140625, 0.020401824951171876, 0.019116384506225586, 0.01905254364013672, 0.01906892776489258, 0.019119808197021484, 0.01904876708984375, 0.018968320846557616, 0.018909439086914063, 0.018950143814086915, 0.01887027168273926, 0.01947216033935547, 0.018872543334960936, 0.018867839813232423, 0.01912665557861328, 0.018882015228271484, 0.01891996765136719, 0.01889036750793457, 0.01888243293762207, 0.018979007720947266, 0.018909536361694335, 0.019230688095092773, 0.019617504119873046, 0.019353567123413087, 0.019220800399780275, 0.019355648040771483, 0.019187711715698243, 0.019458047866821288, 0.019563615798950194, 0.019677536010742187, 0.019548736572265624, 0.019443904876708985, 0.019480287551879884, 0.019459711074829102, 0.019274208068847658, 0.019165184020996092, 0.018990240097045898, 0.01911280059814453, 0.018941951751708985, 0.019324895858764648, 0.019826719284057617, 0.01960550308227539, 0.01938230323791504, 0.019003360748291016, 0.018907136917114258, 0.019020927429199218, 0.018956928253173827, 0.018860031127929687, 0.018923519134521484, 0.01922867202758789, 0.019189695358276367, 0.01936390495300293, 0.020175968170166016, 0.019092384338378905, 0.019513343811035155, 0.019318784713745117, 0.019380224227905272, 0.019298303604125978, 0.019388416290283202, 0.019547296524047852, 0.01956950378417969, 0.019465951919555663, 0.019179807662963868, 0.0191932487487793, 0.019186271667480468, 0.0194334716796875, 0.019558208465576172, 0.020084320068359376, 0.01933577537536621, 0.019828224182128908, 0.019587583541870117, 0.01984102439880371, 0.019354976654052735, 0.019730464935302734, 0.01950729560852051, 0.019544416427612305, 0.01928825569152832, 0.01915011215209961, 0.019020511627197267, 0.018933216094970704, 0.019204639434814454, 0.01928716850280762, 0.019782144546508788, 0.019276159286499023, 0.0191506233215332, 0.01897056007385254, 0.018979103088378906, 0.018863712310791016, 0.018968992233276367, 0.01898700714111328, 0.018921472549438476, 0.019357696533203125, 0.019458047866821288, 0.01988803291320801, 0.01946403121948242, 0.019427007675170898, 0.019644416809082032, 0.01948259162902832, 0.019546720504760744, 0.01933660888671875, 0.019273439407348634, 0.01941593551635742, 0.01942108726501465, 0.01964067268371582, 0.01953318405151367, 0.019259424209594728, 0.019208288192749022, 0.0192608642578125, 0.019288543701171876]",tokens/s,51.99791356943638,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File 
""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1572, in _autoset_attn_implementation cls._check_and_enable_flash_attn_2( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1688, in _check_and_enable_flash_attn_2 raise ValueError( ValueError: BloomForCausalLM does not support Flash Attention 2.0 yet. 
Please request to add support where the model is hosted, on its model hub page: https://huggingface.co//tmp/tmpyzqt5fff/no_weights_model/discussions/new or in the Transformers GitHub repo: https://github.com/huggingface/transformers/issues/new ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,931.500032,2082.340864,0.0,1679.818752,1632.232448,s,1,8.818603515625,8.818603515625,0.0,8.818603515625,8.818603515625,8.818603515625,8.818603515625,[8.818603515625],,kWh,3.1427756791676606e-06,3.395175684439329e-07,1.0038896920000484e-06,4.486182939611642e-06,,MB,1230.036992,2097.020928,0.0,1684.013056,1430.345728,s,10,1.0015207977294922,0.10015207977294922,0.0023137925689742864,0.10060041427612304,0.10166705474853516,0.10268879013061523,0.1035061784362793,"[0.10371052551269531, 0.10132492828369141, 0.10040735626220704, 0.09880944061279297, 0.10104176330566406, 0.09432428741455078, 0.10049526214599609, 0.10070556640625, 0.10144000244140625, 0.09926166534423828]",tokens/s,2556.112669655661,kWh,3.294956696395134e-06,3.6327250571604124e-07,2.152932433955069e-06,5.8111616360662444e-06,tokens/kWh,44053154.262853086,MB,1279.504384,2134.769664,0.0,1721.761792,1430.348288,s,10,11.7607294921875,1.1760729492187498,0.004471154849394669,1.1754273071289063,1.1821063720703124,1.1828998046875,1.18353455078125,"[1.1785135498046875, 1.1791951904296876, 1.1836932373046876, 1.1700218505859374, 1.174586669921875, 1.1710855712890624, 1.17536328125, 1.1708487548828126, 1.1819300537109374, 1.1754913330078125]",tokens/s,53.56810565352268,kWh,3.404618432610434e-05,3.7547884522826507e-06,1.73380276032448e-05,5.513900038163178e-05,tokens/kWh,1142566.9592114498,,s,630,11.75863798713685,0.018664504741487047,0.000348685638615281,0.018567487716674803,0.019007532882690432,0.01913779077529907,0.019911779251098636,"[0.01941017532348633, 0.018733951568603517, 0.01864249610900879, 0.018741695404052735, 0.019134464263916014, 0.018908479690551757, 0.01877168083190918, 0.019035104751586915, 0.019177471160888672, 0.01892313575744629, 0.01877577590942383, 0.018561824798583985, 0.01844620704650879, 0.01844428825378418, 0.018499488830566405, 0.0184116153717041, 0.018544479370117186, 0.018432159423828125, 0.018465919494628905, 0.01846771240234375, 0.018415071487426757, 0.01846735954284668, 0.018561023712158203, 0.01847039985656738, 0.018557439804077147, 0.0185994873046875, 0.018516223907470705, 0.018620607376098632, 0.019406848907470704, 0.018763776779174804, 0.01865727996826172, 0.018654880523681642, 0.018538848876953125, 0.018589183807373046, 0.018500095367431642, 0.018488672256469725, 0.018491519927978515, 0.018477792739868163, 0.018476640701293946, 0.01847318458557129, 0.01869824028015137, 0.018542591094970702, 0.018538496017456055, 0.018589696884155273, 0.018560672760009767, 0.018532703399658203, 0.01865123176574707, 
0.018632192611694336, 0.018591424942016602, 0.018619104385375975, 0.018693952560424804, 0.01885318374633789, 0.01875014305114746, 0.018687583923339843, 0.01870294380187988, 0.01894790458679199, 0.01909584045410156, 0.01891744041442871, 0.01891913604736328, 0.019115615844726562, 0.019141151428222657, 0.018855648040771486, 0.019379711151123045, 0.019722240447998047, 0.018890752792358398, 0.018556512832641602, 0.018581920623779297, 0.018556928634643553, 0.018506847381591796, 0.018481760025024413, 0.018608448028564453, 0.018561023712158203, 0.018568416595458985, 0.01981123161315918, 0.021757280349731446, 0.018745311737060545, 0.01888928031921387, 0.01861222457885742, 0.01846272087097168, 0.018491392135620118, 0.018741247177124023, 0.018579519271850586, 0.01856096076965332, 0.01851603126525879, 0.018648832321166993, 0.018441631317138673, 0.01857411193847656, 0.01920204734802246, 0.018733119964599608, 0.018555967330932618, 0.01852035140991211, 0.01840208053588867, 0.018455520629882812, 0.018455392837524415, 0.01846790313720703, 0.0184532470703125, 0.018423999786376953, 0.018468223571777343, 0.01857155227661133, 0.018557279586791993, 0.018634752273559572, 0.018925567626953126, 0.018704256057739257, 0.018694271087646486, 0.018692096710205077, 0.018681600570678712, 0.01854489517211914, 0.018690143585205078, 0.018538272857666016, 0.019563840866088866, 0.018471168518066405, 0.01837664031982422, 0.01910643196105957, 0.01845587158203125, 0.018537151336669923, 0.01843814468383789, 0.018499135971069336, 0.018714656829833986, 0.018948511123657228, 0.019033567428588867, 0.019026464462280273, 0.0187675838470459, 0.01853660774230957, 0.01843008041381836, 0.018382848739624022, 0.018466272354125977, 0.019007328033447266, 0.01872115135192871, 0.018606815338134765, 0.018533920288085936, 0.01879465675354004, 0.018526176452636718, 0.018736799240112303, 0.01860243225097656, 0.018744895935058594, 0.01868828773498535, 0.018645280838012694, 0.01887628746032715, 0.01877731132507324, 0.018831680297851563, 0.019042783737182618, 0.019097375869750976, 0.018921152114868164, 0.018749088287353517, 0.018686464309692383, 0.018678144454956056, 0.018615583419799804, 0.018622655868530274, 0.018526752471923827, 0.018530303955078126, 0.01931612777709961, 0.019398975372314452, 0.01915673637390137, 0.018616863250732422, 0.01862214469909668, 0.01873516845703125, 0.018858240127563475, 0.018855680465698244, 0.018821184158325194, 0.0186943359375, 0.01862451171875, 0.0187064323425293, 0.01904025650024414, 0.018897983551025392, 0.019089727401733397, 0.019140512466430663, 0.018823904037475588, 0.019869695663452147, 0.01848854446411133, 0.018417535781860353, 0.018477983474731445, 0.018378751754760742, 0.01845782470703125, 0.01843280029296875, 0.01848476791381836, 0.018354656219482422, 0.018350080490112306, 0.018599199295043944, 0.018835519790649412, 0.022844064712524415, 0.018734720230102538, 0.01858531188964844, 0.01859446334838867, 0.018493471145629884, 0.018423776626586914, 0.018517568588256837, 0.01838252830505371, 0.01838755226135254, 0.018429279327392578, 0.019640352249145506, 0.01873302459716797, 0.018601247787475586, 0.018546880722045897, 0.018481695175170898, 0.01844223976135254, 0.018480127334594726, 0.018437120437622072, 0.01871401596069336, 0.01859552001953125, 0.01859676742553711, 0.01880268859863281, 0.018521600723266602, 0.018462944030761718, 0.01869968032836914, 0.018561311721801758, 0.018446367263793947, 0.01847699165344238, 0.01884569549560547, 0.01883750343322754, 0.018710304260253906, 0.01861452865600586, 
0.018506336212158202, 0.01838489532470703, 0.018391040802001952, 0.018337087631225588, 0.018470848083496094, 0.018467584609985353, 0.01846681594848633, 0.018663135528564453, 0.01851651191711426, 0.01850137519836426, 0.018482175827026368, 0.018397344589233398, 0.018408287048339845, 0.01844223976135254, 0.01839276885986328, 0.018510143280029298, 0.018464736938476563, 0.01854470443725586, 0.018544479370117186, 0.01896460723876953, 0.018972671508789063, 0.018735103607177735, 0.01884160041809082, 0.01887984085083008, 0.018639520645141603, 0.01848281669616699, 0.018378463745117188, 0.0184736328125, 0.01838604736328125, 0.01842265510559082, 0.01840438461303711, 0.01848419189453125, 0.018518016815185546, 0.01845043182373047, 0.01847657585144043, 0.018512351989746094, 0.01846019172668457, 0.018399711608886718, 0.018476959228515624, 0.0184116153717041, 0.018900287628173827, 0.019017663955688477, 0.01872863960266113, 0.01903152084350586, 0.01896451187133789, 0.01901145553588867, 0.018904031753540038, 0.01864908790588379, 0.018560192108154298, 0.01869647979736328, 0.018810752868652345, 0.018618080139160158, 0.01857753562927246, 0.01852704048156738, 0.01875299263000488, 0.01885238456726074, 0.01880396842956543, 0.0186397762298584, 0.018609376907348634, 0.01850636863708496, 0.018443456649780275, 0.018506559371948242, 0.01845248031616211, 0.0184770565032959, 0.01842995262145996, 0.01851116752624512, 0.018905792236328125, 0.019076576232910158, 0.018930208206176757, 0.01887027168273926, 0.018704448699951172, 0.018751520156860352, 0.018516063690185547, 0.018486272811889647, 0.018508607864379883, 0.018534400939941405, 0.01852390480041504, 0.018449888229370118, 0.01843231964111328, 0.01855062484741211, 0.01848179244995117, 0.01846220779418945, 0.018436384201049805, 0.018499391555786133, 0.018497215270996094, 0.018431711196899413, 0.01838703918457031, 0.01859391975402832, 0.01895043182373047, 0.018864639282226564, 0.018837215423583985, 0.018692256927490235, 0.01851753616333008, 0.01844883155822754, 0.01845199966430664, 0.018432575225830077, 0.018396703720092774, 0.01908176040649414, 0.018669248580932617, 0.01864860725402832, 0.01858585548400879, 0.018594335556030274, 0.01861417579650879, 0.01845667266845703, 0.01858585548400879, 0.018558944702148437, 0.018644351959228516, 0.018735040664672853, 0.018526016235351564, 0.018498464584350584, 0.018520063400268554, 0.01856716728210449, 0.018791584014892577, 0.01898556709289551, 0.019105152130126955, 0.01889164733886719, 0.02038979148864746, 0.01991894340515137, 0.018618368148803712, 0.018560224533081055, 0.018729375839233398, 0.018616704940795897, 0.01881292724609375, 0.018535711288452147, 0.018647775650024415, 0.018581119537353516, 0.018509664535522462, 0.018526111602783203, 0.018552928924560546, 0.01838467216491699, 0.01848931121826172, 0.018504480361938476, 0.018515968322753908, 0.018575136184692382, 0.018635103225708008, 0.01844806480407715, 0.018516159057617186, 0.01844223976135254, 0.018374656677246092, 0.018410751342773438, 0.018580032348632813, 0.018473152160644532, 0.018485248565673826, 0.01861155128479004, 0.018457248687744142, 0.018655168533325196, 0.018505599975585936, 0.018411712646484377, 0.018431072235107423, 0.018484128952026366, 0.01858937644958496, 0.018506048202514648, 0.01847500801086426, 0.018350080490112306, 0.0184050235748291, 0.0184303035736084, 0.018358144760131836, 0.018436416625976563, 0.018412511825561525, 0.018420576095581054, 0.018515392303466795, 0.018360895156860353, 0.018408863067626954, 0.018305631637573243, 0.018319168090820313, 
0.018401344299316405, 0.0183768310546875, 0.01946828842163086, 0.018866207122802733, 0.018605951309204102, 0.018642080307006835, 0.018678112030029295, 0.018659488677978515, 0.01840096092224121, 0.018995647430419923, 0.01874937629699707, 0.01852387237548828, 0.018514463424682617, 0.018426015853881837, 0.01839308738708496, 0.01845043182373047, 0.0184136962890625, 0.018368383407592774, 0.018456575393676757, 0.01835740852355957, 0.018451295852661132, 0.01903001594543457, 0.01861612892150879, 0.018352319717407226, 0.018421760559082033, 0.018372608184814454, 0.01846681594848633, 0.018431999206542968, 0.018564767837524414, 0.01858108711242676, 0.018456607818603515, 0.01854128074645996, 0.018571264266967775, 0.01860403251647949, 0.01849772834777832, 0.019236671447753907, 0.01939455986022949, 0.018526208877563476, 0.018372127532958984, 0.0183853759765625, 0.01840889549255371, 0.01831769561767578, 0.01841107177734375, 0.018407072067260742, 0.018525152206420897, 0.018497535705566406, 0.018415615081787108, 0.019092832565307617, 0.019561023712158204, 0.018935903549194336, 0.018910816192626953, 0.018944511413574217, 0.01896623992919922, 0.020142303466796876, 0.0188002872467041, 0.01865760040283203, 0.018550783157348632, 0.018720224380493165, 0.018530847549438477, 0.01861222457885742, 0.01861542320251465, 0.018740095138549805, 0.018503488540649413, 0.018530719757080077, 0.018519840240478515, 0.018833824157714844, 0.01864255905151367, 0.018659711837768555, 0.018509824752807616, 0.01978691291809082, 0.018512447357177733, 0.01859820747375488, 0.018563039779663087, 0.018707839965820313, 0.018582143783569337, 0.018570720672607424, 0.018623008728027343, 0.018978496551513672, 0.01848556709289551, 0.01855881690979004, 0.018706592559814453, 0.018683904647827147, 0.01859993553161621, 0.01857535934448242, 0.018524160385131837, 0.018528255462646484, 0.018540544509887694, 0.018505632400512697, 0.01851811218261719, 0.018515968322753908, 0.018492448806762696, 0.018586559295654295, 0.018525632858276367, 0.018416223526000978, 0.018441984176635742, 0.01835647964477539, 0.018397184371948243, 0.018452255249023438, 0.01838319969177246, 0.018401151657104493, 0.018479103088378905, 0.01842790412902832, 0.018547807693481445, 0.01858243179321289, 0.018486495971679687, 0.018633375167846678, 0.018522239685058593, 0.018696191787719727, 0.018630527496337892, 0.01855196762084961, 0.01851491165161133, 0.018480512619018556, 0.018522367477416993, 0.018481088638305665, 0.018362335205078125, 0.01853225517272949, 0.019228544235229492, 0.018514623641967775, 0.018702335357666015, 0.018531744003295898, 0.018608736038208007, 0.018514976501464844, 0.018611167907714842, 0.018609567642211913, 0.018521728515625, 0.018635679244995117, 0.018407487869262697, 0.018544639587402344, 0.01989423942565918, 0.018884607315063476, 0.01883545684814453, 0.019116031646728517, 0.01902207946777344, 0.018807584762573243, 0.01872150421142578, 0.018632511138916015, 0.019064640045166014, 0.018621055603027344, 0.018687007904052734, 0.01854342460632324, 0.018614431381225587, 0.018724447250366212, 0.018595584869384764, 0.019641056060791015, 0.01920518493652344, 0.018561664581298827, 0.018868480682373047, 0.01864089584350586, 0.01863884735107422, 0.01884115219116211, 0.018600383758544923, 0.018521951675415038, 0.01847452735900879, 0.018497888565063476, 0.020307903289794923, 0.018577760696411132, 0.018539968490600585, 0.018617151260375976, 0.018616064071655274, 0.018522144317626953, 0.018584768295288087, 0.0190511360168457, 0.01895849609375, 0.019009376525878908, 
0.018781728744506836, 0.01907571220397949, 0.01865673637390137, 0.018584096908569336, 0.01847436714172363, 0.018526432037353515, 0.018608095169067383, 0.018583232879638673, 0.01860416030883789, 0.01856780815124512, 0.018520063400268554, 0.018532352447509767, 0.01849884796142578, 0.018522144317626953, 0.018668224334716797, 0.01903206443786621, 0.018529312133789062, 0.01867670440673828, 0.01866316795349121, 0.01858380889892578, 0.01873052787780762, 0.018599775314331053, 0.018549375534057617, 0.01889446449279785, 0.018976736068725585, 0.018594207763671874, 0.018651136398315428, 0.019579168319702148, 0.018622592926025392, 0.018583423614501954, 0.018486495971679687, 0.018530527114868165, 0.018503488540649413, 0.018560831069946288, 0.01873094367980957, 0.01845267105102539, 0.018631488800048827, 0.0186429443359375, 0.019005439758300782, 0.018560224533081055, 0.01852227210998535, 0.01855961608886719, 0.018626560211181642, 0.01846249580383301, 0.018445663452148438, 0.018493919372558593, 0.018425344467163086, 0.018494367599487305, 0.018404767990112304, 0.018530912399291992, 0.01845984077453613, 0.018479936599731444, 0.018491071701049806, 0.018465087890625, 0.018722816467285155, 0.018778112411499022, 0.018733055114746093, 0.018790399551391602, 0.01901296043395996, 0.019065536499023438, 0.019032032012939452, 0.018889888763427735, 0.020067167282104493, 0.01883955192565918, 0.01868387222290039, 0.018611743927001954, 0.01863257598876953, 0.018549375534057617, 0.018566688537597655, 0.018651615142822264, 0.018716672897338867, 0.01902592086791992, 0.01902707290649414, 0.01879532814025879, 0.01854038429260254, 0.018499807357788087, 0.018487295150756835, 0.018545888900756837, 0.018589664459228515, 0.018498367309570312, 0.018493440628051756, 0.01862246322631836, 0.018522111892700196, 0.018532352447509767, 0.01849555206298828, 0.018470367431640624, 0.018518720626831055, 0.018516864776611328, 0.018541248321533203, 0.01847318458557129]",tokens/s,53.57763379476243,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,bloom,bigscience/bloomz-560m,bigscience/bloomz-560m,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,934.838272,3667.787776,0.0,3265.265664,3264.463872,s,1,8.93175390625,8.93175390625,0.0,8.93175390625,8.93175390625,8.93175390625,8.93175390625,[8.93175390625],,kWh,3.1321823458332196e-06,3.3851274842275133e-07,1.4244455840001224e-06,4.895140678256093e-06,,MB,1243.398144,3684.564992,0.0,3271.55712,2839.956992,s,10,0.7659250259399415,0.07659250259399414,0.003076815300833982,0.07710174560546876,0.0788837158203125,0.07982680282592773,0.08058127243041993,"[0.08076988983154297, 0.07727120208740235, 0.07633779144287109, 0.07796460723876954, 0.07693228912353516, 0.07655712127685547, 0.07828675079345702, 0.07447676849365234, 0.07867414093017579, 
0.06865446472167969]",tokens/s,3342.363695269487,kWh,2.6093500054687508e-06,2.8776418896290597e-07,1.7327444417499934e-06,4.62985863618165e-06,tokens/kWh,55293264.895692155,MB,1278.881792,3684.564992,0.0,3271.55712,2839.959552,s,10,12.405380981445312,1.2405380981445313,0.008799534108729933,1.24261279296875,1.2497301025390626,1.2524981079101563,1.2547125122070313,"[1.2472626953125, 1.232357177734375, 1.231828857421875, 1.226093017578125, 1.2428951416015626, 1.25526611328125, 1.2423304443359375, 1.233018310546875, 1.2452142333984375, 1.249114990234375]",tokens/s,50.784413710654185,kWh,3.55982921974478e-05,3.9260780819532144e-06,2.0262134265250058e-05,5.9786504544651074e-05,tokens/kWh,1053749.5121988433,,s,630,12.403163591384862,0.01968756125616649,0.00039919487537708014,0.019606127738952636,0.020029693412780762,0.020156766986846923,0.02090771106719971,"[0.02091427230834961, 0.020592256546020506, 0.020162080764770506, 0.019904767990112305, 0.01979248046875, 0.01971513557434082, 0.019659711837768556, 0.019647615432739258, 0.019533824920654298, 0.019499679565429688, 0.01954159927368164, 0.019612287521362303, 0.019582975387573243, 0.019533824920654298, 0.019526880264282228, 0.019524383544921874, 0.019499008178710937, 0.019533824920654298, 0.01960675239562988, 0.019581247329711914, 0.01956092834472656, 0.019515392303466796, 0.01960313606262207, 0.019589439392089843, 0.01956643295288086, 0.01959337615966797, 0.019594528198242187, 0.01966080093383789, 0.01974550437927246, 0.01958073616027832, 0.020684480667114258, 0.019675647735595703, 0.01970582389831543, 0.019802143096923828, 0.019607551574707033, 0.019652511596679686, 0.020240480422973633, 0.020004480361938477, 0.01979635238647461, 0.01983692741394043, 0.019893888473510743, 0.01975859260559082, 0.020482879638671875, 0.021190240859985353, 0.020058591842651366, 0.019954719543457032, 0.019833280563354493, 0.0198067512512207, 0.019781631469726564, 0.01962188720703125, 0.019612960815429688, 0.019953920364379884, 0.019730560302734373, 0.019784128189086914, 0.0196648006439209, 0.019693439483642577, 0.019607744216918944, 0.019808160781860353, 0.020088863372802735, 0.019793216705322265, 0.02003785514831543, 0.019448192596435546, 0.019441343307495116, 0.019732479095458985, 0.019559680938720705, 0.019309471130371094, 0.01934115219116211, 0.019290111541748048, 0.01925939178466797, 0.019399744033813476, 0.01931999969482422, 0.019230464935302734, 0.019369119644165038, 0.019401023864746094, 0.019458591461181642, 0.019298303604125978, 0.019295616149902342, 0.019365791320800782, 0.019305183410644532, 0.019363168716430665, 0.019401376724243163, 0.019364992141723634, 0.019284704208374023, 0.019292320251464844, 0.019286016464233398, 0.019412992477416992, 0.01930575942993164, 0.019252223968505858, 0.019326688766479493, 0.019269216537475587, 0.019460735321044923, 0.019295488357543945, 0.019403295516967775, 0.019619840621948242, 0.019928640365600585, 0.019519968032836912, 0.019349472045898437, 0.01944166374206543, 0.019320287704467774, 0.01930681610107422, 0.01928009605407715, 0.019212287902832033, 0.01929007911682129, 0.019298336029052735, 0.019369056701660156, 0.019959840774536133, 0.020810047149658204, 0.020058048248291015, 0.020058752059936524, 0.0198371524810791, 0.019979263305664064, 0.019960607528686523, 0.02004787254333496, 0.01992697525024414, 0.01966819190979004, 0.019716960906982422, 0.01978924751281738, 0.019976768493652344, 0.0198852481842041, 0.019800159454345705, 0.019868383407592772, 0.020059455871582033, 0.019716800689697264, 0.019582624435424804, 
0.019837343215942382, 0.019987968444824217, 0.0206561279296875, 0.01985740852355957, 0.0193756160736084, 0.0193287353515625, 0.01939740753173828, 0.019368255615234375, 0.01973580741882324, 0.019478271484375, 0.019617727279663086, 0.019417856216430666, 0.019310592651367187, 0.019418880462646483, 0.01940915107727051, 0.019355648040771483, 0.019315935134887694, 0.019447839736938477, 0.01948748779296875, 0.01937926483154297, 0.019416000366210936, 0.020026432037353516, 0.019907520294189452, 0.01997209548950195, 0.01989366340637207, 0.019919456481933592, 0.019848640441894532, 0.0197956485748291, 0.01993756866455078, 0.01974127960205078, 0.019556352615356445, 0.019812351226806642, 0.019458047866821288, 0.019437023162841797, 0.019582496643066407, 0.02025984001159668, 0.0196378231048584, 0.019685823440551756, 0.019637632369995116, 0.019455711364746095, 0.019387296676635742, 0.019409984588623048, 0.01930473518371582, 0.019395231246948242, 0.019417087554931642, 0.019363008499145507, 0.01940768051147461, 0.019498432159423828, 0.019757631301879883, 0.01951945686340332, 0.019314720153808595, 0.019326623916625978, 0.01941744041442871, 0.01927987289428711, 0.019391904830932616, 0.019355775833129883, 0.019388896942138672, 0.019242591857910156, 0.019765695571899413, 0.019320671081542968, 0.01926976013183594, 0.01941094398498535, 0.01948591995239258, 0.01944246482849121, 0.01940025520324707, 0.02029363250732422, 0.0196727352142334, 0.019478879928588867, 0.019345407485961915, 0.019523584365844726, 0.019801599502563477, 0.01974025535583496, 0.019489696502685547, 0.019377151489257814, 0.019283168792724608, 0.019274816513061524, 0.01929903984069824, 0.019322240829467773, 0.0191778564453125, 0.019314815521240234, 0.019339391708374024, 0.01944393539428711, 0.019528703689575197, 0.019704191207885743, 0.019446016311645508, 0.019429407119750976, 0.019396671295166014, 0.019434719085693358, 0.019302816390991212, 0.019298751831054686, 0.019286048889160155, 0.019224544525146485, 0.019289215087890624, 0.019335296630859374, 0.019339136123657227, 0.019323808670043945, 0.019426719665527344, 0.019407295227050782, 0.019453216552734375, 0.019436384201049806, 0.019450944900512697, 0.019395519256591796, 0.019382272720336914, 0.01939164733886719, 0.01953468894958496, 0.01942732810974121, 0.019487808227539063, 0.019465152740478515, 0.019505151748657225, 0.019533151626586913, 0.019769855499267578, 0.020172927856445314, 0.0199005126953125, 0.019632064819335937, 0.01946009635925293, 0.01960550308227539, 0.019341312408447265, 0.01935270309448242, 0.019409791946411133, 0.01942527961730957, 0.01943552017211914, 0.019353471755981445, 0.019373344421386718, 0.01932169532775879, 0.019383359909057617, 0.01936787223815918, 0.01934832000732422, 0.019428543090820313, 0.02054089546203613, 0.01983558464050293, 0.019795967102050782, 0.01983283233642578, 0.01967830467224121, 0.020019712448120116, 0.0198701114654541, 0.019634208679199218, 0.019591360092163085, 0.01960300827026367, 0.019497055053710938, 0.019427295684814452, 0.019440959930419922, 0.019413856506347655, 0.019391935348510744, 0.01941721534729004, 0.019816896438598634, 0.019662527084350585, 0.019490720748901368, 0.019527103424072264, 0.01969660758972168, 0.019537919998168944, 0.01944576072692871, 0.019687423706054686, 0.01960140800476074, 0.01948467254638672, 0.019535871505737306, 0.01945599937438965, 0.01959321594238281, 0.019656095504760742, 0.01978972816467285, 0.01978438377380371, 0.0197425594329834, 0.019590368270874025, 0.019804832458496093, 0.019624223709106447, 
0.01960313606262207, 0.019601728439331053, 0.01942732810974121, 0.01941641616821289, 0.019608224868774414, 0.019572736740112305, 0.019574783325195313, 0.019543136596679687, 0.019737503051757813, 0.01996112060546875, 0.020175807952880858, 0.019911935806274414, 0.019866144180297852, 0.01976896095275879, 0.01997452735900879, 0.022398591995239258, 0.020131519317626953, 0.019907264709472655, 0.019736480712890626, 0.01968137550354004, 0.019787519454956055, 0.020285247802734375, 0.019519935607910155, 0.019533824920654298, 0.019399904251098634, 0.01945382308959961, 0.01959619140625, 0.021041088104248047, 0.019963935852050783, 0.019802143096923828, 0.01980745506286621, 0.01984592056274414, 0.019805728912353517, 0.019777408599853517, 0.019614303588867187, 0.01964236831665039, 0.019735647201538087, 0.01978633689880371, 0.019750207901000977, 0.019655616760253906, 0.01982467269897461, 0.02003539276123047, 0.019834688186645508, 0.020285951614379884, 0.02004947280883789, 0.019798368453979493, 0.01968332862854004, 0.01965875244140625, 0.019772832870483398, 0.019503711700439453, 0.019552255630493166, 0.019312255859375, 0.01935807991027832, 0.019901567459106446, 0.019843360900878907, 0.019535648345947267, 0.019450687408447267, 0.019670783996582033, 0.019714303970336914, 0.019566591262817384, 0.019508607864379884, 0.019648448944091797, 0.01979433631896973, 0.019869728088378905, 0.019937536239624024, 0.01990220832824707, 0.01998054313659668, 0.019893695831298828, 0.02003139114379883, 0.0199003849029541, 0.01986777687072754, 0.019771968841552735, 0.020077983856201173, 0.02014064025878906, 0.020109312057495117, 0.0199554557800293, 0.01977120018005371, 0.019703392028808595, 0.020310880661010743, 0.019748863220214845, 0.019732479095458985, 0.019672447204589844, 0.019658784866333007, 0.020288095474243165, 0.019732160568237303, 0.020015424728393554, 0.019976192474365235, 0.019853311538696287, 0.020624927520751953, 0.02490006446838379, 0.020600831985473633, 0.02207744026184082, 0.020891647338867187, 0.02006425666809082, 0.01988412857055664, 0.019826591491699217, 0.01969152069091797, 0.01960140800476074, 0.019568639755249022, 0.019406528472900392, 0.019349824905395507, 0.019380224227905272, 0.019395872116088866, 0.019731168746948243, 0.019529472351074217, 0.019761375427246094, 0.01998441505432129, 0.02008678436279297, 0.019822399139404298, 0.01989651107788086, 0.019801536560058595, 0.01979395294189453, 0.01995008087158203, 0.01982467269897461, 0.0198756160736084, 0.01985763168334961, 0.020008512496948242, 0.019966400146484375, 0.019763200759887696, 0.01988380813598633, 0.01992252731323242, 0.019825279235839845, 0.01999571228027344, 0.020014015197753907, 0.019908607482910155, 0.01965452766418457, 0.0194520320892334, 0.019389568328857423, 0.01945235252380371, 0.019456447601318358, 0.019357311248779298, 0.01941516876220703, 0.019359743118286133, 0.01966441535949707, 0.019483583450317383, 0.01941100883483887, 0.019595071792602538, 0.019447776794433595, 0.019427263259887695, 0.019475648880004883, 0.019538719177246092, 0.01941302490234375, 0.019496320724487304, 0.01946224021911621, 0.019563039779663085, 0.019475872039794923, 0.019566783905029295, 0.019437984466552736, 0.01946214485168457, 0.019410688400268553, 0.019427007675170898, 0.01945452880859375, 0.01944758415222168, 0.020393856048583986, 0.019679487228393553, 0.019578880310058593, 0.019520864486694336, 0.019520160675048828, 0.01949286460876465, 0.019678848266601563, 0.01956492805480957, 0.01957472038269043, 0.019371456146240234, 0.0194463996887207, 
0.019416223526000975, 0.01955241584777832, 0.019513280868530273, 0.01953219223022461, 0.019437919616699217, 0.019525375366210938, 0.019507040023803712, 0.01955673599243164, 0.01973865509033203, 0.01972380828857422, 0.019466720581054687, 0.019557695388793945, 0.01946259117126465, 0.019478784561157227, 0.019957311630249025, 0.01947488021850586, 0.019537919998168944, 0.01978495979309082, 0.019710720062255858, 0.019715967178344725, 0.019841152191162108, 0.019646368026733398, 0.019654783248901367, 0.01953993606567383, 0.019517440795898438, 0.01981439971923828, 0.019471839904785158, 0.019462688446044922, 0.019471935272216797, 0.019569087982177734, 0.019438751220703127, 0.01947257614135742, 0.019489055633544923, 0.01945619201660156, 0.01949305534362793, 0.01948057556152344, 0.01952067184448242, 0.019708480834960938, 0.019575071334838868, 0.019594432830810547, 0.01961657524108887, 0.0194703369140625, 0.019533824920654298, 0.019570688247680663, 0.019486719131469727, 0.01944780731201172, 0.019351551055908203, 0.019548160552978516, 0.019568639755249022, 0.01946419143676758, 0.01955401611328125, 0.019493152618408203, 0.02051919937133789, 0.019689472198486328, 0.01944166374206543, 0.019513343811035155, 0.019928735733032228, 0.019830976486206055, 0.01996134376525879, 0.019933055877685547, 0.019710432052612303, 0.019614015579223633, 0.019550207138061524, 0.019502527236938478, 0.019546112060546874, 0.019329151153564452, 0.019470272064208986, 0.019436031341552733, 0.01949286460876465, 0.019805248260498048, 0.02254697608947754, 0.019572288513183593, 0.01950601577758789, 0.01940015983581543, 0.01960982322692871, 0.019609920501708983, 0.01958502388000488, 0.01954207992553711, 0.019590623855590822, 0.019476224899291992, 0.01955449676513672, 0.019353408813476563, 0.019388256072998048, 0.019458431243896485, 0.019749055862426756, 0.01949430465698242, 0.01980624008178711, 0.01966556739807129, 0.01967043113708496, 0.01958585548400879, 0.019547487258911134, 0.01970243263244629, 0.019677024841308594, 0.019666528701782225, 0.01963270378112793, 0.019558176040649414, 0.020355295181274415, 0.02014556884765625, 0.02004617691040039, 0.02004812812805176, 0.020096128463745117, 0.020136831283569336, 0.020131839752197265, 0.020029247283935545, 0.019852991104125976, 0.019874303817749024, 0.019722240447998047, 0.020036672592163084, 0.01997724723815918, 0.01964227294921875, 0.01964851188659668, 0.019908479690551758, 0.01977356719970703, 0.01970128059387207, 0.019653087615966798, 0.020076063156127928, 0.01970652770996094, 0.019598560333251955, 0.019724639892578125, 0.019967967987060548, 0.01958140754699707, 0.019839328765869142, 0.020416000366210937, 0.020230304718017577, 0.019928287506103516, 0.01979267120361328, 0.019886079788208007, 0.020150272369384766, 0.019918848037719726, 0.0199105281829834, 0.019859519958496094, 0.020029504776000975, 0.020028608322143555, 0.020025888442993165, 0.0199703369140625, 0.020107263565063475, 0.02005958366394043, 0.020104927062988283, 0.02006831932067871, 0.019762048721313475, 0.019937280654907227, 0.019860544204711915, 0.019753759384155273, 0.019812511444091796, 0.019623647689819335, 0.019521823883056642, 0.019736032485961914, 0.01964291191101074, 0.01986764717102051, 0.01986742401123047, 0.019815776824951174, 0.019780479431152343, 0.019823776245117188, 0.01976419258117676, 0.019871616363525392, 0.02001257514953613, 0.0198493766784668, 0.020006784439086912, 0.020053728103637695, 0.020167392730712892, 0.020043935775756836, 0.020023136138916015, 0.019734527587890623, 0.019518688201904298, 
0.01952137565612793, 0.01955936050415039, 0.019599552154541015, 0.019500864028930663, 0.019761152267456054, 0.019556352615356445, 0.01946441650390625, 0.019586816787719726, 0.01955023956298828, 0.01947654342651367, 0.01957881546020508, 0.01965056037902832, 0.019738624572753907, 0.019531839370727538]",tokens/s,50.79349275354165,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,910.90944,8768.06144,0.0,8365.539328,8230.228992,s,1,7.4174423828125,7.4174423828125,0.0,7.4174423828125,7.4174423828125,7.4174423828125,7.4174423828125,[7.4174423828125],,kWh,6.817816600005243e-06,7.23158748290547e-07,4.023336552000378e-06,1.1564311900296168e-05,,MB,1279.541248,8958.902272,0.0,8545.8944,8499.29728,s,10,2.4720732727050785,0.24720732727050784,0.010111952592268455,0.24897695922851562,0.25334713134765624,0.25358255462646484,0.25377089324951174,"[0.2176829833984375, 0.24699810791015625, 0.25329481506347656, 0.24830975341796874, 0.24923680114746094, 0.25324761962890624, 0.25381797790527344, 0.2487171173095703, 0.25207891845703123, 0.24868917846679686]",tokens/s,1035.5680101661012,kWh,6.686075579166368e-06,7.369100265720037e-07,4.45129775295425e-06,1.1874283358692623e-05,tokens/kWh,21559195.80718057,MB,1285.353472,8960.999424,0.0,8547.991552,8499.29984,s,10,18.74894958496094,1.8748949584960939,0.002637864965034121,1.8735967407226561,1.87883134765625,1.8791391845703125,1.8793854541015624,"[1.87133251953125, 1.8733330078125, 1.872854248046875, 1.8728079833984375, 1.8738604736328126, 1.873304931640625, 1.876041015625, 1.879447021484375, 1.8772054443359374, 1.878762939453125]",tokens/s,33.60188244920882,kWh,5.569641842625584e-05,6.141711014519738e-06,3.682104082044615e-05,9.865917026122174e-05,tokens/kWh,638562.0295933335,,s,630,18.746733732223497,0.02975672020987859,0.0003244896838928443,0.029706720352172852,0.02992302722930908,0.03002490882873535,0.031899384193420414,"[0.03172675132751465, 0.030295040130615233, 0.029833215713500977, 0.02954150390625, 0.02947491264343262, 0.029461280822753906, 0.029455839157104494, 0.029479263305664062, 0.029418912887573243, 0.02949724769592285, 0.029604736328125, 0.029464576721191408, 0.02953011131286621, 0.02957107162475586, 0.029515199661254883, 0.029528640747070314, 0.02946409606933594, 0.02948963165283203, 0.029542400360107423, 0.029491199493408202, 0.029497087478637694, 0.029573375701904298, 0.029585407257080077, 0.029631711959838866, 0.029573183059692382, 0.029606624603271483, 0.029626176834106444, 0.02959152030944824, 0.029622720718383788, 0.029622047424316407, 0.029597536087036132, 0.029691776275634765, 0.029839391708374022, 0.0299051513671875, 0.029849599838256836, 0.029857791900634766, 0.029865983963012696, 0.029747200012207032, 0.02970419120788574, 0.029692991256713867, 0.029727680206298828, 0.029761472702026368, 0.029712448120117186, 0.029671424865722655, 0.02977791976928711, 0.029685760498046877, 0.02971238327026367, 0.029659135818481445, 0.029615999221801758, 0.029662368774414062, 0.029640703201293944, 0.029694944381713866, 0.0300926399230957, 0.0296409912109375, 0.029668800354003905, 0.029682624816894532, 0.029755392074584962, 0.02969913673400879, 0.029974464416503907, 
0.029703168869018554, 0.02980659294128418, 0.02982419204711914, 0.02988703918457031, 0.031983583450317384, 0.03050428771972656, 0.029844127655029296, 0.029600799560546873, 0.029506528854370117, 0.029538463592529297, 0.02945996856689453, 0.029437984466552734, 0.029528383255004884, 0.029533344268798827, 0.02953868865966797, 0.02953878402709961, 0.02960383987426758, 0.02961756706237793, 0.029594207763671877, 0.029478303909301756, 0.02951433563232422, 0.02954806327819824, 0.029589439392089845, 0.029526527404785157, 0.02950147247314453, 0.02957107162475586, 0.029538368225097655, 0.029603712081909178, 0.029605663299560547, 0.029611967086791993, 0.029638912200927733, 0.029679359436035155, 0.02970044708251953, 0.02969599914550781, 0.02969772720336914, 0.029714752197265625, 0.02986969566345215, 0.029962623596191406, 0.029894655227661132, 0.029877920150756836, 0.029763744354248046, 0.029800640106201173, 0.02977791976928711, 0.029716480255126954, 0.029638656616210936, 0.029702144622802733, 0.029765024185180664, 0.02973107147216797, 0.02975164794921875, 0.029691904067993165, 0.029718528747558592, 0.029726463317871092, 0.030217920303344727, 0.02974131202697754, 0.029736448287963867, 0.029739839553833008, 0.029662559509277344, 0.029698720932006838, 0.02968073654174805, 0.029674400329589845, 0.029650239944458007, 0.029780128479003905, 0.02982352066040039, 0.029755392074584962, 0.02977305603027344, 0.029846271514892577, 0.029904287338256837, 0.032004096984863284, 0.030353408813476562, 0.029860992431640625, 0.029588064193725585, 0.029492992401123047, 0.029506080627441405, 0.02951340866088867, 0.02953011131286621, 0.029512319564819336, 0.029531360626220703, 0.029533952713012696, 0.029518144607543945, 0.02946268844604492, 0.029750783920288085, 0.02956915283203125, 0.029545024871826173, 0.029552703857421876, 0.029549856185913086, 0.029539039611816406, 0.029607168197631838, 0.02955897521972656, 0.029534784317016602, 0.029700096130371095, 0.02960972785949707, 0.02968191909790039, 0.029616128921508788, 0.02960588836669922, 0.030097408294677733, 0.029675167083740236, 0.029638975143432618, 0.02973075294494629, 0.029831071853637696, 0.029873407363891602, 0.029936479568481444, 0.02985379219055176, 0.029833215713500977, 0.029703935623168944, 0.029746591567993166, 0.029762399673461913, 0.029667327880859375, 0.029712095260620117, 0.02967171287536621, 0.029681407928466796, 0.029690080642700196, 0.02967571258544922, 0.02969379234313965, 0.029671424865722655, 0.029661184310913087, 0.0296279354095459, 0.02966371154785156, 0.029628416061401368, 0.029644256591796876, 0.029714271545410155, 0.02969260787963867, 0.02976358413696289, 0.02975689506530762, 0.029743040084838867, 0.02970684814453125, 0.02973695945739746, 0.02979020881652832, 0.029829120635986327, 0.029848800659179688, 0.029858623504638672, 0.03194675254821777, 0.030357440948486327, 0.029797632217407225, 0.029629087448120116, 0.029472896575927734, 0.02949123191833496, 0.029439552307128906, 0.029515392303466798, 0.02950339126586914, 0.029543327331542968, 0.029529407501220704, 0.02953696060180664, 0.029519872665405275, 0.02958131217956543, 0.029533504486083984, 0.029489856719970703, 0.02955264091491699, 0.02954035186767578, 0.029549568176269532, 0.029586431503295898, 0.02959564781188965, 0.02959564781188965, 0.02956287956237793, 0.029550527572631834, 0.029716543197631836, 0.02959939193725586, 0.029706592559814452, 0.02965878486633301, 0.029704639434814453, 0.02971638488769531, 0.02972979164123535, 0.02981808090209961, 0.029853696823120116, 0.029914207458496093, 
0.029814559936523436, 0.02982134437561035, 0.029800800323486327, 0.029793920516967772, 0.029726911544799804, 0.02965353584289551, 0.029742464065551758, 0.029694400787353515, 0.0297574405670166, 0.029679103851318358, 0.029770111083984373, 0.029716575622558594, 0.029722240447998045, 0.029748735427856447, 0.029655967712402344, 0.029664831161499025, 0.02962067222595215, 0.02969343948364258, 0.029653343200683593, 0.02967184066772461, 0.029855487823486328, 0.029706239700317383, 0.029834335327148437, 0.029747360229492186, 0.02987868881225586, 0.02975369644165039, 0.02978816032409668, 0.029822975158691405, 0.02995155143737793, 0.03221315383911133, 0.03041279983520508, 0.029878271102905272, 0.029628416061401368, 0.029584863662719726, 0.029550176620483398, 0.029488063812255858, 0.029483007431030273, 0.029470720291137696, 0.029635583877563477, 0.02952668762207031, 0.029483264923095703, 0.029493343353271483, 0.029491199493408202, 0.029519872665405275, 0.0295280647277832, 0.029718528747558592, 0.029863872528076173, 0.029585311889648438, 0.029638143539428712, 0.029669151306152344, 0.02952400016784668, 0.029561695098876954, 0.02955897521972656, 0.029570880889892577, 0.029560831069946288, 0.02961408042907715, 0.029663103103637695, 0.0296343994140625, 0.02968400001525879, 0.029714431762695313, 0.02977164840698242, 0.02992755126953125, 0.029939487457275392, 0.029958368301391602, 0.029832576751708983, 0.029874591827392577, 0.029837087631225587, 0.02980089569091797, 0.02969599914550781, 0.02976358413696289, 0.029683231353759765, 0.029669855117797853, 0.029654144287109375, 0.02965340805053711, 0.02971900749206543, 0.029732479095458984, 0.029754816055297853, 0.02971129608154297, 0.029698047637939453, 0.029736799240112306, 0.029735071182250977, 0.02973084831237793, 0.029738975524902345, 0.029761568069458007, 0.029846847534179686, 0.02976358413696289, 0.029711008071899414, 0.029767135620117187, 0.029760032653808593, 0.02976153564453125, 0.02982707214355469, 0.02988243293762207, 0.03183363151550293, 0.03044806480407715, 0.029879968643188478, 0.029581663131713867, 0.02953536033630371, 0.02950003242492676, 0.029466880798339843, 0.02953830337524414, 0.0295316162109375, 0.029514272689819335, 0.029642751693725586, 0.02952176094055176, 0.029616287231445312, 0.029558784484863283, 0.029613759994506834, 0.029601215362548828, 0.029559680938720703, 0.02950134468078613, 0.029527839660644532, 0.02953068733215332, 0.029538047790527343, 0.02953625679016113, 0.029525056838989257, 0.029617088317871094, 0.029613792419433595, 0.02968400001525879, 0.029650943756103516, 0.029669376373291017, 0.029663232803344725, 0.02967571258544922, 0.029790016174316408, 0.02969599914550781, 0.029920736312866212, 0.029946399688720704, 0.029927072525024415, 0.02983955192565918, 0.029778079986572267, 0.02977769660949707, 0.02974086380004883, 0.029751712799072266, 0.029683551788330077, 0.029784223556518555, 0.02969545555114746, 0.029714975357055664, 0.02969980812072754, 0.029693952560424806, 0.029767967224121093, 0.029724672317504884, 0.029720127105712892, 0.029721023559570313, 0.02970419120788574, 0.02971238327026367, 0.02970198440551758, 0.02970639991760254, 0.0297205753326416, 0.02974505615234375, 0.029743200302124025, 0.029802080154418945, 0.029768096923828126, 0.0297873592376709, 0.029829856872558593, 0.029890495300292967, 0.02992300796508789, 0.03180131149291992, 0.03034726333618164, 0.029828447341918946, 0.029634719848632814, 0.029499711990356444, 0.029611488342285157, 0.029498079299926757, 0.029509632110595704, 0.029521312713623047, 
0.029585472106933595, 0.02962998390197754, 0.029651872634887694, 0.029538400650024416, 0.02956876754760742, 0.0295098876953125, 0.02952828788757324, 0.02951759910583496, 0.029548608779907226, 0.029705663681030274, 0.029579776763916016, 0.02957923126220703, 0.029649951934814452, 0.029606847763061522, 0.029687904357910157, 0.029594816207885743, 0.029610784530639648, 0.02962544059753418, 0.029631551742553712, 0.02963644790649414, 0.029611072540283202, 0.02964780807495117, 0.02979033660888672, 0.029923200607299805, 0.02991856002807617, 0.029855871200561525, 0.02984809684753418, 0.029743104934692382, 0.02977574348449707, 0.02972230339050293, 0.02975708770751953, 0.02969468879699707, 0.029771839141845703, 0.029714239120483397, 0.0296977596282959, 0.02979596710205078, 0.02982793617248535, 0.029857120513916015, 0.029846111297607423, 0.029789567947387696, 0.029821632385253906, 0.02976358413696289, 0.029742496490478516, 0.029829727172851563, 0.030000736236572265, 0.029976896286010742, 0.030102943420410155, 0.029952768325805665, 0.029964223861694336, 0.02987001609802246, 0.030031936645507813, 0.029937664031982423, 0.029980384826660156, 0.02997804832458496, 0.03199955177307129, 0.030455392837524416, 0.030016319274902344, 0.029675552368164063, 0.029680831909179688, 0.029653791427612305, 0.02966268730163574, 0.030222560882568358, 0.02965456008911133, 0.029782815933227538, 0.029646848678588866, 0.029659135818481445, 0.029632383346557618, 0.02972480010986328, 0.029908191680908202, 0.029690303802490235, 0.029810943603515626, 0.029681760787963866, 0.02979199981689453, 0.029840768814086913, 0.029921791076660157, 0.029807039260864258, 0.029726655960083007, 0.02970195198059082, 0.029766944885253905, 0.029782400131225586, 0.029739391326904296, 0.029741216659545898, 0.02970732879638672, 0.02980544090270996, 0.02977769660949707, 0.029878559112548827, 0.02993152046203613, 0.029890560150146486, 0.02991923141479492, 0.029824319839477538, 0.02984377670288086, 0.02977561569213867, 0.02983184051513672, 0.02973027229309082, 0.029788671493530275, 0.029801759719848633, 0.029799135208129882, 0.029738719940185548, 0.029745248794555663, 0.029745344161987305, 0.029705472946166992, 0.029741632461547853, 0.029723840713500975, 0.029717504501342775, 0.029814783096313476, 0.02975948715209961, 0.029750463485717773, 0.029772607803344727, 0.02983241653442383, 0.029765792846679687, 0.02982102394104004, 0.029771711349487303, 0.02977619171142578, 0.029788448333740235, 0.029753087997436523, 0.029862144470214843, 0.02993356704711914, 0.03209008026123047, 0.030459903717041017, 0.029910720825195313, 0.029698368072509765, 0.029640703201293944, 0.029631488800048827, 0.029620256423950195, 0.02969615936279297, 0.02957401657104492, 0.029577119827270508, 0.029601823806762694, 0.02957107162475586, 0.029616128921508788, 0.02958745574951172, 0.029569023132324217, 0.029507583618164062, 0.029534208297729493, 0.029612031936645508, 0.029561920166015623, 0.029604799270629884, 0.02962553596496582, 0.029567808151245118, 0.029665023803710937, 0.029572383880615234, 0.02960483169555664, 0.029679616928100585, 0.0296529598236084, 0.029640031814575196, 0.0295852165222168, 0.029645696640014648, 0.029652671813964845, 0.029737279891967772, 0.030455808639526367, 0.02991427230834961, 0.029894720077514647, 0.029874399185180665, 0.029825599670410156, 0.029855743408203125, 0.029826208114624022, 0.029733312606811522, 0.029736831665039064, 0.029772512435913084, 0.02972447967529297, 0.029852832794189453, 0.029817983627319335, 0.029951711654663087, 0.02994918441772461, 
0.029934335708618164, 0.029820383071899412, 0.029813280105590822, 0.02979840087890625, 0.029847551345825195, 0.02987932777404785, 0.029901792526245117, 0.02980575942993164, 0.02982748794555664, 0.029796159744262696, 0.029823583602905275, 0.029872127532958984, 0.0297903995513916, 0.029817728042602538, 0.029850559234619142, 0.02993507194519043, 0.031926240921020504, 0.030361600875854492, 0.029925376892089843, 0.029662399291992186, 0.029585504531860353, 0.02955673599243164, 0.029534944534301756, 0.029533472061157227, 0.02950422477722168, 0.02958505630493164, 0.02957756805419922, 0.02960950469970703, 0.02963046455383301, 0.02959939193725586, 0.02965132713317871, 0.02958790397644043, 0.029618175506591796, 0.029739007949829102, 0.02960588836669922, 0.02960383987426758, 0.02959769630432129, 0.029661184310913087, 0.02956287956237793, 0.0295731201171875, 0.02959564781188965, 0.029591552734375, 0.029669151306152344, 0.029610336303710936, 0.029631359100341797, 0.029636896133422852, 0.029762271881103516, 0.029834943771362303, 0.029969919204711915, 0.029987648010253908, 0.03001523208618164, 0.030064640045166017, 0.030002496719360353, 0.030044511795043947, 0.029911647796630858, 0.029899904251098633, 0.030047103881835936, 0.03003625679016113, 0.02991689682006836, 0.029853248596191408, 0.02979270362854004, 0.02979216003417969, 0.029877599716186524, 0.029892799377441406, 0.029905216217041015, 0.02985091209411621, 0.029842304229736327, 0.029883615493774413, 0.0299683837890625, 0.029825376510620116, 0.03004444885253906, 0.02987766456604004, 0.02997929573059082, 0.029873407363891602, 0.029868671417236328, 0.02981648063659668, 0.029880992889404295, 0.029816831588745117, 0.0298885440826416]",tokens/s,33.60585417165772,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1203, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 755, in __init__ self.mlp = Qwen2MoeSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 693, in __init__ [Qwen2MoeMLP(config, intermediate_size=config.moe_intermediate_size) for _ in range(self.num_experts)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 693, in [Qwen2MoeMLP(config, intermediate_size=config.moe_intermediate_size) for _ in range(self.num_experts)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 294, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 12.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 346985 has 14.73 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 14.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight 
= Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 26.12 MiB is free. Process 320737 has 14.71 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 47.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, 
layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 614, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 274, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 336957 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 345513 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight 
= Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 334649 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, 
layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 348977 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in 
load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 349706 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 614, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 274, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, 
self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 337322 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1203, in __init__ self.model = 
Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 750, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 349, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 346590 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 335443 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 614, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 274, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, 
self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 338438 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1203, in __init__ self.model = 
Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 755, in __init__ self.mlp = Qwen2MoeSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 693, in __init__ [Qwen2MoeMLP(config, intermediate_size=config.moe_intermediate_size) for _ in range(self.num_experts)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 693, in [Qwen2MoeMLP(config, intermediate_size=config.moe_intermediate_size) for _ in range(self.num_experts)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 294, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 12.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 345877 has 14.73 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 14.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1032, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 330803 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 351366 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, 
**model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 341477 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 68.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 26.12 MiB is free. Process 325051 has 14.71 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 47.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 350989 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 355025 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = 
self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 80.12 MiB is free. Process 331572 has 14.66 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 791.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1203, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 750, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 446, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 349, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 348123 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained 
model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in 
range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 335033 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in 
load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 353825 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 351800 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 333515 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,908.304384,9644.670976,0.0,9242.148864,8603.568128,s,1,7.30405712890625,7.30405712890625,0.0,7.30405712890625,7.30405712890625,7.30405712890625,7.30405712890625,[7.30405712890625],,kWh,8.046909033282646e-06,8.803042577801537e-07,4.332225687991853e-06,1.3259438979054653e-05,,MB,1360.973824,9896.329216,0.0,9481.224192,8972.092416,s,10,1.0041068496704102,0.10041068496704102,0.00260569636323953,0.10008121490478515,0.10253631134033203,0.10411090621948242,0.10537058212280273,"[0.09470301055908203, 0.09989405059814453, 0.10023945617675781, 0.1010225601196289, 0.0998605728149414, 0.09905020904541016, 0.1015421142578125, 0.10568550109863281, 0.1021864013671875, 0.0999229736328125]",tokens/s,2549.5294657538675,kWh,3.0706066851135146e-06,3.3862858086547285e-07,2.0435953848749766e-06,5.452830650853965e-06,tokens/kWh,46948092.906554505,MB,1393.573888,9900.52352,0.0,9485.418496,8972.094976,s,10,24.07470336914062,2.407470336914062,0.019414109268976538,2.4001225585937496,2.4351316650390626,2.439321081542969,2.442672614746094,"[2.382367919921875, 2.3879912109375, 2.397542724609375, 2.393806884765625, 2.443510498046875, 2.402702392578125, 2.4197607421875, 2.3944765625, 2.41834375, 2.43420068359375]",tokens/s,26.168546724756123,kWh,7.060234584988329e-05,7.787298943181126e-06,4.679086034932578e-05,0.00012518050514239023,tokens/kWh,503273.25271885435,,s,630,24.072041172027603,0.038209589161948554,0.0009151587100469621,0.03801534461975098,0.03870123100280762,0.039017434310913084,0.04304822502136232,"[0.04093289566040039, 0.03837984085083008, 0.037628063201904295, 0.03777462387084961, 0.037765857696533206, 0.037951488494873044, 0.037713920593261716, 0.03779699325561523, 0.037718910217285156, 0.037689342498779296, 
0.03767295837402344, 0.03771187210083008, 0.03766681671142578, 0.037534751892089845, 0.037714401245117185, 0.03763987350463867, 0.037753662109375, 0.038250495910644534, 0.03757043075561523, 0.03754393768310547, 0.03785740661621094, 0.03795097732543945, 0.03792768096923828, 0.03864345550537109, 0.03779379272460937, 0.03750624084472656, 0.0374956169128418, 0.03751654434204101, 0.037647167205810544, 0.037856895446777346, 0.037678974151611325, 0.03779219055175781, 0.03789004898071289, 0.03798406219482422, 0.037658206939697264, 0.037752864837646484, 0.03803807830810547, 0.037738239288330075, 0.03828556823730469, 0.03748863983154297, 0.03767910385131836, 0.03803094482421875, 0.03759145736694336, 0.03764585494995117, 0.03743503952026367, 0.037477184295654296, 0.03774784088134766, 0.037677951812744144, 0.03774585723876953, 0.037558528900146486, 0.037384063720703124, 0.03756307220458984, 0.03763814544677734, 0.03786652755737305, 0.03766486358642578, 0.03747436904907227, 0.037660926818847654, 0.037574657440185545, 0.03838243103027344, 0.03795529556274414, 0.0376258544921875, 0.03781836700439453, 0.03801702499389648, 0.04275548934936523, 0.038978561401367184, 0.03788595199584961, 0.03764579010009766, 0.03755436706542969, 0.03804217529296875, 0.03749827194213867, 0.0373509750366211, 0.03752982330322266, 0.03751177597045898, 0.0377176628112793, 0.03743369674682617, 0.03767705535888672, 0.03766284942626953, 0.03851174545288086, 0.03758975982666016, 0.03741491317749023, 0.0375623664855957, 0.03767295837402344, 0.037763072967529294, 0.0377097282409668, 0.03754159927368164, 0.03760508728027344, 0.03759785461425781, 0.03787980651855469, 0.03775244903564453, 0.03799488067626953, 0.03810611343383789, 0.0380524787902832, 0.037853569030761716, 0.037652576446533206, 0.038272926330566406, 0.03746390533447266, 0.037713310241699216, 0.03807923126220703, 0.03833446502685547, 0.03835036849975586, 0.037994976043701174, 0.03772537612915039, 0.03784172821044922, 0.0376545295715332, 0.03760947036743164, 0.037490848541259766, 0.037674846649169924, 0.037582015991210936, 0.03812972640991211, 0.037827327728271486, 0.03792838287353516, 0.03767875289916992, 0.03754076766967773, 0.03792643356323242, 0.038074623107910155, 0.03826300811767578, 0.038268062591552736, 0.03813257598876953, 0.037913761138916015, 0.03788681411743164, 0.03778329467773438, 0.03801318359375, 0.03767078399658203, 0.03783078384399414, 0.03790595245361328, 0.03769606399536133, 0.0425492172241211, 0.03879935836791992, 0.03796377563476563, 0.03720924758911133, 0.037344062805175784, 0.037566558837890625, 0.03764015960693359, 0.03730633544921875, 0.037353473663330077, 0.037378047943115236, 0.03738623809814453, 0.04091852951049805, 0.0375830078125, 0.03791702270507812, 0.03762774276733399, 0.0399666862487793, 0.038376926422119144, 0.03780006408691406, 0.037863998413085936, 0.037668895721435544, 0.03779689788818359, 0.03778863906860352, 0.03817267227172851, 0.03846144104003906, 0.039098110198974606, 0.03821347045898438, 0.0379150390625, 0.037961727142333986, 0.037950687408447266, 0.037841697692871094, 0.03797510528564453, 0.03801571273803711, 0.03812931060791016, 0.03761004638671875, 0.0383403205871582, 0.03757084655761719, 0.03759862518310547, 0.03760188674926758, 0.037615615844726565, 0.03789004898071289, 0.037536800384521486, 0.037582942962646484, 0.039197502136230467, 0.03827004623413086, 0.03842351913452149, 0.037773311614990236, 0.03775183868408203, 0.03768537521362305, 0.03769251251220703, 0.037682945251464844, 0.03803116989135742, 0.03798649597167969, 
0.03780374526977539, 0.03791628646850586, 0.03812179183959961, 0.03810073471069336, 0.03805859375, 0.038019073486328124, 0.03822182464599609, 0.03788595199584961, 0.03794124984741211, 0.03779379272460937, 0.038010879516601564, 0.04318220901489258, 0.03889152145385742, 0.03797395324707031, 0.03818297576904297, 0.03797980880737305, 0.038261089324951175, 0.03807436752319336, 0.03787510299682617, 0.03805807876586914, 0.03787811279296875, 0.03786284637451172, 0.03776176071166992, 0.03774796676635742, 0.03774675369262695, 0.038136512756347656, 0.03760732650756836, 0.03766486358642578, 0.03790233612060547, 0.03800038528442383, 0.03855180740356445, 0.037771007537841794, 0.037803550720214844, 0.03760179138183594, 0.03762607955932617, 0.03745372772216797, 0.037353569030761716, 0.03795737457275391, 0.03770188903808594, 0.03743718338012696, 0.03752985763549805, 0.03796511840820312, 0.03817903900146485, 0.03771670532226563, 0.03766220855712891, 0.03775718307495117, 0.03780198287963867, 0.03764393615722656, 0.038101215362548825, 0.0380428466796875, 0.037882785797119144, 0.038035457611083984, 0.03798134231567383, 0.03815212631225586, 0.03760220718383789, 0.03781017684936523, 0.03795283126831055, 0.03791737747192383, 0.03818086242675781, 0.03781820678710938, 0.03784729766845703, 0.03791241455078125, 0.0379469108581543, 0.03773904037475586, 0.037967872619628903, 0.03791667175292969, 0.03775033569335937, 0.03800457763671875, 0.0378658561706543, 0.03876476669311523, 0.0377355842590332, 0.03783571243286133, 0.03809820938110352, 0.03834534454345703, 0.043353439331054684, 0.03904787063598633, 0.03853100967407227, 0.03829913711547851, 0.03853977584838867, 0.0383752326965332, 0.038541152954101564, 0.038248001098632814, 0.03851139068603516, 0.03846118545532227, 0.03838115310668945, 0.03856246566772461, 0.03845260620117188, 0.03868121719360352, 0.03852492904663086, 0.038547199249267576, 0.038399391174316407, 0.03853603363037109, 0.0387677116394043, 0.03871836853027344, 0.03847577667236328, 0.03846963119506836, 0.038545406341552735, 0.03875167846679688, 0.038598495483398436, 0.03858505630493164, 0.03860636901855469, 0.038494144439697266, 0.03854313659667969, 0.03868672180175781, 0.04451200103759766, 0.03918438339233398, 0.03986163330078125, 0.03848787307739258, 0.03882476806640625, 0.03867443084716797, 0.038467422485351566, 0.03859404754638672, 0.038613086700439454, 0.038893184661865234, 0.03843782424926758, 0.03859833526611328, 0.03870671844482422, 0.03897206497192383, 0.03864998245239258, 0.03895500946044922, 0.038612831115722654, 0.038825279235839845, 0.038582592010498046, 0.03853894424438477, 0.03834751892089844, 0.03855081558227539, 0.03865683364868164, 0.03847782516479492, 0.03861708831787109, 0.03877251052856445, 0.03850876617431641, 0.03855155181884766, 0.03903811264038086, 0.038283710479736326, 0.038336929321289064, 0.038389537811279295, 0.038440608978271486, 0.044263103485107425, 0.03939360046386719, 0.03853670501708984, 0.0383691520690918, 0.038842975616455076, 0.03833216094970703, 0.038615009307861325, 0.038088897705078124, 0.03827846527099609, 0.03836556625366211, 0.038336097717285154, 0.03820624160766602, 0.038485790252685545, 0.03918595123291015, 0.03841443252563476, 0.03844140625, 0.038451393127441405, 0.03817881774902344, 0.03868876647949219, 0.038152191162109376, 0.0382946891784668, 0.038271839141845704, 0.03823011016845703, 0.03800998306274414, 0.03781916809082031, 0.037994495391845705, 0.037961727142333986, 0.03784908676147461, 0.03801702499389648, 0.03771187210083008, 0.037711200714111326, 
0.0379068489074707, 0.037870849609375, 0.037733375549316404, 0.03788742446899414, 0.037537857055664064, 0.03780444717407227, 0.037807968139648436, 0.03785270309448242, 0.03762659072875976, 0.037556224822998044, 0.037335041046142575, 0.03750086212158203, 0.03781561660766602, 0.03773510360717774, 0.037462078094482425, 0.03759308624267578, 0.037547935485839845, 0.037781726837158205, 0.037665855407714846, 0.03767174530029297, 0.03793305587768555, 0.03787571334838867, 0.03822163009643555, 0.037988704681396486, 0.03788579177856445, 0.03762790298461914, 0.03764640045166016, 0.037680320739746094, 0.03777571105957031, 0.03799286270141602, 0.038300735473632816, 0.0383438720703125, 0.04278499221801758, 0.039060993194580076, 0.0379417610168457, 0.03774451065063476, 0.038158462524414065, 0.039059200286865235, 0.03792044830322266, 0.038085182189941405, 0.03772608184814453, 0.037787776947021484, 0.03860876846313477, 0.038473857879638675, 0.038045185089111325, 0.03773865509033203, 0.04001007843017578, 0.037934303283691406, 0.03773929595947265, 0.037665855407714846, 0.0376385612487793, 0.03763868713378906, 0.037666431427001955, 0.03775910568237305, 0.038201087951660155, 0.03967820739746094, 0.037984512329101563, 0.03839753723144531, 0.038160385131835936, 0.03854348754882812, 0.03797200012207031, 0.03799270248413086, 0.03770521545410156, 0.0380052490234375, 0.037881694793701175, 0.03774275207519531, 0.03788592147827148, 0.03847375869750977, 0.03797401428222656, 0.037750110626220704, 0.037879585266113285, 0.03831488037109375, 0.03866828918457031, 0.04796793746948242, 0.03819702529907226, 0.03822646331787109, 0.037693374633789065, 0.0377977294921875, 0.03794972610473633, 0.03833030319213867, 0.03812953567504883, 0.038735870361328126, 0.038021278381347653, 0.03817059326171875, 0.03868876647949219, 0.03815340805053711, 0.038617919921875, 0.038168575286865236, 0.03882144165039063, 0.038730175018310546, 0.03811484909057617, 0.0381956787109375, 0.038027263641357424, 0.037819713592529294, 0.03855839920043945, 0.04315574264526367, 0.038937854766845706, 0.03828591918945313, 0.0380840950012207, 0.03801990509033203, 0.03844095993041992, 0.03798969650268555, 0.03741971206665039, 0.0376627197265625, 0.03762483215332031, 0.037857566833496094, 0.03770217514038086, 0.03848550415039063, 0.03793171310424805, 0.03796073532104492, 0.03824060821533203, 0.038040191650390624, 0.03803340911865234, 0.03826800155639649, 0.03777833557128906, 0.037904384613037106, 0.03791462326049805, 0.03785542297363281, 0.037648193359375, 0.038112831115722653, 0.038150527954101565, 0.03811507034301758, 0.03790879821777344, 0.037996543884277346, 0.03764223861694336, 0.03787734222412109, 0.03769510269165039, 0.03783760070800781, 0.03765177536010742, 0.03746252822875976, 0.03786528015136719, 0.03757299041748047, 0.037459968566894535, 0.038094463348388674, 0.03791500854492187, 0.037956863403320315, 0.03763183975219726, 0.03752627182006836, 0.037569793701171875, 0.03790940856933594, 0.03815423965454102, 0.037959678649902344, 0.03810508728027344, 0.03797305679321289, 0.037902976989746096, 0.037988670349121095, 0.038084606170654296, 0.038010879516601564, 0.03781222534179687, 0.0381214714050293, 0.03774054336547852, 0.03783475112915039, 0.03774803161621094, 0.0380362548828125, 0.0377828483581543, 0.03784979248046875, 0.03801692962646484, 0.0378875846862793, 0.04556838226318359, 0.03982294464111328, 0.03899216079711914, 0.03848860931396485, 0.038416576385498044, 0.038311649322509765, 0.03820553588867188, 0.03826220703125, 0.03813974380493164, 
0.03797177505493164, 0.037956382751464846, 0.03826496124267578, 0.03802316665649414, 0.037943294525146484, 0.03787571334838867, 0.03783472061157227, 0.03833443069458008, 0.03791059112548828, 0.03787980651855469, 0.038075454711914064, 0.037993408203125, 0.037928256988525394, 0.03868332672119141, 0.038100990295410156, 0.03797971343994141, 0.03795759963989258, 0.038020862579345706, 0.03816726303100586, 0.03806579208374023, 0.03838195037841797, 0.03796524810791015, 0.038664287567138675, 0.03801705551147461, 0.03880352020263672, 0.03807791900634765, 0.038242431640625, 0.03789494323730469, 0.03824832153320312, 0.03828134536743164, 0.0381030387878418, 0.03818086242675781, 0.037967296600341795, 0.03828384017944336, 0.038316032409667966, 0.03798204803466797, 0.03891958236694336, 0.039441089630126956, 0.03870102310180664, 0.03835657501220703, 0.038222335815429685, 0.038354942321777344, 0.038539264678955076, 0.03864371109008789, 0.03861260986328125, 0.03829388809204102, 0.03817881774902344, 0.0382033920288086, 0.038598976135253905, 0.03821529769897461, 0.038107200622558596, 0.03818230438232422, 0.03797180938720703, 0.03795430374145508, 0.042668033599853515, 0.0389857292175293, 0.03810067367553711, 0.03883174514770508, 0.03803206253051758, 0.038260353088378905, 0.038389663696289066, 0.038308319091796876, 0.038526561737060545, 0.038377056121826174, 0.03825667190551758, 0.03837212753295898, 0.0401033935546875, 0.0386912956237793, 0.03868473434448242, 0.038537120819091795, 0.038782527923583984, 0.038625823974609376, 0.03851875305175781, 0.03920896148681641, 0.03847731018066406, 0.03858262252807617, 0.03857222366333008, 0.03926742553710937, 0.03866291046142578, 0.038487617492675784, 0.03889577484130859, 0.038424671173095705, 0.03875875091552734, 0.03869091033935547, 0.038454814910888674, 0.038408065795898436, 0.03869952011108398, 0.03858214569091797, 0.03845040130615234, 0.038607742309570314, 0.03863689422607422, 0.038656097412109375, 0.03866864013671875, 0.038676734924316405, 0.03878911972045898, 0.038703102111816406, 0.03853033447265625, 0.03853337478637695, 0.03867491149902344, 0.03841952133178711, 0.03844169616699219, 0.038471233367919924, 0.03881027221679687, 0.03858406448364258, 0.03878441619873047, 0.03866476821899414, 0.03856579208374023, 0.0384721908569336, 0.038467456817626956, 0.0390445442199707, 0.03827360153198242, 0.0382033920288086, 0.03801497650146484, 0.037967681884765625, 0.03816886520385742, 0.038264736175537106, 0.03814348983764648]",tokens/s,26.171440780521692,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ 
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 352644 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, 
in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 354605 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 349342 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 341876 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 350107 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.06 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 333914 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,905.367552,1910.3744,0.0,1507.852288,1469.840384,s,1,7.608578125,7.608578125,0.0,7.608578125,7.608578125,7.608578125,7.608578125,[7.608578125],,kWh,5.350151012498828e-06,5.830258397176191e-07,2.019446059986496e-06,7.952622912202943e-06,,MB,1238.888448,1952.31744,0.0,1539.309568,1426.274304,s,10,0.8463200912475586,0.08463200912475585,0.0011618211891471634,0.08413674926757814,0.08541670837402343,0.08669286651611327,0.08771379302978516,"[0.08796902465820312, 0.08409222412109375, 0.08416655731201172, 0.08450518035888673, 0.08436310577392578, 0.08387987518310547, 0.08410694122314454, 0.08407379150390625, 0.0840302734375, 0.08513311767578124]",tokens/s,3024.86024670206,kWh,2.7945695280555403e-06,3.0804174773703215e-07,1.8184697087427918e-06,4.9210809845353645e-06,tokens/kWh,52021090.65152295,MB,1263.869952,1960.706048,0.0,1547.698176,1426.276864,s,10,14.446344848632814,1.4446344848632813,0.015475466391310592,1.4465339965820312,1.459764501953125,1.4646258422851561,1.4685149145507814,"[1.4566873779296876, 1.45500732421875, 1.4586842041015624, 1.4367249755859375, 1.4203336181640625, 1.4347694091796874, 1.4508912353515624, 1.421582763671875, 1.4694871826171876, 1.4421767578125]",tokens/s,43.60964704920654,kWh,4.1214034936945395e-05,4.5457046497189935e-06,1.828146303785781e-05,6.404120262452221e-05,tokens/kWh,983741.6759546685,,s,630,14.444035465240503,0.022927040421016634,0.0005207535000173047,0.022956799507141114,0.02332654628753662,0.023529793643951417,0.024250022068023687,"[0.023746015548706055, 0.023021823883056642, 0.02310758399963379, 0.023035263061523436, 0.022983295440673828, 0.022943328857421875, 0.023034271240234376, 0.02297817611694336, 0.023112064361572267, 0.023020864486694336, 0.02309190368652344, 0.022937599182128905, 0.022948991775512694, 0.022804832458496092, 0.022926048278808595, 0.022904640197753907, 0.023076736450195312, 0.022880064010620118, 0.02293996810913086, 0.022984575271606446, 0.02289686393737793, 0.023221376419067383, 0.023059232711791992, 0.023191072463989257, 0.02305836868286133, 0.02295631980895996, 0.023042303085327148, 0.023085056304931642, 0.02310758399963379, 0.02319699287414551, 0.023141056060791015, 0.023164031982421875, 0.02342348861694336, 0.023085472106933593, 0.02316080093383789, 0.02302566337585449, 0.023126016616821288, 0.02328780746459961, 0.02329804801940918, 0.0229171199798584, 0.023068672180175782, 0.02305379295349121, 0.0229135684967041, 0.022889856338500977, 0.02331667137145996, 0.02308755111694336, 0.023625471115112304, 0.02361574363708496, 0.02309071922302246, 0.02313852882385254, 0.023270944595336913, 0.02303664016723633, 0.023406591415405274, 0.023200000762939453, 0.023153600692749025, 
0.02381644821166992, 0.023302719116210936, 0.023056095123291015, 0.023074176788330077, 0.022989728927612304, 0.023032960891723634, 0.02290380859375, 0.023498624801635743, 0.023791616439819335, 0.022957855224609375, 0.023410911560058593, 0.02281881523132324, 0.02279408073425293, 0.022494688034057617, 0.022325151443481444, 0.022429855346679687, 0.022436767578125, 0.022466272354125977, 0.022654016494750975, 0.022286783218383788, 0.02234364891052246, 0.022642463684082032, 0.02312182426452637, 0.022745952606201172, 0.022613216400146484, 0.022555423736572267, 0.02280793571472168, 0.02268582344055176, 0.022675968170166014, 0.023037120819091796, 0.02281350326538086, 0.023209983825683594, 0.022960128784179686, 0.02302566337585449, 0.023093248367309572, 0.024726688385009767, 0.024310239791870115, 0.023300479888916016, 0.023402303695678712, 0.023379936218261718, 0.023439456939697265, 0.023337087631225585, 0.02326118469238281, 0.023199039459228514, 0.023101919174194335, 0.022997215270996095, 0.02304819107055664, 0.02317228889465332, 0.02300601577758789, 0.02505523109436035, 0.02358233642578125, 0.023153024673461912, 0.02300908851623535, 0.0229869441986084, 0.023039936065673828, 0.02299295997619629, 0.02309529685974121, 0.023010719299316407, 0.02344550323486328, 0.023079519271850587, 0.02310758399963379, 0.022980127334594726, 0.02306287956237793, 0.0230850887298584, 0.023154272079467773, 0.02298521614074707, 0.02410259246826172, 0.02334342384338379, 0.02324412727355957, 0.02335958480834961, 0.023011903762817382, 0.023607776641845702, 0.023211679458618163, 0.023076383590698243, 0.023372575759887694, 0.023826688766479493, 0.024566816329956054, 0.02339023971557617, 0.023368160247802736, 0.023881887435913084, 0.023256607055664062, 0.023159168243408204, 0.023113887786865236, 0.023178688049316408, 0.023012992858886718, 0.02304083251953125, 0.02309503936767578, 0.02302547264099121, 0.023417407989501954, 0.023141439437866212, 0.023066944122314453, 0.023148511886596678, 0.02311235237121582, 0.022984064102172852, 0.022979040145874024, 0.023086463928222656, 0.023014175415039063, 0.02332598304748535, 0.02299078369140625, 0.02307676887512207, 0.023008127212524414, 0.022982656478881838, 0.023292064666748047, 0.02297657585144043, 0.023009056091308593, 0.023603200912475586, 0.023111167907714843, 0.022862112045288086, 0.023017696380615234, 0.023006624221801757, 0.023220832824707032, 0.023271488189697265, 0.023035167694091797, 0.023171743392944335, 0.02294281578063965, 0.02306505584716797, 0.02293600082397461, 0.022986751556396484, 0.023007232666015624, 0.02306662368774414, 0.022960351943969726, 0.022932575225830077, 0.02298313522338867, 0.022781568527221678, 0.022880863189697266, 0.023043136596679687, 0.023720928192138672, 0.02290889549255371, 0.022944831848144533, 0.022856639862060546, 0.023236608505249022, 0.02310758399963379, 0.022928543090820312, 0.023053152084350586, 0.023505023956298828, 0.022994943618774414, 0.023052255630493165, 0.02319977569580078, 0.023267135620117188, 0.023138496398925783, 0.023148191452026366, 0.02307516860961914, 0.023416831970214845, 0.023002752304077147, 0.023310720443725588, 0.023803520202636718, 0.023112064361572267, 0.023357120513916016, 0.022939359664916992, 0.02307731246948242, 0.02287424087524414, 0.023139680862426758, 0.022415584564208984, 0.02249513626098633, 0.022391359329223634, 0.02250457572937012, 0.022394912719726563, 0.022419488906860352, 0.02261689567565918, 0.022546239852905273, 0.022411455154418947, 0.022512672424316406, 0.023135007858276366, 0.02242188835144043, 
0.022483936309814455, 0.022413280487060545, 0.02251865577697754, 0.022716415405273437, 0.022583040237426757, 0.02240127944946289, 0.022409215927124023, 0.022675455093383787, 0.022822463989257812, 0.023136383056640626, 0.022968639373779298, 0.02289788818359375, 0.023022239685058593, 0.023009408950805665, 0.022933439254760744, 0.02297248077392578, 0.022953983306884765, 0.023125951766967773, 0.02302694320678711, 0.02288928031921387, 0.022705984115600587, 0.02257734489440918, 0.022896671295166017, 0.022371456146240233, 0.022281055450439454, 0.0230350399017334, 0.022384767532348634, 0.02234441566467285, 0.02241676712036133, 0.022409856796264647, 0.022285696029663085, 0.022680192947387694, 0.022482048034667967, 0.022487232208251953, 0.022382463455200195, 0.02240982437133789, 0.022210111618041994, 0.02235603141784668, 0.02237718391418457, 0.022355968475341798, 0.022208511352539064, 0.022239231109619142, 0.022349311828613282, 0.02239263916015625, 0.02247340774536133, 0.022656511306762696, 0.022467264175415037, 0.022472511291503905, 0.02245631980895996, 0.022549631118774414, 0.022373247146606445, 0.022452224731445314, 0.022330368041992187, 0.022307008743286134, 0.022266687393188475, 0.02229043197631836, 0.02226380729675293, 0.022266944885253905, 0.022393119812011718, 0.022248096466064453, 0.022345087051391602, 0.02235158348083496, 0.022834047317504883, 0.02241334342956543, 0.023085056304931642, 0.022194175720214843, 0.022966272354125978, 0.02237798309326172, 0.022325759887695314, 0.02234124755859375, 0.022431968688964844, 0.022255775451660156, 0.022618112564086915, 0.022343679428100584, 0.022476863861083985, 0.022364095687866212, 0.022456159591674806, 0.02229596710205078, 0.022376928329467773, 0.022458656311035156, 0.022429695129394533, 0.02229862403869629, 0.022382591247558595, 0.02290892791748047, 0.023111295700073243, 0.023048255920410158, 0.022902368545532226, 0.02298748779296875, 0.02312771224975586, 0.02289289665222168, 0.022978239059448242, 0.023001407623291014, 0.023207935333251953, 0.023146495819091797, 0.02322768020629883, 0.023087200164794923, 0.023538015365600587, 0.02307084846496582, 0.023037952423095705, 0.02288640022277832, 0.022981983184814453, 0.022892383575439452, 0.022902624130249023, 0.02289148712158203, 0.02235308837890625, 0.022479263305664063, 0.02347161674499512, 0.02243062400817871, 0.02237376022338867, 0.022299264907836912, 0.022344831466674805, 0.02233763122558594, 0.02247555160522461, 0.022327423095703125, 0.0224703369140625, 0.02250556755065918, 0.02272591972351074, 0.02270230484008789, 0.02262486457824707, 0.02260313606262207, 0.022713119506835938, 0.02254627227783203, 0.022639680862426757, 0.022700992584228516, 0.02257494354248047, 0.02260188865661621, 0.022525087356567382, 0.022346303939819335, 0.02245369529724121, 0.022282304763793944, 0.022299264907836912, 0.022288288116455078, 0.022530303955078126, 0.022607872009277344, 0.022452224731445314, 0.022498687744140624, 0.02247091293334961, 0.0224136962890625, 0.022573055267333983, 0.02242118453979492, 0.022554079055786134, 0.022621023178100587, 0.022578847885131835, 0.022557024002075196, 0.022908992767333984, 0.02278803253173828, 0.02284761619567871, 0.027514751434326173, 0.022992992401123048, 0.022992223739624024, 0.023153152465820313, 0.023260736465454103, 0.022917631149291993, 0.023009279251098632, 0.022939647674560547, 0.02297654342651367, 0.023319744110107423, 0.023007808685302736, 0.022932735443115235, 0.023469663619995116, 0.023134239196777345, 0.022950111389160158, 0.022915199279785158, 0.023347328186035157, 
0.022966175079345702, 0.0230350399017334, 0.02294371223449707, 0.022801279067993164, 0.023197696685791015, 0.023015424728393553, 0.022960128784179686, 0.02292531204223633, 0.02364825630187988, 0.023670783996582033, 0.023855104446411132, 0.023232511520385742, 0.023011327743530274, 0.02297177505493164, 0.022852415084838866, 0.02289574432373047, 0.022883007049560547, 0.022953792572021483, 0.02296441650390625, 0.022953311920166017, 0.022952608108520508, 0.02309084892272949, 0.023088544845581056, 0.02313667106628418, 0.02297500801086426, 0.02299193572998047, 0.023720863342285157, 0.022929439544677733, 0.02304204750061035, 0.02304204750061035, 0.022841344833374022, 0.022953983306884765, 0.02333603286743164, 0.022942623138427733, 0.022985984802246093, 0.02301424026489258, 0.02294902420043945, 0.0228338565826416, 0.022868032455444335, 0.022970367431640625, 0.022958080291748048, 0.02289459228515625, 0.022880512237548827, 0.022846687316894532, 0.02277337646484375, 0.02283359909057617, 0.022784479141235353, 0.022824960708618162, 0.02286796760559082, 0.02272051239013672, 0.023040000915527343, 0.022851200103759767, 0.022741472244262696, 0.02308905601501465, 0.022891551971435546, 0.023388511657714845, 0.023125919342041015, 0.02293974494934082, 0.023335103988647462, 0.02292736053466797, 0.022896223068237305, 0.022699743270874023, 0.02291097640991211, 0.023091583251953124, 0.022750816345214843, 0.022695743560791015, 0.023585695266723633, 0.022771711349487304, 0.022765567779541016, 0.02269593620300293, 0.02264419174194336, 0.022413856506347658, 0.022481952667236328, 0.02241606330871582, 0.02255081558227539, 0.022448095321655273, 0.02242121505737305, 0.022337856292724608, 0.022333280563354492, 0.022429983139038087, 0.022481824874877928, 0.02251055908203125, 0.022544384002685547, 0.023226367950439454, 0.022689279556274415, 0.02237286376953125, 0.02237785530090332, 0.0223536319732666, 0.02266819190979004, 0.022351871490478514, 0.022296575546264647, 0.022353919982910156, 0.022586687088012695, 0.022415775299072266, 0.022546720504760743, 0.022447839736938476, 0.022489023208618165, 0.02251103973388672, 0.022508447647094726, 0.0222608642578125, 0.02226211166381836, 0.022387136459350587, 0.02237001609802246, 0.022382495880126953, 0.02236787223815918, 0.02231737518310547, 0.022602272033691407, 0.022445247650146483, 0.022425792694091798, 0.022510208129882813, 0.022409215927124023, 0.023201791763305665, 0.022468608856201173, 0.022381696701049805, 0.022350847244262697, 0.02237843132019043, 0.02244806480407715, 0.02267510414123535, 0.022415327072143554, 0.02238083267211914, 0.02258937644958496, 0.02360531234741211, 0.023039615631103516, 0.022884735107421873, 0.02309113693237305, 0.02298636817932129, 0.022929759979248048, 0.02303580856323242, 0.022955615997314452, 0.022899295806884764, 0.02299622344970703, 0.02307967948913574, 0.023197696685791015, 0.02392678451538086, 0.023385919570922852, 0.023092832565307617, 0.023138368606567383, 0.02305289649963379, 0.023135936737060547, 0.023142656326293944, 0.023179264068603517, 0.023092479705810548, 0.023480255126953124, 0.025701183319091797, 0.02320342445373535, 0.0230664005279541, 0.02308358383178711, 0.023168575286865233, 0.023042816162109375, 0.023139936447143555, 0.02322787284851074, 0.02308121681213379, 0.023058143615722657, 0.02323459243774414, 0.023028415679931642, 0.023252031326293946, 0.023259552001953124, 0.0230382080078125, 0.02317750358581543, 0.023189504623413085, 0.02304323196411133, 0.023015392303466796, 0.02381683158874512, 0.02997068786621094, 
0.023805183410644533, 0.023392671585083007, 0.02350729560852051, 0.02332415962219238, 0.023099903106689454, 0.023152639389038086, 0.02310691261291504, 0.023160831451416015, 0.02304476737976074, 0.023150592803955077, 0.02309744071960449, 0.02322627258300781, 0.023359487533569336, 0.023060480117797853, 0.023072927474975587, 0.023063871383666994, 0.023032352447509764, 0.02305023956298828, 0.02317305564880371, 0.023234783172607423, 0.023519744873046877, 0.023090272903442382, 0.022952863693237305, 0.022930688858032226, 0.022978527069091797, 0.02333161544799805, 0.02406982421875, 0.02279840087890625, 0.022704383850097657, 0.02267478370666504, 0.022911487579345705, 0.02270774459838867, 0.02279897689819336, 0.02290483283996582, 0.022554399490356446, 0.022498592376708985, 0.022538368225097655, 0.022357984542846678, 0.022236000061035155, 0.02235331153869629, 0.02245078468322754, 0.022603328704833985, 0.022570816040039063, 0.022661760330200197, 0.02268582344055176, 0.022779424667358397, 0.022790496826171874, 0.023009279251098632, 0.022757375717163086, 0.022847488403320314, 0.022834751129150392, 0.023144031524658205, 0.02276848030090332, 0.022781951904296875, 0.022898687362670898, 0.022958080291748048, 0.022794143676757812, 0.02283020782470703, 0.02288505554199219, 0.023099679946899414, 0.022890495300292968, 0.02294175910949707, 0.023017471313476562, 0.022983999252319337, 0.022839935302734374, 0.022849695205688476, 0.022957279205322267, 0.023014015197753906, 0.022916511535644533, 0.02302566337585449, 0.023695104598999022, 0.022999488830566406, 0.022929824829101563, 0.022951936721801756, 0.022869440078735353, 0.023078815460205078, 0.022917984008789062, 0.02318726348876953, 0.022988800048828126, 0.023109632492065428, 0.022975936889648437, 0.02284601593017578, 0.02288844871520996]",tokens/s,43.61661957394752,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 344703 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, 
in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 343090 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1032, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 331205 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, 
layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 614, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 142.12 MiB is free. Process 353457 has 14.60 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() 
File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 614, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 367, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 274, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 338852 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 614, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 274, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, 
self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 338078 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,905.662464,1910.3744,0.0,1507.852288,1469.840384,s,1,7.23841845703125,7.23841845703125,0.0,7.23841845703125,7.23841845703125,7.23841845703125,7.23841845703125,[7.23841845703125],,kWh,5.23891162089664e-06,5.707650066324037e-07,9.350007479924649e-07,6.744677375521509e-06,,MB,1238.646784,1950.220288,0.0,1537.212416,1426.274304,s,10,0.8662442932128906,0.08662442932128907,0.000867037330216701,0.08639787292480469,0.08706428680419923,0.08806823806762695,0.08887139907836915,"[0.0890721893310547, 0.08665862274169922, 0.0863180160522461, 0.08613299560546875, 0.0868411865234375, 0.08647772979736328, 0.08669792175292969, 0.08598502349853515, 0.08601123046875, 0.08604937744140626]",tokens/s,2955.2864244623047,kWh,2.844231381674553e-06,3.136599179609319e-07,1.8415332963105627e-06,4.999424595946047e-06,tokens/kWh,51205892.81566248,MB,1263.628288,1960.706048,0.0,1547.698176,1426.276864,s,10,15.384216064453128,1.5384216064453127,0.00850671212334075,1.5353947143554687,1.5505861938476564,1.554587469482422,1.5577884899902346,"[1.53514013671875, 1.5585887451171876, 1.5334144287109375, 1.5309671630859376, 1.5394664306640624, 1.5348612060546876, 1.5356492919921876, 1.549697021484375, 1.5365870361328124, 1.5298446044921874]",tokens/s,40.951062918030786,kWh,4.515822755165431e-05,4.9806258934629e-06,1.952898380008958e-05,6.966783724520678e-05,tokens/kWh,904291.0256889662,,s,630,15.382199411392222,0.024416189541892397,0.000353078107425358,0.02430958366394043,0.02483679428100586,0.025054969692230224,0.025818413372039804,"[0.02462131118774414, 0.02461942481994629, 0.024478815078735353, 0.024521631240844728, 0.024909408569335937, 0.024606687545776367, 0.02437478446960449, 0.024620288848876952, 0.024499904632568358, 0.02444879913330078, 0.024373472213745116, 0.02436102485656738, 0.02426054382324219, 0.024201215744018553, 0.02426016044616699, 0.024273279190063477, 0.024341728210449217, 
0.024441535949707032, 0.02476166343688965, 0.024513376235961913, 0.024440832138061523, 0.024471103668212892, 0.024245920181274413, 0.024453535079956054, 0.024394111633300783, 0.024163679122924806, 0.02472947120666504, 0.024154912948608397, 0.024181919097900392, 0.02428316879272461, 0.024197887420654297, 0.024655935287475585, 0.024339775085449218, 0.024230239868164062, 0.024319488525390624, 0.024292192459106444, 0.02427494430541992, 0.024221696853637696, 0.02421286392211914, 0.024219743728637694, 0.024434335708618166, 0.02432908821105957, 0.024274303436279298, 0.024334560394287108, 0.024529312133789064, 0.0243056640625, 0.024270847320556642, 0.024266111373901368, 0.024240768432617188, 0.02427903938293457, 0.02428108787536621, 0.02419660758972168, 0.024418176651000975, 0.024222335815429687, 0.02416640090942383, 0.024221696853637696, 0.02447577667236328, 0.02419251251220703, 0.024211839675903322, 0.024291296005249024, 0.024239360809326174, 0.024421152114868165, 0.024371200561523438, 0.02431590461730957, 0.02756153678894043, 0.025287103652954102, 0.02430156707763672, 0.024352767944335937, 0.024363008499145508, 0.02424336051940918, 0.02427996826171875, 0.024181856155395507, 0.02423484802246094, 0.024160255432128908, 0.02414182472229004, 0.024160255432128908, 0.024219648361206055, 0.025226335525512695, 0.025955039978027342, 0.025045183181762694, 0.02611942481994629, 0.02522915267944336, 0.024998815536499023, 0.02493619155883789, 0.02493788719177246, 0.025184352874755858, 0.02498227119445801, 0.025069664001464844, 0.024994752883911134, 0.025318368911743164, 0.024993728637695313, 0.024915647506713868, 0.026132255554199218, 0.024731679916381834, 0.02467616081237793, 0.02479692840576172, 0.024638463973999023, 0.024774784088134764, 0.02468547248840332, 0.024658912658691405, 0.024793088912963866, 0.024411712646484375, 0.024287935256958007, 0.02418675231933594, 0.02430758476257324, 0.025411231994628906, 0.025536863327026368, 0.024372896194458007, 0.02477065658569336, 0.02482815933227539, 0.024319999694824217, 0.02424831962585449, 0.025165824890136718, 0.024231935501098634, 0.024247711181640624, 0.02425651168823242, 0.024179296493530275, 0.024207359313964845, 0.024305120468139648, 0.024301151275634765, 0.025168256759643556, 0.024855104446411133, 0.02447529602050781, 0.024299264907836914, 0.02427350425720215, 0.024141984939575194, 0.02429244804382324, 0.024193023681640623, 0.02414396858215332, 0.024116031646728514, 0.024073312759399414, 0.02410998344421387, 0.024200576782226563, 0.025361183166503907, 0.02422153663635254, 0.024255712509155272, 0.024269376754760742, 0.02427926445007324, 0.024360960006713867, 0.02427289581298828, 0.02411955261230469, 0.024604415893554686, 0.024169504165649416, 0.024115968704223632, 0.02428335952758789, 0.024172544479370117, 0.02414339256286621, 0.02407676887512207, 0.024106367111206055, 0.024836736679077147, 0.02429475212097168, 0.02443484878540039, 0.02437084770202637, 0.024298336029052733, 0.024358911514282225, 0.024270847320556642, 0.024217599868774413, 0.024310911178588867, 0.02413983917236328, 0.025124767303466796, 0.024263071060180663, 0.02467843246459961, 0.024279520034790038, 0.02429439926147461, 0.02435286331176758, 0.02434092712402344, 0.0243143367767334, 0.02427395248413086, 0.024203807830810546, 0.024190656661987303, 0.024160480499267577, 0.02411369514465332, 0.024247455596923827, 0.024131616592407226, 0.02425324821472168, 0.024244224548339844, 0.024318111419677733, 0.02438332748413086, 0.02428223991394043, 0.024490623474121093, 0.02448585510253906, 
0.02494438362121582, 0.02450486373901367, 0.02429747200012207, 0.024311296463012694, 0.024257024765014647, 0.024878271102905275, 0.0243558406829834, 0.024749887466430663, 0.02471164894104004, 0.02440825653076172, 0.024309280395507813, 0.02421366310119629, 0.024322303771972656, 0.024258560180664062, 0.024212543487548827, 0.0242259521484375, 0.02430851173400879, 0.024217599868774413, 0.02431590461730957, 0.024193023681640623, 0.024192832946777345, 0.024238367080688477, 0.02423184013366699, 0.024209407806396483, 0.02438547134399414, 0.024229951858520508, 0.02423209571838379, 0.02421846389770508, 0.024218624114990234, 0.024156160354614258, 0.024229888916015626, 0.024345760345458985, 0.024306528091430663, 0.024428543090820314, 0.024440128326416014, 0.02463132858276367, 0.024421024322509765, 0.02431590461730957, 0.024442880630493165, 0.02450227165222168, 0.02410086441040039, 0.024233983993530273, 0.024360383987426758, 0.024175167083740234, 0.024250368118286132, 0.024229888916015626, 0.02434048080444336, 0.02498150444030762, 0.02428019142150879, 0.02432294464111328, 0.024236032485961914, 0.024218975067138673, 0.02423644828796387, 0.024176895141601564, 0.024176063537597655, 0.024123199462890627, 0.02416716766357422, 0.024481311798095703, 0.02424880027770996, 0.024403968811035157, 0.02425651168823242, 0.02430739212036133, 0.024291807174682618, 0.02440380859375, 0.02435686492919922, 0.024211456298828125, 0.024215551376342775, 0.024263839721679687, 0.024212320327758788, 0.024270847320556642, 0.02415772819519043, 0.02452118492126465, 0.024328191757202147, 0.024901439666748047, 0.024305120468139648, 0.024345312118530273, 0.024256383895874024, 0.024479488372802734, 0.024304000854492188, 0.02424831962585449, 0.02426214408874512, 0.024385440826416017, 0.02461961555480957, 0.024372703552246095, 0.024321599960327147, 0.024931583404541015, 0.024315616607666016, 0.024294559478759765, 0.024220415115356445, 0.024256607055664063, 0.024540576934814453, 0.024291936874389648, 0.024292863845825196, 0.02436479949951172, 0.024262912750244142, 0.02436147117614746, 0.024272735595703126, 0.024260768890380858, 0.024241567611694336, 0.024144479751586914, 0.024313407897949217, 0.02415763282775879, 0.024254623413085936, 0.025643135070800783, 0.025116416931152345, 0.024398176193237305, 0.024243871688842775, 0.024275936126708985, 0.024244447708129883, 0.024306848526000978, 0.02418649673461914, 0.024224767684936522, 0.02431385612487793, 0.02759065628051758, 0.025028608322143556, 0.024355968475341796, 0.02452569580078125, 0.02429465675354004, 0.024254528045654297, 0.024357568740844725, 0.024385536193847656, 0.02414182472229004, 0.02427801513671875, 0.024171520233154296, 0.02411644744873047, 0.0241560001373291, 0.02510745620727539, 0.024315744400024413, 0.024239936828613282, 0.024888896942138673, 0.024265567779541017, 0.02408639907836914, 0.024190975189208985, 0.024129535675048826, 0.024590335845947265, 0.024363008499145508, 0.024302911758422852, 0.024228544235229493, 0.02413792037963867, 0.024305471420288084, 0.02447769546508789, 0.024147743225097655, 0.025022687911987303, 0.024336383819580077, 0.024240127563476564, 0.024221696853637696, 0.024163328170776367, 0.024175615310668946, 0.02421683120727539, 0.02423583984375, 0.02429420852661133, 0.0242806396484375, 0.02436534309387207, 0.024209535598754883, 0.024117408752441408, 0.02405311965942383, 0.024274688720703125, 0.024263551712036133, 0.024417375564575194, 0.024425376892089845, 0.024652992248535156, 0.02441094398498535, 0.024172000885009766, 0.02473628807067871, 
0.024313343048095702, 0.02437715148925781, 0.024182975769042967, 0.024227935791015624, 0.024314271926879884, 0.025191936492919922, 0.024483583450317384, 0.02431667137145996, 0.024795135498046874, 0.024432640075683593, 0.02469273567199707, 0.024363103866577147, 0.024326047897338866, 0.02449135971069336, 0.024302175521850586, 0.02434668731689453, 0.024401920318603516, 0.024304895401000975, 0.024316640853881837, 0.024272096633911132, 0.024232255935668946, 0.02439833641052246, 0.024260255813598634, 0.024744287490844726, 0.024240127563476564, 0.024299104690551757, 0.024373056411743164, 0.024415935516357422, 0.024496768951416014, 0.024338848114013673, 0.024239999771118164, 0.02422332763671875, 0.024109504699707032, 0.024416160583496094, 0.02432828712463379, 0.024174591064453126, 0.02409267234802246, 0.024952064514160155, 0.024264671325683593, 0.024199968338012696, 0.02420547294616699, 0.02414985656738281, 0.024203264236450195, 0.02455766487121582, 0.02423209571838379, 0.024315872192382813, 0.024166175842285156, 0.024227487564086915, 0.024142175674438476, 0.024260608673095704, 0.02438764762878418, 0.024292415618896484, 0.024302175521850586, 0.024413759231567383, 0.024464096069335937, 0.02448793601989746, 0.024247968673706054, 0.02416419219970703, 0.02446793556213379, 0.024342208862304687, 0.02450262451171875, 0.024078336715698243, 0.024200864791870117, 0.024334688186645508, 0.02446335983276367, 0.024193023681640623, 0.024894527435302734, 0.025880895614624023, 0.0242346248626709, 0.024383039474487306, 0.024305343627929688, 0.024478464126586913, 0.02424831962585449, 0.02449203109741211, 0.024354688644409178, 0.02492153549194336, 0.024383615493774415, 0.024232511520385743, 0.025104352951049805, 0.02433635139465332, 0.024227903366088866, 0.024236032485961914, 0.024329280853271483, 0.024457696914672852, 0.02446134376525879, 0.024352575302124025, 0.02416499137878418, 0.024215551376342775, 0.024671295166015624, 0.024252927780151368, 0.024277439117431642, 0.02431795120239258, 0.024276992797851563, 0.024219648361206055, 0.02428838348388672, 0.024218496322631837, 0.024442560195922853, 0.024262304306030272, 0.02430838394165039, 0.024215551376342775, 0.024258560180664062, 0.024403007507324218, 0.024311967849731445, 0.02442524719238281, 0.024358911514282225, 0.024293376922607423, 0.024160255432128908, 0.024552959442138672, 0.025466367721557616, 0.024549856185913085, 0.024441375732421874, 0.024336383819580077, 0.024331552505493164, 0.024255199432373045, 0.024288511276245116, 0.0244453125, 0.025133440017700195, 0.024187967300415038, 0.02431657600402832, 0.024258079528808593, 0.024392448425292968, 0.02566543960571289, 0.024553504943847657, 0.02456928062438965, 0.024240768432617188, 0.024565727233886718, 0.024328512191772463, 0.024336095809936523, 0.02421283149719238, 0.02448201560974121, 0.024267200469970704, 0.024090784072875977, 0.0242806396484375, 0.0244913272857666, 0.024507072448730467, 0.02435273551940918, 0.024475967407226563, 0.02468659210205078, 0.02467430305480957, 0.025266176223754884, 0.025001983642578125, 0.02482585525512695, 0.02489472007751465, 0.024933120727539063, 0.025062976837158205, 0.024969856262207032, 0.02482537651062012, 0.02460700798034668, 0.025429471969604493, 0.02516227149963379, 0.026161151885986327, 0.02475529670715332, 0.025039775848388672, 0.024635391235351564, 0.024226911544799806, 0.024183712005615234, 0.025044384002685546, 0.024746591567993165, 0.024543231964111328, 0.02483731269836426, 0.024503103256225588, 0.02446099281311035, 0.024742080688476564, 0.024356319427490235, 
0.02443052864074707, 0.02427766418457031, 0.02454105567932129, 0.024331775665283203, 0.02424083137512207, 0.024321407318115235, 0.024233695983886718, 0.02422006416320801, 0.024302335739135743, 0.024254207611083985, 0.024257856369018553, 0.024441535949707032, 0.024444255828857422, 0.024299711227416993, 0.024318431854248045, 0.024315040588378908, 0.024269023895263673, 0.024367040634155273, 0.024448991775512696, 0.024465696334838867, 0.024367551803588867, 0.02435820770263672, 0.024312511444091797, 0.024427616119384765, 0.02441923141479492, 0.024321599960327147, 0.02434707260131836, 0.02424831962585449, 0.02431795120239258, 0.024379392623901368, 0.025091615676879883, 0.024992223739624023, 0.02447769546508789, 0.024381439208984376, 0.02430975914001465, 0.024314943313598632, 0.024707487106323242, 0.024407968521118165, 0.02435545539855957, 0.024474943161010742, 0.02448863983154297, 0.024375295639038085, 0.024180736541748047, 0.024198144912719727, 0.02425075149536133, 0.024287872314453125, 0.024401887893676758, 0.024333951950073242, 0.024273311614990235, 0.024293376922607423, 0.024461311340332033, 0.02423961639404297, 0.024378944396972656, 0.024336416244506835, 0.024325023651123046, 0.02428009605407715, 0.024325056076049803, 0.024264736175537108, 0.024457599639892576, 0.024231935501098634, 0.024210655212402343, 0.02408723258972168, 0.024708255767822266, 0.02449625587463379, 0.02434886360168457, 0.024199199676513673, 0.024179296493530275, 0.024205312728881836, 0.024491071701049805, 0.024258752822875977, 0.02413030433654785, 0.02432204818725586, 0.024483232498168944, 0.0240666561126709, 0.024162303924560546, 0.024231935501098634, 0.024252416610717774, 0.02449612808227539, 0.024397663116455078, 0.024135263442993164, 0.02428371238708496, 0.024178688049316405, 0.024260608673095704, 0.024294879913330077, 0.02430940818786621, 0.024164735794067384, 0.024169151306152343, 0.024145727157592774, 0.024242176055908202, 0.024180160522460938, 0.024150592803955078, 0.024313215255737305, 0.02423852729797363, 0.02415225601196289, 0.024233983993530273, 0.024236032485961914, 0.024309152603149413, 0.024267360687255858, 0.024298528671264648, 0.02434681510925293, 0.024264863967895508, 0.024336191177368165, 0.024099647521972658, 0.024206815719604494, 0.024192575454711915, 0.024192127227783203, 0.024102176666259766, 0.02415443229675293, 0.024107263565063475, 0.02498953628540039, 0.024430431365966798, 0.02417695999145508, 0.02418195152282715, 0.02442255973815918, 0.02432863998413086, 0.02422118377685547, 0.024330976486206055, 0.0241213436126709, 0.02489753532409668, 0.024233503341674803, 0.024313631057739257]",tokens/s,40.95643172675396,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 614, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 142.12 MiB is free. Process 340724 has 14.60 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.50 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.30 GiB is free. Process 342721 has 13.44 GiB memory in use. Of the allocated memory 13.33 GiB is allocated by PyTorch, and 1.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 350571 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 614, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 367, in __init__ super().__init__(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 274, in __init__ self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 30.12 MiB is free. Process 339211 has 14.71 GiB memory in use. Of the allocated memory 14.37 GiB is allocated by PyTorch, and 229.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, 
**model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1032, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 331967 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,908.148736,9644.670976,0.0,9242.148864,8603.568128,s,1,7.34512890625,7.34512890625,0.0,7.34512890625,7.34512890625,7.34512890625,7.34512890625,[7.34512890625],,kWh,8.172709504196974e-06,8.942146916242757e-07,2.7525022019969647e-06,1.1819426397818214e-05,,MB,1237.389312,9894.232064,0.0,9481.224192,8972.092416,s,10,6.525970458984375,0.6525970458984375,0.004022121570438176,0.6538811340332031,0.6549237060546874,0.6551947937011718,0.6554116638183594,"[0.6409494018554688, 0.6526307983398437, 0.6517198486328125, 0.653466796875, 0.6535595092773437, 0.6548634643554687, 0.6544267578125, 0.6554658813476563, 0.6546852416992187, 0.6542027587890625]",tokens/s,392.2788213783009,kWh,1.9058075928649032e-05,2.101775203775548e-06,1.2662076101874892e-05,3.3821927234299474e-05,tokens/kWh,7569054.188620731,MB,1262.432256,9898.426368,0.0,9485.418496,8972.094976,s,10,25.03521923828125,2.503521923828125,0.010845116854443029,2.5069698486328127,2.5124940917968748,2.5158095458984375,2.5184619091796874,"[2.519125, 2.50672314453125, 2.487076904296875, 2.508948974609375, 2.510953369140625, 2.51175732421875, 2.483512451171875, 2.494474853515625, 2.507216552734375, 2.5054306640625]",tokens/s,25.164548950171348,kWh,7.294248712843832e-05,8.045844480247627e-06,4.838058384332364e-05,0.00012936891545200957,tokens/kWh,486979.42453858134,,s,630,25.031987205505356,0.0397333130246117,0.000568347995712648,0.03970408058166504,0.0401421630859375,0.04053649387359619,0.04196115474700928,"[0.04130374526977539, 0.0398584976196289, 0.039726367950439455, 0.04014147186279297, 0.039653438568115235, 0.039755775451660154, 0.0397946891784668, 0.039788703918457034, 0.03985916900634766, 0.03968851089477539, 0.03946144104003906, 0.04046368026733398, 
0.03967251205444336, 0.03971072006225586, 0.03975161743164062, 0.03958585739135742, 0.039659008026123044, 0.03953100967407226, 0.03976169586181641, 0.03962492752075195, 0.04186703872680664, 0.03978659057617188, 0.03999654388427734, 0.0394700813293457, 0.039512065887451174, 0.04028211212158203, 0.04053718566894531, 0.03959475326538086, 0.04332287979125977, 0.04000425720214844, 0.03984326553344727, 0.0395880012512207, 0.039817630767822264, 0.040027198791503904, 0.039714847564697266, 0.03971939086914063, 0.03963520050048828, 0.04084345626831055, 0.04048012924194336, 0.03951875305175781, 0.039645278930664066, 0.039948287963867186, 0.03983359909057617, 0.039717918395996095, 0.03962569427490235, 0.04000153732299805, 0.04090038299560547, 0.040022144317626955, 0.04070137786865234, 0.03986048126220703, 0.03973980712890625, 0.03994966506958008, 0.03991769790649414, 0.04052547073364258, 0.03969500732421875, 0.03965756988525391, 0.03962073516845703, 0.03980287933349609, 0.04002396774291992, 0.04020649719238281, 0.0397468147277832, 0.03965776062011719, 0.03965500640869141, 0.04098137664794922, 0.03966566467285156, 0.040336990356445314, 0.03939779281616211, 0.04005478286743164, 0.03991878509521484, 0.03953251266479492, 0.03921564865112305, 0.03940383911132812, 0.03938947296142578, 0.039834785461425784, 0.03929145431518555, 0.03986636734008789, 0.03943552017211914, 0.039381759643554684, 0.03996057510375976, 0.03958560180664063, 0.0393054084777832, 0.039703807830810546, 0.040233726501464846, 0.039757823944091795, 0.03958915328979492, 0.03976192092895508, 0.0402125129699707, 0.03991622543334961, 0.03958784103393555, 0.03981414413452149, 0.03939142227172852, 0.039602272033691405, 0.039750144958496096, 0.039776481628417966, 0.03961430358886719, 0.03952860641479492, 0.03931676864624024, 0.03956150436401367, 0.039782848358154294, 0.039772159576416014, 0.039808158874511716, 0.03979123306274414, 0.039618305206298825, 0.039987678527832034, 0.03974553680419922, 0.039684097290039064, 0.039857566833496096, 0.03962278366088867, 0.0401080322265625, 0.0397869758605957, 0.039815166473388675, 0.03974911880493164, 0.03974195098876953, 0.0398131217956543, 0.039759521484375, 0.04010019302368164, 0.039994430541992185, 0.039768417358398436, 0.04067184066772461, 0.04007686233520508, 0.039860641479492184, 0.039833087921142575, 0.0397092170715332, 0.03977830505371094, 0.0397652473449707, 0.0406822395324707, 0.041135231018066404, 0.04008214569091797, 0.03983171081542969, 0.03965695953369141, 0.03980883026123047, 0.03956806564331055, 0.03934572982788086, 0.0397108154296875, 0.039802593231201173, 0.03968796920776367, 0.03966592025756836, 0.03992348861694336, 0.03931820678710937, 0.03957551956176758, 0.03955660629272461, 0.039826080322265624, 0.039913471221923826, 0.03985177612304688, 0.04005628967285156, 0.039803680419921876, 0.03972678375244141, 0.03957177734375, 0.03972476959228516, 0.039713054656982424, 0.039723007202148435, 0.0397127685546875, 0.039733505249023436, 0.03993574523925781, 0.03994937515258789, 0.03965840148925781, 0.03968767929077149, 0.03955091094970703, 0.03962044906616211, 0.040250110626220706, 0.03995548629760742, 0.0396789436340332, 0.04024895858764648, 0.039541118621826174, 0.03955712127685547, 0.039436286926269534, 0.040005630493164065, 0.03923551940917969, 0.03927017593383789, 0.039301406860351565, 0.039172096252441405, 0.039053375244140626, 0.03849619293212891, 0.03863488006591797, 0.038609535217285156, 0.040188961029052735, 0.03842272186279297, 0.03851343917846679, 0.038553726196289065, 
0.038664287567138675, 0.03860454559326172, 0.038627361297607424, 0.03870896148681641, 0.038842529296875, 0.039212799072265624, 0.03954726409912109, 0.03898777770996094, 0.03851216125488281, 0.03858227157592774, 0.04104803085327149, 0.03896115112304688, 0.03909836959838867, 0.03936614227294922, 0.0393097915649414, 0.040036384582519534, 0.039485088348388674, 0.03915731048583984, 0.038897472381591795, 0.039269344329833984, 0.0392806396484375, 0.04002406311035156, 0.0393994255065918, 0.03929292678833008, 0.039366241455078124, 0.03934249496459961, 0.039327743530273435, 0.039501823425292966, 0.038988990783691405, 0.0394238395690918, 0.039201278686523434, 0.03974009704589844, 0.03952777481079101, 0.03958419036865234, 0.03962060928344727, 0.039841022491455075, 0.03955379104614258, 0.03949977493286133, 0.039519649505615234, 0.03951267242431641, 0.03955017471313477, 0.03966646575927734, 0.03995033645629883, 0.04009312057495117, 0.0431662712097168, 0.03995852661132813, 0.039839744567871094, 0.03952227020263672, 0.03987612915039063, 0.03986428833007812, 0.04238390350341797, 0.03995423889160156, 0.04097043228149414, 0.03970435333251953, 0.0398172492980957, 0.039800670623779295, 0.039626625061035155, 0.0396923828125, 0.03956364822387695, 0.03976544189453125, 0.03977395248413086, 0.04014163208007813, 0.03987046432495117, 0.03979673767089844, 0.03981926345825195, 0.03975987243652344, 0.04007731246948242, 0.039989246368408206, 0.039858177185058595, 0.040288257598876956, 0.04197580718994141, 0.04003430557250977, 0.040271873474121096, 0.04109107208251953, 0.03978854370117187, 0.03994009780883789, 0.0393994255065918, 0.03967951965332031, 0.03959344100952149, 0.03966844940185547, 0.039540897369384764, 0.03958182525634766, 0.039657470703125, 0.039546497344970705, 0.039472766876220707, 0.039588607788085935, 0.03977996826171875, 0.03961056137084961, 0.03982355117797851, 0.040204288482666016, 0.039847934722900394, 0.039717952728271486, 0.03977043151855469, 0.039819679260253905, 0.03998537445068359, 0.0400445442199707, 0.039924766540527346, 0.03964207839965821, 0.0398267822265625, 0.03973596954345703, 0.040169471740722655, 0.03993971252441406, 0.03978815841674805, 0.03978316879272461, 0.03993804931640625, 0.0404398078918457, 0.03995647811889649, 0.03966361618041992, 0.03978649520874023, 0.039815166473388675, 0.04014694213867188, 0.03997491073608399, 0.03971395111083984, 0.039930721282958985, 0.039839744567871094, 0.040275966644287106, 0.04008755111694336, 0.039702529907226565, 0.039583744049072264, 0.03967385482788086, 0.039959712982177736, 0.03982422256469727, 0.039602081298828126, 0.03989507293701172, 0.03974697494506836, 0.040204959869384764, 0.03983769607543945, 0.0400076789855957, 0.03973324966430664, 0.039946239471435545, 0.03988896179199219, 0.03975718307495117, 0.039936065673828125, 0.04024166488647461, 0.039763614654541014, 0.039793121337890626, 0.041121025085449216, 0.039850048065185543, 0.0397083854675293, 0.04001603317260742, 0.03989382553100586, 0.03992166519165039, 0.039537696838378905, 0.039481983184814454, 0.040535648345947264, 0.039701313018798826, 0.039693920135498044, 0.03962297439575195, 0.03963679885864258, 0.03963926315307617, 0.03988803100585937, 0.03958051300048828, 0.03956854248046875, 0.039990337371826175, 0.04375260925292969, 0.03985062408447266, 0.04025139236450195, 0.03988889694213867, 0.039937343597412106, 0.040075904846191404, 0.04000931167602539, 0.03975584030151367, 0.03968451309204102, 0.039213054656982424, 0.039675647735595704, 0.03989120101928711, 0.0397823371887207, 
0.03956537628173828, 0.03958348846435547, 0.03990758514404297, 0.03971686553955078, 0.040013824462890625, 0.03954278564453125, 0.039628673553466796, 0.039747711181640624, 0.03995369720458984, 0.03968073654174804, 0.03965542221069336, 0.04038601684570312, 0.0396864013671875, 0.039532833099365235, 0.039657470703125, 0.03946406555175781, 0.03957030487060547, 0.039684097290039064, 0.03954064178466797, 0.039395423889160154, 0.0396759033203125, 0.039765182495117186, 0.04002470397949219, 0.039741439819335936, 0.03962496185302734, 0.039954368591308596, 0.039634334564208985, 0.04003286361694336, 0.03960985565185547, 0.03994060897827149, 0.03958780670166016, 0.04074908828735352, 0.040901214599609374, 0.03964255905151367, 0.03971513748168945, 0.03967001724243164, 0.0400711669921875, 0.039620223999023436, 0.03990524673461914, 0.03982912063598633, 0.039801631927490234, 0.039608318328857424, 0.03961254501342774, 0.03972060775756836, 0.03972937774658203, 0.039839744567871094, 0.04018380737304687, 0.04001587295532227, 0.03961193466186524, 0.03964896011352539, 0.04022761535644531, 0.039896255493164064, 0.03982828903198242, 0.039605728149414064, 0.039614974975585936, 0.03901033782958984, 0.039858177185058595, 0.0392806396484375, 0.03912268829345703, 0.03938739013671875, 0.03957680130004883, 0.03959040069580078, 0.03934143829345703, 0.03901295852661133, 0.038926078796386716, 0.03891891098022461, 0.03922310256958008, 0.03877478408813476, 0.038340606689453126, 0.038174240112304685, 0.03838819122314453, 0.03835055923461914, 0.03840438461303711, 0.038332416534423826, 0.03828736114501953, 0.038308895111083985, 0.038214622497558595, 0.038416000366210935, 0.03841791915893555, 0.04303756713867188, 0.03922275161743164, 0.03909891128540039, 0.039239070892333985, 0.03915836715698242, 0.038973438262939454, 0.03926153564453125, 0.0419252815246582, 0.04201238250732422, 0.03912323379516602, 0.03916511917114258, 0.039300960540771486, 0.039130081176757814, 0.039155712127685545, 0.038778881072998046, 0.03872358322143555, 0.04123065567016602, 0.03948524856567383, 0.03936928176879883, 0.038991039276123046, 0.039336097717285155, 0.039276351928710936, 0.03937571334838867, 0.03875993728637695, 0.03917465591430664, 0.038988929748535156, 0.039046016693115235, 0.03957158279418945, 0.03945171356201172, 0.03904595184326172, 0.0394851188659668, 0.03896352005004883, 0.0391572151184082, 0.039174686431884764, 0.039565345764160154, 0.03978598403930664, 0.03964976119995117, 0.03967967987060547, 0.039803424835205076, 0.03968560028076172, 0.039327552795410156, 0.03938105773925781, 0.03951836776733399, 0.03938662338256836, 0.039377025604248043, 0.03992009735107422, 0.039569793701171876, 0.039612350463867185, 0.04054188919067383, 0.03927059173583984, 0.03935641479492188, 0.039569408416748046, 0.039384544372558596, 0.03974780654907226, 0.03969260787963867, 0.03968000030517578, 0.04013663864135742, 0.03961248016357422, 0.039669761657714846, 0.03959996795654297, 0.03946294403076172, 0.03934998321533203, 0.040441665649414066, 0.039548831939697264, 0.03957206344604492, 0.039726272583007816, 0.03953311920166016, 0.04129359817504883, 0.03973510360717773, 0.039702720642089843, 0.03985055923461914, 0.039750816345214844, 0.039947105407714845, 0.03964495849609375, 0.03959830474853516, 0.039484607696533204, 0.039712799072265624, 0.0394796142578125, 0.03988147354125977, 0.04152608108520508, 0.040048255920410156, 0.0395494384765625, 0.03973459243774414, 0.0397558708190918, 0.03960825729370117, 0.03971699142456055, 0.039564640045166015, 0.0398427848815918, 
0.03973129653930664, 0.03993180847167969, 0.04012441635131836, 0.03963904190063477, 0.039929855346679685, 0.03955641555786133, 0.03963974380493164, 0.03997647857666015, 0.03984000015258789, 0.03987020874023438, 0.03950947189331055, 0.03989932632446289, 0.039903488159179684, 0.04117561721801758, 0.039941600799560543, 0.039518688201904295, 0.039607872009277345, 0.0395494384765625, 0.03950374221801758, 0.03968179321289062, 0.03948796844482422, 0.039706527709960936, 0.039583744049072264, 0.03967795181274414, 0.039761470794677733, 0.039766048431396486, 0.04005100631713867, 0.04041532897949219, 0.03977561569213867, 0.03978060913085937, 0.03967567825317383, 0.03983011245727539, 0.0396544303894043, 0.039612991333007816, 0.0395463981628418, 0.039830142974853516, 0.03953689575195313, 0.03957350540161133, 0.039669727325439455, 0.03979267120361328, 0.03957350540161133, 0.039686145782470705, 0.039685504913330075, 0.039811710357666015, 0.039782398223876955, 0.03984569549560547, 0.03953641510009766, 0.039578014373779294, 0.03967795181274414, 0.039581024169921875, 0.03975775909423828, 0.03984867095947266, 0.039964672088623046, 0.03991551971435547, 0.04110720062255859, 0.03954278564453125, 0.03948774337768555, 0.0396055679321289, 0.039745471954345704, 0.039868961334228514, 0.03953481674194336, 0.039685760498046875, 0.03972748947143555, 0.03974697494506836, 0.03976595306396485, 0.03945334243774414, 0.03940966415405273, 0.03944038391113281, 0.039486656188964846, 0.039811614990234376, 0.03944857788085938, 0.039285022735595705, 0.03993395233154297, 0.03956918334960938, 0.03978812789916992, 0.0400574722290039, 0.03958784103393555, 0.039589473724365234, 0.03963740921020508, 0.039362655639648435, 0.03971062469482422, 0.0395552978515625, 0.039739166259765625, 0.039681121826171874, 0.039535518646240234, 0.039739105224609376, 0.0403642578125, 0.039892574310302735, 0.040202720642089844, 0.04016307067871094, 0.039702110290527344, 0.039419937133789065, 0.04070159912109375, 0.039375839233398435, 0.03949091339111328, 0.03986095809936523, 0.03949094390869141, 0.03995852661132813, 0.03971311950683594, 0.03969440078735351, 0.039567520141601566, 0.03954073715209961, 0.03958528137207031, 0.03952076721191406, 0.03985404968261719, 0.03974662399291992, 0.03949667358398438, 0.039755233764648436, 0.039653919219970704, 0.03965951919555664, 0.040005630493164065, 0.04017475128173828, 0.03976892852783203, 0.039626750946044925, 0.0416993293762207, 0.03990521621704102, 0.039852096557617185]",tokens/s,25.167798098803836,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ 
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 353028 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,908.025856,9644.670976,0.0,9242.148864,8603.568128,s,1,7.269357421875,7.269357421875,0.0,7.269357421875,7.269357421875,7.269357421875,7.269357421875,[7.269357421875],,kWh,7.93315690413389e-06,8.678519208594412e-07,4.118892183990752e-06,1.2919901008984083e-05,,MB,1357.39392,9896.329216,0.0,9481.224192,8972.092416,s,10,1.0594597549438478,0.10594597549438478,0.002209518405138895,0.10647923278808594,0.10758931198120118,0.10799595375061036,0.1083212671661377,"[0.09987356567382813, 0.1049052505493164, 0.10662521362304687, 0.10654803466796875, 0.10840259552001953, 0.1074989471435547, 0.1059940185546875, 0.10609283447265624, 0.10641043090820312, 0.1071088638305664]",tokens/s,2416.325856696352,kWh,3.1881384401715522e-06,3.5141630463014385e-07,2.1117378005000336e-06,5.65129254530173e-06,tokens/kWh,45299371.41775268,MB,1389.932544,9902.620672,0.0,9485.418496,8972.094976,s,10,26.619530029296875,2.6619530029296876,0.012659738058700366,2.6645010986328126,2.6783451171875,2.6787874267578125,2.6791412744140626,"[2.65330419921875, 2.66928564453125, 2.639783203125, 2.64390576171875, 2.668478759765625, 2.678246826171875, 2.658293701171875, 2.6679638671875, 2.679229736328125, 2.661038330078125]",tokens/s,23.666834061556898,kWh,7.7780948989827e-05,8.579331646471572e-06,5.169208302029968e-05,0.00013805236365659827,tokens/kWh,456348.5791283581,,s,630,26.616984779357928,0.04224918218945701,0.000744671521537597,0.04209475135803223,0.04267039489746094,0.04298309478759765,0.04492313083648682,"[0.0424010238647461, 0.04241648101806641, 0.0423853759765625, 0.042447296142578125, 0.042396671295166014, 0.0421426887512207, 0.042399166107177734, 0.04314748764038086, 0.042056064605712894, 0.0419409294128418, 0.041869022369384765, 0.042217056274414064, 0.04222848129272461, 0.04231987380981445, 0.042278911590576174, 0.04229497528076172, 0.041943359375, 0.041878654479980466, 0.04237196731567383, 0.041864990234375, 0.042024608612060546, 0.04209920120239258, 0.04203558349609375, 0.041900863647460936, 0.04183504104614258, 0.04178956985473633, 
0.04183465576171875, 0.04192671966552734, 0.04183564758300781, 0.04236943817138672, 0.041968097686767576, 0.04224176025390625, 0.04221366500854492, 0.04209868621826172, 0.041985279083251954, 0.04201548767089844, 0.04261478424072265, 0.04207964706420898, 0.041998943328857424, 0.042008575439453126, 0.04207321548461914, 0.041836830139160154, 0.04188425445556641, 0.04198998260498047, 0.04225584030151367, 0.04225708770751953, 0.0421212158203125, 0.04195328140258789, 0.041957374572753905, 0.04202073669433594, 0.042000511169433596, 0.042033153533935545, 0.04206985473632813, 0.04193904113769531, 0.04182227325439453, 0.042141185760498044, 0.04214835357666016, 0.04204748916625976, 0.04211260986328125, 0.04211475372314453, 0.04197040176391602, 0.04262092971801758, 0.04183039855957031, 0.04274499130249024, 0.042051681518554686, 0.042004383087158204, 0.0419474868774414, 0.04210121536254883, 0.04177920150756836, 0.04202473449707031, 0.04210201644897461, 0.04199481582641602, 0.042243553161621095, 0.04410787200927734, 0.04214828872680664, 0.042045856475830076, 0.04206735992431641, 0.04208009719848633, 0.041796382904052735, 0.043296737670898436, 0.042036479949951175, 0.04202899169921875, 0.04184966278076172, 0.04178524780273438, 0.04190012741088867, 0.04205136108398438, 0.04198012924194336, 0.04204127883911133, 0.04169529724121094, 0.04190412902832031, 0.04189382553100586, 0.042086463928222656, 0.041867134094238284, 0.04188131332397461, 0.04165264129638672, 0.04429209518432617, 0.04252057647705078, 0.04214137649536133, 0.04205814361572266, 0.041882720947265625, 0.04213139343261719, 0.04190911865234375, 0.04251987075805664, 0.043987648010253906, 0.04234230422973633, 0.04212287902832031, 0.04239548873901367, 0.04230323028564453, 0.04206681442260742, 0.041924606323242186, 0.04219289779663086, 0.0422127685546875, 0.04196412658691406, 0.04218675231933594, 0.042251838684082034, 0.042084449768066405, 0.04203334426879883, 0.042070175170898436, 0.042060798645019534, 0.0516126708984375, 0.04270284652709961, 0.043302913665771485, 0.04231305694580078, 0.042309310913085936, 0.042064865112304686, 0.041910015106201175, 0.044463905334472656, 0.04198214340209961, 0.04176095962524414, 0.04175558471679688, 0.04178134536743164, 0.041820350646972655, 0.0418326416015625, 0.041806240081787106, 0.041600799560546874, 0.041754016876220705, 0.04158547210693359, 0.04163174438476563, 0.041653919219970706, 0.041703166961669924, 0.041562721252441405, 0.042012001037597654, 0.04174095916748047, 0.041799678802490234, 0.041729568481445316, 0.04169075012207031, 0.04154009628295898, 0.04172793579101562, 0.04159529495239258, 0.04165824127197266, 0.041758846282958985, 0.04208969497680664, 0.04186806488037109, 0.04181401443481445, 0.041744384765625, 0.04173619079589844, 0.041783008575439456, 0.04232150268554687, 0.04206639862060547, 0.042162078857421875, 0.04204553604125977, 0.04192217636108399, 0.04189654541015625, 0.041936641693115236, 0.041842784881591794, 0.04204723358154297, 0.04232003021240234, 0.04190758514404297, 0.04212211227416992, 0.041789119720458984, 0.04170988845825195, 0.041881057739257814, 0.04158518218994141, 0.04185238265991211, 0.04171011352539063, 0.041931873321533204, 0.04199856185913086, 0.04190041732788086, 0.0416723518371582, 0.041773086547851564, 0.04180841445922852, 0.041942432403564454, 0.04209120178222656, 0.04232806396484375, 0.04201219177246094, 0.04197951889038086, 0.042104736328125, 0.042048446655273436, 0.041850208282470706, 0.043057121276855466, 0.042205184936523435, 0.04197273635864258, 0.041716064453125, 
0.04179830551147461, 0.04169318389892578, 0.041904319763183595, 0.04168431854248047, 0.04176035308837891, 0.041970558166503906, 0.04178732681274414, 0.04161891174316406, 0.04186582565307617, 0.042028545379638675, 0.04224256134033203, 0.042339393615722656, 0.042144287109375, 0.041947551727294925, 0.0420208625793457, 0.04202627182006836, 0.041925342559814456, 0.04187910461425781, 0.042146240234375, 0.042038753509521486, 0.04185961532592773, 0.04203513717651367, 0.041825729370117186, 0.04221811294555664, 0.04190371322631836, 0.04217273712158203, 0.04208224105834961, 0.04199756622314453, 0.0422305908203125, 0.04213555145263672, 0.0418573112487793, 0.04180972671508789, 0.04162508773803711, 0.041740798950195314, 0.04170137786865234, 0.04193417739868164, 0.04187827301025391, 0.041731422424316406, 0.04189158248901367, 0.04183529663085937, 0.041961185455322264, 0.04192902374267578, 0.04201260757446289, 0.04192179107666016, 0.04216083145141602, 0.04191651153564453, 0.041907905578613285, 0.04220044708251953, 0.04201161575317383, 0.04190774536132812, 0.04186569595336914, 0.04196352005004883, 0.04201635360717773, 0.04212348937988281, 0.041953472137451174, 0.04200191879272461, 0.041931262969970705, 0.04179692840576172, 0.04184889602661133, 0.042815807342529294, 0.0419323844909668, 0.04172227096557617, 0.041718975067138675, 0.0421855354309082, 0.04208163070678711, 0.04180854415893555, 0.042202720642089846, 0.04181798553466797, 0.04205839920043945, 0.04169279861450195, 0.041677055358886717, 0.04308582305908203, 0.04175222396850586, 0.04198950576782227, 0.04201776123046875, 0.041907264709472654, 0.041874366760253905, 0.041737598419189455, 0.041812255859375, 0.04160137557983398, 0.041717758178710936, 0.041777153015136716, 0.042213375091552735, 0.041992191314697266, 0.04176486587524414, 0.04165631866455078, 0.0419708480834961, 0.04181078338623047, 0.04192256164550781, 0.041774654388427736, 0.042842559814453125, 0.041903617858886716, 0.04183910369873047, 0.04192588806152344, 0.042054401397705075, 0.041869312286376956, 0.041831775665283205, 0.042068641662597654, 0.04225833511352539, 0.04233840179443359, 0.04213145446777344, 0.04225580978393555, 0.04234297561645508, 0.04218675231933594, 0.042512222290039064, 0.04291600036621094, 0.04293017578125, 0.04297510528564453, 0.0425753288269043, 0.04264134216308594, 0.05218716812133789, 0.04298963165283203, 0.04272208023071289, 0.04273955154418945, 0.04249190521240234, 0.042469375610351565, 0.04253696060180664, 0.04252467346191406, 0.042794239044189455, 0.04274441528320312, 0.042663616180419923, 0.04284163284301758, 0.0431328010559082, 0.04222166442871094, 0.04206796646118164, 0.04188979339599609, 0.0419997444152832, 0.04193548965454102, 0.042412033081054686, 0.04593868637084961, 0.042412033081054686, 0.04244889450073242, 0.04229529571533203, 0.041998336791992184, 0.04238921737670898, 0.04192448043823242, 0.042651966094970704, 0.045405792236328124, 0.042528865814208984, 0.042291614532470705, 0.042061408996582034, 0.04222355270385742, 0.04233059310913086, 0.04227686309814453, 0.04483686447143555, 0.042240001678466796, 0.04186111831665039, 0.04210483169555664, 0.04173209762573242, 0.041770145416259764, 0.042079200744628904, 0.04202620697021484, 0.041921184539794924, 0.04568585586547851, 0.042496929168701174, 0.043394081115722655, 0.042078369140625, 0.0421541748046875, 0.042068286895751955, 0.041854465484619144, 0.04248364639282227, 0.04224019241333008, 0.04233270263671875, 0.042493568420410154, 0.042424385070800784, 0.04241043090820312, 0.04244617462158203, 
0.04270969772338867, 0.042403839111328126, 0.042323070526123045, 0.04256003189086914, 0.04237276840209961, 0.04241683197021484, 0.042446849822998046, 0.04224204635620117, 0.04223123168945313, 0.04226899337768555, 0.04278012847900391, 0.042192703247070314, 0.042408000946044924, 0.042361217498779295, 0.04266038513183594, 0.04261017608642578, 0.04251084899902344, 0.0425164794921875, 0.04306668853759766, 0.04252947235107422, 0.042544990539550784, 0.04272556686401367, 0.04234636688232422, 0.04248934555053711, 0.042579647064208984, 0.04239865493774414, 0.042253376007080075, 0.042240062713623044, 0.04230838394165039, 0.04218479919433594, 0.041992191314697266, 0.04240399932861328, 0.0426341438293457, 0.042931137084960935, 0.04220099258422851, 0.04261283111572266, 0.04269670486450195, 0.04243891143798828, 0.04248953628540039, 0.04219894409179688, 0.04223139190673828, 0.04198169708251953, 0.04182099151611328, 0.04189952087402344, 0.041912734985351564, 0.04218889617919922, 0.041883167266845704, 0.04200908660888672, 0.04183846282958984, 0.042006271362304684, 0.04195920181274414, 0.04192448043823242, 0.04207276916503906, 0.042081600189208986, 0.042019519805908206, 0.04208025741577148, 0.042262527465820314, 0.04210892868041992, 0.04188470458984375, 0.042130401611328125, 0.04186316680908203, 0.041918430328369144, 0.041735198974609374, 0.041761505126953126, 0.041669952392578126, 0.04198089599609375, 0.04197929763793945, 0.0418985595703125, 0.04185500717163086, 0.04202652740478516, 0.04198608016967773, 0.042823486328125, 0.0425432014465332, 0.04221807861328125, 0.04220451354980469, 0.042089054107666016, 0.042014015197753905, 0.04190483093261719, 0.04203260803222656, 0.0421258544921875, 0.042819263458251954, 0.04312819290161133, 0.04256796646118164, 0.042415679931640624, 0.042441505432128906, 0.042428417205810545, 0.042641407012939454, 0.04262911987304688, 0.042307422637939456, 0.04214799880981445, 0.04227231979370117, 0.04266630554199219, 0.0423138542175293, 0.042608638763427735, 0.04224204635620117, 0.04223385620117188, 0.04184883117675781, 0.04213145446777344, 0.04202678298950195, 0.04336563110351563, 0.04271203231811523, 0.04250400161743164, 0.04243270492553711, 0.042546688079833986, 0.04261939239501953, 0.04227065658569336, 0.04207417678833008, 0.042172416687011716, 0.04181564712524414, 0.04201308822631836, 0.04254515075683594, 0.041844833374023435, 0.0417729606628418, 0.04188703918457031, 0.04184953689575195, 0.042081729888916015, 0.04205132675170899, 0.04204217529296875, 0.041760768890380856, 0.04177852630615234, 0.04217059326171875, 0.04216617584228516, 0.04216675186157227, 0.041998401641845706, 0.041799678802490234, 0.04249375915527344, 0.0421828498840332, 0.04226867294311523, 0.042409984588623044, 0.04237516784667969, 0.04276838302612305, 0.0421396484375, 0.04257177734375, 0.043335105895996096, 0.042487743377685544, 0.04249059295654297, 0.042616382598876956, 0.04257321548461914, 0.04279801559448242, 0.04258575820922852, 0.042791038513183596, 0.04244044876098633, 0.04246371078491211, 0.04241779327392578, 0.043149600982666014, 0.04241196823120117, 0.04244940948486328, 0.04236835098266602, 0.04239632034301758, 0.042624191284179686, 0.042402015686035154, 0.042742591857910156, 0.042083873748779296, 0.04261299133300781, 0.04239974212646484, 0.04227481460571289, 0.0421580810546875, 0.04233232116699219, 0.04199612808227539, 0.04245836639404297, 0.04236947250366211, 0.04262259292602539, 0.04247187042236328, 0.042559326171875, 0.042508705139160156, 0.04272742462158203, 0.04274111938476562, 
0.04258643341064453, 0.04254547119140625, 0.04328012847900391, 0.044415199279785156, 0.042731552124023436, 0.04264755249023437, 0.042769630432128905, 0.042595104217529295, 0.042890270233154296, 0.04221596908569336, 0.04222166442871094, 0.04197564697265625, 0.04206985473632813, 0.042035873413085935, 0.04236288070678711, 0.042493473052978514, 0.04258617782592773, 0.04239756774902344, 0.04226102447509766, 0.04219823837280273, 0.04202166366577149, 0.04197916793823242, 0.04217433547973633, 0.043070304870605466, 0.04332486343383789, 0.04236140823364258, 0.04244889450073242, 0.04220454406738281, 0.04202048110961914, 0.04199321746826172, 0.04217446517944336, 0.04585472106933594, 0.04264700698852539, 0.04237366485595703, 0.04250342559814453, 0.04225715255737305, 0.0422737922668457, 0.04223651123046875, 0.042610912322998046, 0.04230723190307617, 0.04495836639404297, 0.04281135940551758, 0.04264761734008789, 0.042691646575927736, 0.04358220672607422, 0.04407923126220703, 0.042668033599853515, 0.043030529022216796, 0.04219865417480469, 0.04217030334472656, 0.042129150390625, 0.041968318939208986, 0.04237315368652344, 0.04201878356933594, 0.042205184936523435, 0.042066177368164065, 0.04223769760131836, 0.042031105041503904, 0.04196694564819336, 0.04187612915039062, 0.042253822326660154, 0.042211841583251954, 0.042123264312744144, 0.042180606842041016, 0.04197135925292969, 0.04184099197387695, 0.041963489532470706, 0.041789409637451175, 0.04203939056396484, 0.04211062240600586, 0.04241641616821289, 0.0422256965637207, 0.04203724670410156, 0.042083744049072266, 0.04209519958496094, 0.042039295196533204, 0.041942047119140624, 0.04200751876831055, 0.04205158233642578, 0.04236048126220703, 0.04186966323852539, 0.042049537658691405, 0.0421163215637207, 0.042097633361816406, 0.04207302474975586, 0.04207500839233398, 0.04203724670410156, 0.04198166275024414, 0.04164182281494141, 0.042063518524169924, 0.04190227127075195, 0.04191206359863281, 0.04177187347412109, 0.04186111831665039, 0.04227648162841797, 0.04218304061889649, 0.042092544555664066, 0.042229759216308595, 0.042094303131103517, 0.04245113754272461, 0.04212131118774414, 0.041887680053710935, 0.04250764846801758]",tokens/s,23.669097203248185,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File 
""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 614, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 142.12 MiB is free. Process 352201 has 14.60 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1032, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 332352 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,907.321344,9644.670976,0.0,9242.148864,8603.568128,s,1,7.255408203125,7.255408203125,0.0,7.255408203125,7.255408203125,7.255408203125,7.255408203125,[7.255408203125],,kWh,8.137915362537265e-06,8.901511703366888e-07,4.806114955985263e-06,1.3834181488859218e-05,,MB,1241.64096,9894.232064,0.0,9481.224192,8972.092416,s,10,6.565172546386718,0.6565172546386717,0.0037727107917042715,0.6578529052734374,0.6601906433105469,0.6601959991455078,0.6602002838134765,"[0.6466392211914063, 0.6547903442382812, 0.6577991333007812, 0.6549818115234375, 0.6556998291015625, 0.6580377807617187, 0.6579066772460938, 0.6602013549804687, 0.6589269409179688, 0.660189453125]",tokens/s,389.93643836656673,kWh,1.916432853671305e-05,2.1134719840393614e-06,1.2661676796000068e-05,3.3939477316752476e-05,tokens/kWh,7542838.612710125,MB,1266.83136,9898.426368,0.0,9485.418496,8972.094976,s,10,25.532588134765625,2.5532588134765626,0.013129707930949728,2.5529560546875,2.56809267578125,2.5723758056640627,2.575802309570313,"[2.541270751953125, 2.576658935546875, 2.548864013671875, 2.528070556640625, 2.567140869140625, 2.551242431640625, 2.54432421875, 2.554669677734375, 2.561695556640625, 2.558651123046875]",tokens/s,24.674349371663613,kWh,7.440672016495948e-05,8.205373494347653e-06,4.95059840491998e-05,0.00013211807770850692,tokens/kWh,476846.17497234076,,s,630,25.53009631347654,0.04052396240234375,0.0007552245071343777,0.04037017631530762,0.041170777130126957,0.04160150623321533,0.04399069892883302,"[0.04164601516723633, 0.040638591766357424, 0.040223873138427735, 0.04014339065551758, 0.04012271881103516, 0.04013900756835938, 0.039995006561279293, 0.04017164611816406, 0.04080640029907227, 0.04037836837768555, 0.04062822341918945, 0.040062721252441404, 0.040037921905517575, 0.04371926498413086, 0.040093822479248045, 0.03997241592407227, 0.040163776397705075, 0.03995427322387695, 0.03962076950073242, 0.04087542343139648, 0.041677406311035156, 0.04033468627929687, 0.0402193603515625, 0.040148479461669925, 0.04034336090087891, 0.04410156631469726, 0.040129215240478515, 0.039960609436035154, 0.04012995147705078, 0.04000831985473633, 0.03998668670654297, 0.03990163040161133, 0.04020163345336914, 0.04009203338623047, 0.04012464141845703, 0.04005478286743164, 0.040114177703857425, 0.040134464263916016, 0.04022496032714844, 0.039935455322265626, 0.04015478515625, 0.03977436828613281, 0.039813854217529296, 0.039897087097167966, 0.039914817810058595, 0.03988684844970703, 0.03997062301635742, 0.04043775939941406, 0.03991436767578125, 0.04008345413208008, 0.04286259078979492, 0.04016332626342774, 0.040013694763183595, 0.04007657623291016, 0.039854816436767575, 0.03975302505493164, 0.0397872314453125, 0.03982080078125, 0.04027248001098633, 
0.03992764663696289, 0.03993123245239258, 0.04141139221191406, 0.04004643249511719, 0.04162691116333008, 0.04041116714477539, 0.04015801620483399, 0.04012851333618164, 0.040027454376220704, 0.039970977783203125, 0.03981289672851562, 0.040691776275634764, 0.04022524642944336, 0.040779903411865236, 0.040933502197265624, 0.04099683380126953, 0.04071219253540039, 0.04043571090698242, 0.040368000030517576, 0.04027769470214844, 0.0401064338684082, 0.04015923309326172, 0.039951744079589846, 0.03999193572998047, 0.0401506576538086, 0.03982169723510742, 0.044338848114013674, 0.041439582824707034, 0.041051841735839846, 0.0398482551574707, 0.03979228973388672, 0.0397022705078125, 0.040120929718017576, 0.04024886322021484, 0.03990371322631836, 0.03993190383911133, 0.03988479995727539, 0.040158912658691405, 0.0399117431640625, 0.039826465606689454, 0.04294915390014648, 0.04056927871704102, 0.040611328125, 0.04077817535400391, 0.04075321578979492, 0.04084531021118164, 0.040888320922851565, 0.04054227066040039, 0.040801536560058596, 0.04130633544921875, 0.041083358764648435, 0.0413573112487793, 0.04542031860351563, 0.04092655944824219, 0.0465313606262207, 0.04147209548950195, 0.0413040657043457, 0.04128153610229492, 0.041250911712646485, 0.041321441650390624, 0.04098345565795898, 0.041170303344726565, 0.042576446533203124, 0.04119955062866211, 0.04154790496826172, 0.04118505477905274, 0.04181836700439453, 0.04164326477050781, 0.04055526351928711, 0.04014694213867188, 0.04027772903442383, 0.040067359924316405, 0.040120319366455076, 0.03982745742797852, 0.039793697357177735, 0.039940414428710935, 0.040005664825439456, 0.04051769638061523, 0.04052211380004883, 0.04008073425292969, 0.0399450569152832, 0.040378078460693356, 0.04093366241455078, 0.0408985595703125, 0.040992767333984374, 0.04091305541992187, 0.045578079223632814, 0.04101456069946289, 0.040785728454589845, 0.04055337524414063, 0.04012851333618164, 0.04007731246948242, 0.0399854736328125, 0.03985347366333008, 0.04028444671630859, 0.04038860702514648, 0.040545886993408206, 0.040823135375976566, 0.04103379058837891, 0.04086783981323242, 0.040564735412597655, 0.0407204475402832, 0.04063382339477539, 0.04054473495483398, 0.04042924880981445, 0.0402537612915039, 0.04057292938232422, 0.04088217544555664, 0.04027536010742187, 0.040029918670654294, 0.03996761703491211, 0.04250944137573242, 0.04064547348022461, 0.03997216033935547, 0.040513919830322265, 0.03992777633666992, 0.039797119140625, 0.039860031127929685, 0.04010982513427734, 0.04019449615478515, 0.03993600082397461, 0.03999334335327148, 0.040066974639892575, 0.04029216003417969, 0.0400285758972168, 0.040161151885986325, 0.04050534439086914, 0.039984161376953126, 0.039946495056152345, 0.03984048080444336, 0.041895904541015626, 0.04069587326049805, 0.04035532760620117, 0.04006329727172851, 0.040315006256103514, 0.040670398712158204, 0.040581153869628905, 0.04049795150756836, 0.040486785888671876, 0.04049728012084961, 0.04024063873291016, 0.039919422149658206, 0.03996128082275391, 0.03982307052612305, 0.03986857604980469, 0.039800960540771486, 0.039981056213378906, 0.039972862243652346, 0.039982112884521484, 0.03981001663208008, 0.03982339096069336, 0.04001993560791016, 0.03998489761352539, 0.04007100677490234, 0.039777889251708984, 0.039979167938232425, 0.0398455696105957, 0.039846881866455075, 0.04006911849975586, 0.04022009658813477, 0.04034415817260742, 0.040419296264648436, 0.04055574417114258, 0.04064873504638672, 0.0405552978515625, 0.04089238357543945, 0.040120319366455076, 
0.039890945434570314, 0.03988479995727539, 0.03988684844970703, 0.04042652893066406, 0.039809120178222655, 0.04024204635620117, 0.04010307312011719, 0.03996083068847656, 0.03985878372192383, 0.04064051055908203, 0.0399851188659668, 0.039863872528076175, 0.03987254333496094, 0.04007932662963867, 0.03987503814697266, 0.03978854370117187, 0.03997081756591797, 0.039954368591308596, 0.039873950958251955, 0.03990185546875, 0.03982950210571289, 0.039858177185058595, 0.0397209587097168, 0.03997475051879883, 0.0400549430847168, 0.039929409027099606, 0.04142256164550781, 0.040357566833496096, 0.03979324722290039, 0.03981107330322266, 0.040277183532714846, 0.040202495574951175, 0.04061036682128906, 0.04045363235473633, 0.04016793441772461, 0.04011318588256836, 0.04005775833129883, 0.04013808059692383, 0.04026867294311524, 0.04031584167480469, 0.04013119888305664, 0.040417057037353515, 0.04015907287597656, 0.04040323257446289, 0.04006335830688477, 0.040255104064941406, 0.04030860900878906, 0.0408724479675293, 0.040253440856933595, 0.04003392028808594, 0.040063232421875, 0.04021465682983399, 0.04028531265258789, 0.04045852661132812, 0.0405120964050293, 0.04193423843383789, 0.04139219284057617, 0.04165216064453125, 0.04144800186157226, 0.04150592041015625, 0.041382816314697264, 0.0413306884765625, 0.041256385803222655, 0.0412119026184082, 0.04171171188354492, 0.041306591033935545, 0.04117504119873047, 0.04071369552612305, 0.040683361053466795, 0.040534721374511716, 0.040425472259521485, 0.04048486328125, 0.043302913665771485, 0.04105955123901367, 0.041054878234863285, 0.0409986572265625, 0.04110521697998047, 0.04129644775390625, 0.041127166748046874, 0.040855743408203124, 0.04064230346679688, 0.04027577590942383, 0.04060825729370117, 0.040612350463867186, 0.040499198913574216, 0.04299980926513672, 0.040880126953125, 0.040581119537353515, 0.04043571090698242, 0.04187590408325195, 0.040995166778564456, 0.040987678527832035, 0.04077635192871094, 0.04055686569213867, 0.042262527465820314, 0.04080230331420898, 0.04179964828491211, 0.04059273529052734, 0.04050307083129883, 0.04050742340087891, 0.0405984001159668, 0.041181182861328124, 0.04067241668701172, 0.04051590347290039, 0.04075136184692383, 0.04057526397705078, 0.04028211212158203, 0.04050291061401367, 0.04035369491577148, 0.04112227249145508, 0.040306015014648436, 0.04038518524169922, 0.04031283187866211, 0.04019756698608398, 0.0402355842590332, 0.040177406311035155, 0.040358081817626956, 0.04017129516601563, 0.04016156768798828, 0.04008060836791992, 0.039929759979248046, 0.04025433731079102, 0.040097793579101565, 0.04025139236450195, 0.0404686393737793, 0.040167102813720705, 0.04061587142944336, 0.04026739120483398, 0.04038876724243164, 0.0402334098815918, 0.04006604766845703, 0.04017049789428711, 0.03994214248657227, 0.040011550903320314, 0.04109334564208984, 0.040341022491455075, 0.04010969543457031, 0.040119136810302734, 0.04010953521728516, 0.04008195114135742, 0.04042099380493164, 0.040462718963623044, 0.04031686401367188, 0.04042758560180664, 0.04060979080200195, 0.040347713470458985, 0.04069734573364258, 0.04025696182250976, 0.04020659255981445, 0.040398975372314454, 0.040693599700927736, 0.04084598541259766, 0.04149446487426758, 0.040664161682128906, 0.040201183319091796, 0.040253440856933595, 0.04020633697509766, 0.040269119262695316, 0.040315135955810544, 0.0401596794128418, 0.040081409454345705, 0.04033257675170898, 0.040739646911621095, 0.040273822784423825, 0.04004198455810547, 0.040097633361816404, 0.04005120086669922, 
0.04016304016113281, 0.040169921875, 0.0400445442199707, 0.04007321548461914, 0.040769535064697264, 0.04008755111694336, 0.04004832077026367, 0.04102371215820313, 0.040078529357910155, 0.0401736946105957, 0.04045084762573242, 0.04038582229614258, 0.04060166549682617, 0.0403375358581543, 0.04021430587768555, 0.040231231689453126, 0.0401798095703125, 0.04027577590942383, 0.04011264038085938, 0.04008927917480469, 0.040726814270019535, 0.04033731079101562, 0.04066880035400391, 0.040290847778320316, 0.040228126525878906, 0.04006924819946289, 0.04050185775756836, 0.04038860702514648, 0.040027713775634764, 0.04028665542602539, 0.04015289688110352, 0.04011590576171875, 0.04062259292602539, 0.040202239990234374, 0.04021452713012695, 0.040689407348632814, 0.0407064323425293, 0.040822654724121096, 0.040564350128173825, 0.0412020492553711, 0.04059056091308594, 0.04055734252929687, 0.04069753646850586, 0.04050156784057617, 0.040369537353515624, 0.040823104858398435, 0.04049337768554687, 0.040531871795654296, 0.0422789421081543, 0.04090998458862305, 0.04109782409667969, 0.04071184158325195, 0.0405203857421875, 0.041592159271240235, 0.04042176055908203, 0.04487724685668945, 0.040607967376708985, 0.0405241928100586, 0.040401023864746095, 0.04023401641845703, 0.0402825927734375, 0.04069222259521484, 0.04040703964233398, 0.04063641738891602, 0.04037606430053711, 0.04034553527832031, 0.0404169921875, 0.040501216888427734, 0.04035385513305664, 0.04058579254150391, 0.04025548934936524, 0.0405667839050293, 0.04043775939941406, 0.040434719085693356, 0.04037936019897461, 0.04047372817993164, 0.04019219207763672, 0.04040902328491211, 0.04061014556884766, 0.04017747116088867, 0.04027043151855469, 0.040488639831542966, 0.04044012832641602, 0.04039436721801758, 0.04032902526855469, 0.04012908935546875, 0.040347137451171876, 0.039977470397949216, 0.0401448974609375, 0.04005263900756836, 0.040220767974853515, 0.04005295944213867, 0.04002547073364258, 0.04021903991699219, 0.040417278289794925, 0.04113353729248047, 0.04015353775024414, 0.04014083099365234, 0.040242366790771485, 0.04024409484863281, 0.04034764862060547, 0.0404213752746582, 0.04039907073974609, 0.04115420913696289, 0.041609153747558594, 0.040570911407470704, 0.040681121826171875, 0.04034620666503906, 0.04019731140136719, 0.04034764862060547, 0.04023164749145508, 0.042224193572998045, 0.040755199432373046, 0.040574977874755856, 0.04041932678222656, 0.04056291198730469, 0.0407529296875, 0.04056902313232422, 0.040597312927246096, 0.04047577667236328, 0.04021952056884766, 0.040474624633789064, 0.0402388801574707, 0.040224609375, 0.04050508880615234, 0.047248001098632815, 0.04351747131347656, 0.04143727874755859, 0.04087350463867188, 0.040573280334472654, 0.04033315277099609, 0.040323745727539065, 0.040515583038330076, 0.04030195236206055, 0.040421920776367186, 0.0407674560546875, 0.040745086669921875, 0.040699871063232425, 0.040759361267089844, 0.040471519470214844, 0.040635391235351564, 0.040503326416015624, 0.04063024139404297, 0.04039884948730469, 0.040486366271972656, 0.04065110397338867, 0.040468448638916014, 0.04041331100463867, 0.04032828903198242, 0.04025356674194336, 0.04034035110473633, 0.0403919677734375, 0.040119007110595704, 0.04076339340209961, 0.04017884826660156, 0.04030527877807617, 0.04030012893676758, 0.04037081527709961, 0.0405032958984375, 0.04022499084472656, 0.04059231948852539, 0.04019900894165039, 0.04013631820678711, 0.040451583862304685, 0.04020630264282227, 0.040493984222412106, 0.040642528533935546, 0.040564640045166016, 
0.04070003128051758, 0.04029849624633789, 0.04048076629638672, 0.0404213752746582, 0.04022828674316406, 0.04015977478027344, 0.04153299331665039, 0.041826751708984374, 0.04099686431884766, 0.04040464019775391, 0.04028876876831055, 0.0404552001953125, 0.04040377426147461, 0.04019564819335938, 0.040231361389160156, 0.040325119018554685, 0.04038655853271484, 0.04039475250244141, 0.04037222290039062, 0.04036608123779297, 0.04026163101196289, 0.04024428939819336, 0.04030767822265625, 0.04025254440307617, 0.040340129852294924, 0.04031068801879883, 0.04040118408203125, 0.04033740615844727, 0.040306049346923827, 0.04036403274536133, 0.04038924789428711, 0.04056675338745117, 0.040505374908447266, 0.04063641738891602, 0.04046172714233399, 0.040403553009033207, 0.0402894401550293, 0.04034435272216797, 0.04028627014160156, 0.04050640106201172, 0.040422367095947265, 0.04045596694946289, 0.04090083312988281, 0.041003009796142575, 0.04122019195556641, 0.041032833099365236, 0.04106524658203125, 0.04136959838867187, 0.041073982238769534, 0.04092172622680664, 0.04094060897827148, 0.04068000030517578, 0.04112739181518555, 0.04209968185424805, 0.04054153442382812, 0.04046044921875, 0.040859233856201174, 0.04067327880859375, 0.04068649673461914, 0.040712158203125, 0.04060111999511719, 0.040644321441650394, 0.040338207244873046, 0.040376319885253906, 0.04045779037475586, 0.0407303352355957, 0.04047670364379883, 0.0404384651184082, 0.04040499114990234]",tokens/s,24.67675766923927,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = 
self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 343481 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 536.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 452.12 MiB is free. Process 337701 has 14.30 GiB memory in use. Of the allocated memory 14.18 GiB is allocated by PyTorch, and 1.57 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1032, in __init__ self.lm_head = nn.Linear(config.hidden_size, 
config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 333101 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 340359 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 614, in __init__ self.self_attn = QWEN2_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 256.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 142.12 MiB is free. Process 339569 has 14.60 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 1.53 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", 
line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 342334 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in 
_call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = 
Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 345098 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1203, in __init__ self.model = Qwen2MoeModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 750, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 446, in __init__ super().__init__(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 349, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 348590 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,906.625024,8768.06144,0.0,8365.539328,8230.228992,s,1,7.27447021484375,7.27447021484375,0.0,7.27447021484375,7.27447021484375,7.27447021484375,7.27447021484375,[7.27447021484375],,kWh,6.585473474963995e-06,7.148839669203179e-07,2.7491688660008506e-06,1.0049526307885164e-05,,MB,1246.208,8958.902272,0.0,8545.8944,8499.29728,s,10,2.4514530944824218,0.24514530944824217,0.009671524999723603,0.24768851470947267,0.2503742416381836,0.2519477104187012,0.25320648544311525,"[0.21685848999023438, 0.2455242919921875, 0.2473954315185547, 0.24629408264160157, 0.2500245819091797, 0.24876515197753907, 0.2465208282470703, 0.25352117919921874, 0.24798159790039062, 
0.24856745910644532]",tokens/s,1044.278597767948,kWh,6.6269450054648954e-06,7.308339062135562e-07,4.4069973527559465e-06,1.1764776264434398e-05,tokens/kWh,21759869.822080925,MB,1251.479552,8960.999424,0.0,8547.991552,8499.29984,s,10,18.656197265625003,1.8656197265625,0.0024404186331264946,1.8658623657226563,1.8684488403320312,1.8687800231933593,1.8690449694824218,"[1.8620548095703124, 1.8632591552734374, 1.8635701904296875, 1.86279248046875, 1.8662222900390626, 1.8691112060546875, 1.868375244140625, 1.8670894775390625, 1.86550244140625, 1.868219970703125]",tokens/s,33.76893967351038,kWh,5.511207011786845e-05,6.078299523874684e-06,3.6477007576641974e-05,9.766737721838511e-05,tokens/kWh,645046.5016494858,,s,630,18.65389499855042,0.029609357140556212,0.0004118933841714926,0.029562447547912596,0.02977725715637207,0.02989627151489258,0.032329371070861826,"[0.03175388717651367, 0.03019094467163086, 0.02975564765930176, 0.02946124839782715, 0.029361696243286134, 0.029338079452514647, 0.029328704833984375, 0.029299392700195312, 0.029335039138793945, 0.02930124855041504, 0.029310623168945314, 0.02933286476135254, 0.029287071228027345, 0.02939116859436035, 0.029396127700805665, 0.029342559814453124, 0.029360128402709962, 0.02940108871459961, 0.029337791442871092, 0.02934560012817383, 0.02938470458984375, 0.02937651252746582, 0.02936195182800293, 0.02941564750671387, 0.029470592498779296, 0.029450368881225587, 0.029478591918945314, 0.029440319061279297, 0.02950115203857422, 0.02940342330932617, 0.0294072322845459, 0.029552608489990233, 0.02968169593811035, 0.029665279388427734, 0.029739007949829102, 0.02965439987182617, 0.029690496444702147, 0.029666784286499024, 0.029690399169921874, 0.02959071922302246, 0.029637439727783203, 0.02960985565185547, 0.02956003189086914, 0.029573312759399416, 0.02951468849182129, 0.029544223785400392, 0.029534208297729493, 0.02953327941894531, 0.02952079963684082, 0.029518943786621094, 0.029514656066894532, 0.029551616668701174, 0.029680639266967773, 0.0295731201171875, 0.02961408042907715, 0.029896095275878908, 0.029634464263916017, 0.02959542465209961, 0.02961065673828125, 0.02956723213195801, 0.02957926368713379, 0.02963158416748047, 0.029596576690673827, 0.032168991088867185, 0.0304117431640625, 0.029722240447998045, 0.029429311752319335, 0.02930112075805664, 0.02933296012878418, 0.02931999969482422, 0.02928451156616211, 0.02933350372314453, 0.029288448333740235, 0.029294591903686523, 0.029345632553100586, 0.02935398483276367, 0.029358240127563478, 0.02939267158508301, 0.029300960540771484, 0.029353183746337892, 0.029360416412353516, 0.029344032287597657, 0.029327392578125, 0.029381984710693358, 0.029366304397583008, 0.029383487701416015, 0.02940108871459961, 0.02940083122253418, 0.029488544464111328, 0.029505695343017578, 0.029461055755615233, 0.029513856887817384, 0.02944819259643555, 0.029491199493408202, 0.02964422416687012, 0.029758016586303712, 0.02979612731933594, 0.02980601692199707, 0.029734880447387695, 0.0297457275390625, 0.029649023056030274, 0.02966316795349121, 0.029585023880004883, 0.029663423538208007, 0.02956048011779785, 0.02961027145385742, 0.02959404754638672, 0.029558687210083007, 0.02958140754699707, 0.029550592422485353, 0.029527711868286132, 0.02958064079284668, 0.029517023086547852, 0.029557567596435547, 0.029551519393920898, 0.029527711868286132, 0.029871999740600588, 0.02959779167175293, 0.029641151428222656, 0.029601791381835937, 0.029652896881103515, 0.02963055992126465, 0.029593376159667967, 0.029579359054565428, 0.02957904052734375, 
0.029665632247924803, 0.032514270782470704, 0.03041676712036133, 0.029687936782836915, 0.029673471450805664, 0.029378559112548826, 0.029333568572998046, 0.029290431976318358, 0.029253631591796874, 0.02929971122741699, 0.029248512268066407, 0.029276159286499022, 0.02927743911743164, 0.029408000946044923, 0.029386144638061523, 0.02936793518066406, 0.029345951080322265, 0.02954287910461426, 0.029492576599121093, 0.02940025520324707, 0.029407039642333984, 0.029480960845947264, 0.02936627197265625, 0.029372095108032226, 0.029425439834594728, 0.02938934326171875, 0.029380607604980468, 0.029425664901733397, 0.029377695083618163, 0.029393184661865236, 0.02940979194641113, 0.029615488052368164, 0.029646656036376954, 0.029837984085083008, 0.029829343795776366, 0.029777183532714843, 0.029692352294921873, 0.029704479217529296, 0.029641855239868165, 0.029666175842285158, 0.02957926368713379, 0.029601696014404297, 0.029550176620483398, 0.029530624389648437, 0.029583232879638672, 0.029521888732910156, 0.02951593589782715, 0.02956492805480957, 0.029577215194702147, 0.029580703735351564, 0.029557184219360353, 0.029549760818481444, 0.0295533447265625, 0.029540479660034178, 0.029564895629882813, 0.02959174346923828, 0.029752351760864257, 0.02961097526550293, 0.02957926368713379, 0.029549983978271483, 0.02953481674194336, 0.02962588882446289, 0.029643232345581055, 0.029638656616210936, 0.03203164672851563, 0.03024233627319336, 0.029737407684326173, 0.029433408737182618, 0.029336032867431642, 0.029321216583251954, 0.029295679092407226, 0.029319616317749025, 0.029342208862304688, 0.0293253116607666, 0.02932713508605957, 0.02931046485900879, 0.029330144882202147, 0.029355392456054688, 0.029387039184570314, 0.02935024070739746, 0.02935398483276367, 0.029334911346435545, 0.029358720779418944, 0.02936422348022461, 0.029433855056762694, 0.029472768783569334, 0.029460031509399413, 0.029413631439208984, 0.029398719787597657, 0.02943846321105957, 0.029413440704345702, 0.029390783309936525, 0.029492448806762696, 0.029422304153442384, 0.029451488494873047, 0.029666143417358397, 0.029784000396728516, 0.02984761619567871, 0.02977292823791504, 0.02974604797363281, 0.029706239700317383, 0.029639680862426757, 0.02957606315612793, 0.029547903060913087, 0.029592191696166992, 0.02953023910522461, 0.029578399658203126, 0.029554624557495118, 0.029520448684692384, 0.029569568634033202, 0.029617984771728514, 0.029575168609619142, 0.029675615310668944, 0.029663135528564453, 0.029657024383544922, 0.029583263397216796, 0.029604000091552736, 0.029562335968017578, 0.029568864822387696, 0.029588159561157228, 0.029553728103637697, 0.029608896255493164, 0.029608959197998046, 0.029628543853759765, 0.02961440086364746, 0.02957779121398926, 0.029601791381835937, 0.03207993698120117, 0.030459072113037108, 0.029797183990478517, 0.029507583618164062, 0.02941043281555176, 0.029285087585449218, 0.029289663314819334, 0.029281471252441408, 0.029309919357299805, 0.029260704040527344, 0.02932521629333496, 0.02931711959838867, 0.029414527893066405, 0.02940812873840332, 0.02944000053405762, 0.029412479400634767, 0.02939369583129883, 0.029364320755004884, 0.029411327362060546, 0.029454336166381836, 0.029375551223754882, 0.029438911437988283, 0.02938470458984375, 0.029468448638916015, 0.029550527572631834, 0.029530399322509764, 0.029509632110595704, 0.029564863204956056, 0.029499263763427735, 0.02948860740661621, 0.02948281669616699, 0.029592479705810547, 0.02982815933227539, 0.029932479858398437, 0.029863712310791015, 0.02982524871826172, 
0.029908992767333983, 0.029747200012207032, 0.029844703674316405, 0.02978691291809082, 0.02974444770812988, 0.02972480010986328, 0.0296507511138916, 0.029772256851196287, 0.029742464065551758, 0.029780895233154296, 0.029619712829589844, 0.029666847229003906, 0.02960505676269531, 0.029576992034912108, 0.029607936859130858, 0.029489152908325194, 0.029574495315551758, 0.029637279510498046, 0.02959769630432129, 0.029618175506591796, 0.02959756851196289, 0.029570783615112305, 0.029608287811279298, 0.029583423614501954, 0.029603168487548827, 0.029690528869628908, 0.029698047637939453, 0.03239487838745117, 0.030474592208862304, 0.029884416580200194, 0.029578496932983398, 0.029451007843017577, 0.029428735733032226, 0.029336191177368163, 0.029388160705566407, 0.029354784011840822, 0.029314943313598633, 0.02933328056335449, 0.029364799499511717, 0.02940825653076172, 0.02940771293640137, 0.029442592620849608, 0.029507583618164062, 0.029394271850585938, 0.02937923240661621, 0.029429088592529296, 0.02935862350463867, 0.02946236801147461, 0.02961609649658203, 0.029574848175048827, 0.0295897274017334, 0.029599584579467774, 0.029794368743896484, 0.029725183486938478, 0.029679424285888673, 0.029679807662963867, 0.029590591430664063, 0.029594560623168946, 0.029664575576782228, 0.02976201629638672, 0.029829343795776366, 0.02978793525695801, 0.029700319290161134, 0.02970966339111328, 0.02978985595703125, 0.02971664047241211, 0.029666240692138673, 0.02972662353515625, 0.02961574363708496, 0.029614048004150392, 0.029567136764526367, 0.029677824020385744, 0.0296362247467041, 0.029557279586791992, 0.029542240142822265, 0.02957529640197754, 0.02954617691040039, 0.029622528076171876, 0.029593536376953125, 0.029636383056640625, 0.029593183517456056, 0.02966912078857422, 0.029725120544433593, 0.029708736419677733, 0.02977791976928711, 0.0296627197265625, 0.02990336036682129, 0.02991542434692383, 0.02989641571044922, 0.029888511657714844, 0.032442558288574216, 0.0305, 0.0297762565612793, 0.029483232498168945, 0.029444000244140626, 0.02932975959777832, 0.02934752082824707, 0.02941152000427246, 0.029436031341552735, 0.029386335372924805, 0.029475231170654297, 0.029402528762817383, 0.02938252830505371, 0.029423999786376952, 0.029380960464477537, 0.02933875274658203, 0.029354719161987303, 0.029372575759887696, 0.02943417549133301, 0.029445472717285155, 0.029421920776367187, 0.029503231048583985, 0.029432064056396486, 0.029452192306518556, 0.029538528442382812, 0.029441919326782227, 0.029452287673950195, 0.029445472717285155, 0.029419296264648436, 0.029496192932128906, 0.029545568466186525, 0.029653568267822266, 0.02995814323425293, 0.02991494369506836, 0.030076831817626954, 0.029919008255004882, 0.029933439254760743, 0.02983011245727539, 0.029781951904296874, 0.029702207565307618, 0.0297205753326416, 0.029698047637939453, 0.0295731201171875, 0.029617151260375976, 0.029606271743774414, 0.029735551834106446, 0.029685760498046877, 0.029624319076538085, 0.029672992706298827, 0.029665536880493164, 0.029655263900756835, 0.029642591476440428, 0.029654624938964844, 0.02972275161743164, 0.029651391983032228, 0.030109344482421876, 0.029616479873657228, 0.02968979263305664, 0.02973689651489258, 0.029646976470947266, 0.02960588836669922, 0.029648895263671874, 0.029695743560791014, 0.03243209457397461, 0.03059097671508789, 0.02976473617553711, 0.029574272155761718, 0.029432735443115234, 0.0294050235748291, 0.029492223739624023, 0.02939904022216797, 0.029488224029541016, 0.02953625679016113, 0.029543327331542968, 0.029585407257080077, 
0.02963372802734375, 0.029582143783569336, 0.02953353691101074, 0.02943574333190918, 0.02955961608886719, 0.02944812774658203, 0.029478975296020508, 0.029483007431030273, 0.029446144104003907, 0.029419519424438476, 0.029470720291137696, 0.029452320098876952, 0.029519071578979494, 0.02941414451599121, 0.029427679061889648, 0.02955824089050293, 0.029465152740478517, 0.029470720291137696, 0.029503488540649415, 0.029686912536621094, 0.029842016220092773, 0.029788448333740235, 0.02976358413696289, 0.029730400085449218, 0.029761951446533205, 0.02969558334350586, 0.029682304382324217, 0.029672895431518555, 0.029614559173583986, 0.029558656692504882, 0.029552064895629882, 0.02954911994934082, 0.02963599967956543, 0.02958633613586426, 0.029580320358276367, 0.029567968368530272, 0.029562559127807617, 0.029601791381835937, 0.029536191940307616, 0.029591615676879884, 0.029601247787475585, 0.029575712203979494, 0.02962544059753418, 0.02958198356628418, 0.029589759826660157, 0.029664480209350585, 0.029593727111816407, 0.029632448196411133, 0.029573856353759767, 0.029652992248535157, 0.029665056228637694, 0.032462753295898435, 0.03037139129638672, 0.029668256759643553, 0.02951372718811035, 0.02934169578552246, 0.029286399841308593, 0.029325504302978516, 0.02931488037109375, 0.02934566307067871, 0.029359296798706056, 0.029451200485229492, 0.029454111099243164, 0.029478464126586914, 0.029464351654052735, 0.029464544296264647, 0.029524896621704103, 0.029468416213989258, 0.029468511581420897, 0.029554431915283202, 0.029546688079833985, 0.029556703567504884, 0.029571807861328125, 0.029480192184448244, 0.029495840072631837, 0.029462528228759766, 0.02949068832397461, 0.02948761558532715, 0.029476863861083984, 0.02944630432128906, 0.02943734359741211, 0.02944380760192871, 0.029606624603271483, 0.029775583267211914, 0.029751392364501954, 0.02979859161376953, 0.0296975040435791, 0.029760032653808593, 0.029655040740966795, 0.029685312271118165, 0.02958585548400879, 0.029581279754638673, 0.029548608779907226, 0.029656511306762695, 0.02960848045349121, 0.029544223785400392, 0.02952009582519531, 0.02955673599243164, 0.02953215980529785, 0.029592863082885744, 0.029552831649780273, 0.02950147247314453, 0.029583871841430662, 0.02957926368713379, 0.02961164855957031, 0.02963478469848633, 0.029683040618896483, 0.029673376083374024, 0.02960272026062012, 0.029633855819702147, 0.029637407302856446, 0.02963017654418945, 0.029630495071411134, 0.02964614486694336, 0.032527713775634765, 0.030478111267089845, 0.029803359985351562, 0.02950553512573242, 0.029384639739990233, 0.02935001564025879, 0.029351423263549805, 0.02931295967102051, 0.029360639572143556, 0.0293253116607666, 0.03308544158935547, 0.02930803108215332, 0.029361024856567382, 0.029345792770385744, 0.029400928497314453, 0.02932547187805176, 0.02931609535217285, 0.02939593505859375, 0.029335039138793945, 0.02943235206604004, 0.029466623306274413, 0.02943791961669922, 0.029411359786987303, 0.029439584732055664, 0.02941788864135742, 0.029450239181518553, 0.029444223403930665, 0.02943561553955078, 0.029429920196533205, 0.029427135467529297, 0.0295614070892334, 0.029681663513183593, 0.029841407775878907, 0.029884416580200194, 0.029874176025390626, 0.029769279479980468, 0.029705823898315428, 0.029650976181030273, 0.029669376373291017, 0.029631488800048827, 0.02968966484069824, 0.029624319076538085, 0.02954854393005371, 0.029552064895629882, 0.029543039321899413, 0.029554624557495118, 0.029496511459350585, 0.02950432014465332, 0.029566976547241212, 0.02956492805480957, 
0.02959347152709961, 0.029597824096679687, 0.029636608123779298, 0.029607936859130858, 0.029646623611450196, 0.029630687713623045, 0.02960335922241211, 0.02959014320373535, 0.029583200454711914, 0.02959292793273926, 0.02960860824584961, 0.029638656616210936, 0.029673471450805664]",tokens/s,33.773107442116356,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2024, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2982, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1104, in forward outputs = self.model( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 915, in forward layer_outputs = decoder_layer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return 
self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 655, in forward hidden_states, self_attn_weights, present_key_value = self.self_attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 484, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, 
in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,905.728,4682.809344,0.0,4280.287232,4115.121152,s,1,7.35367236328125,7.35367236328125,0.0,7.35367236328125,7.35367236328125,7.35367236328125,7.35367236328125,[7.35367236328125],,kWh,6.2510382666611495e-06,6.660010495222483e-07,1.9163904220187433e-06,8.833429738202141e-06,,MB,1261.318144,4988.993536,0.0,4575.985664,4408.410112,s,10,2.7949914855957028,0.27949914855957037,0.0020469865082645682,0.2793283996582031,0.2812862915039062,0.28277908630371096,0.2839733221435547,"[0.28427188110351564, 0.2767898864746094, 0.2785625915527344, 0.280115478515625, 0.28095455932617186, 0.27770108032226565, 0.27747503662109374, 0.27953765869140623, 0.28046417236328125, 0.279119140625]",tokens/s,915.9240781924532,kWh,8.482572096666748e-06,9.35084106441459e-07,5.624917198342778e-06,1.5042573401450986e-05,tokens/kWh,17018364.688538373,MB,1286.569984,4988.993536,0.0,4575.985664,4408.412672,s,10,16.283703247070314,1.6283703247070314,0.020045413005740236,1.6390998535156251,1.6465183715820313,1.6483929138183593,1.6498925476074218,"[1.6369945068359375, 1.6412052001953126, 1.646101806640625, 1.64560107421875, 1.6502674560546875, 1.64258740234375, 1.62020263671875, 1.5952696533203126, 1.6049317626953126, 1.600541748046875]",tokens/s,38.688988029387396,kWh,4.6824061024587236e-05,5.164924044528296e-06,2.9612027657855658e-05,8.160101272697119e-05,tokens/kWh,772049.241726836,,s,630,16.281280303955068,0.025843302069769966,0.0004951302317869193,0.02594003200531006,0.02622758331298828,0.026409839057922362,0.027212352924346924,"[0.02684137535095215, 0.026152704238891603, 0.025571584701538086, 
0.025640512466430666, 0.025808895111083984, 0.026291807174682616, 0.025891679763793946, 0.025827327728271485, 0.025682207107543945, 0.02574633598327637, 0.025731584548950196, 0.025997087478637694, 0.025720415115356447, 0.02583033561706543, 0.02568191909790039, 0.025865856170654296, 0.025891199111938476, 0.02609766387939453, 0.026117504119873045, 0.025899295806884767, 0.02593212890625, 0.02589014434814453, 0.025854303359985353, 0.02577235221862793, 0.026056703567504884, 0.02599679946899414, 0.025872928619384766, 0.025876447677612303, 0.025882623672485353, 0.025757696151733397, 0.02575564765930176, 0.02597887992858887, 0.025821184158325194, 0.02637148857116699, 0.025958879470825195, 0.026075263977050782, 0.025906368255615233, 0.026139328002929688, 0.025875871658325195, 0.026921056747436525, 0.026415584564208984, 0.02602732849121094, 0.02603209686279297, 0.02597270393371582, 0.02606368064880371, 0.02598512077331543, 0.02595020866394043, 0.026142688751220704, 0.02596201515197754, 0.025903615951538086, 0.025925407409667967, 0.02591766357421875, 0.02628326416015625, 0.0261146240234375, 0.026050752639770507, 0.02593721580505371, 0.02600943946838379, 0.026108768463134764, 0.02591744041442871, 0.02589446449279785, 0.02613279914855957, 0.025962623596191406, 0.026015743255615235, 0.02676355171203613, 0.02647292709350586, 0.02610540771484375, 0.025999231338500975, 0.026819040298461914, 0.026365503311157227, 0.026040767669677733, 0.026031648635864258, 0.025917503356933595, 0.02601411247253418, 0.0259617919921875, 0.026019935607910157, 0.02588528060913086, 0.02590070343017578, 0.026089216232299806, 0.0262191047668457, 0.025940095901489258, 0.025880416870117186, 0.025791711807250976, 0.02588729667663574, 0.026124544143676758, 0.02593382453918457, 0.02591119956970215, 0.026046560287475585, 0.026064895629882814, 0.02599350357055664, 0.02603171157836914, 0.02591551971435547, 0.02607923126220703, 0.026189823150634766, 0.025874431610107423, 0.02598281669616699, 0.025870527267456055, 0.026076223373413084, 0.025961376190185546, 0.02609152030944824, 0.02592972755432129, 0.026599424362182617, 0.026103776931762697, 0.026113311767578126, 0.02635852813720703, 0.025981184005737304, 0.026072256088256834, 0.026269855499267577, 0.025868703842163086, 0.026001407623291017, 0.025978624343872072, 0.02596879959106445, 0.025886816024780275, 0.025773727416992187, 0.025927743911743163, 0.02594236755371094, 0.02589849662780762, 0.02601775932312012, 0.025982591629028322, 0.026000192642211914, 0.026097856521606445, 0.02600912094116211, 0.026018112182617188, 0.025862144470214843, 0.026038272857666016, 0.025963680267333984, 0.026032768249511718, 0.02659312057495117, 0.026046432495117188, 0.026990175247192383, 0.03084060859680176, 0.026021984100341795, 0.026112735748291014, 0.02590105628967285, 0.026019840240478515, 0.026047903060913084, 0.02603887939453125, 0.025980768203735353, 0.026148479461669923, 0.02626518440246582, 0.026303680419921874, 0.026103551864624024, 0.025907199859619142, 0.026286079406738282, 0.02602176094055176, 0.026034143447875975, 0.025969888687133787, 0.025932735443115234, 0.02596006393432617, 0.026082752227783203, 0.025977312088012697, 0.025967071533203125, 0.02593382453918457, 0.026081279754638673, 0.02598297691345215, 0.025998464584350588, 0.02599942398071289, 0.025958208084106444, 0.025881599426269532, 0.025881792068481447, 0.02604934310913086, 0.026005504608154296, 0.025939968109130858, 0.025987071990966795, 0.02592313575744629, 0.02596214485168457, 0.026012447357177733, 0.025956352233886718, 
0.02593388748168945, 0.02603308868408203, 0.026036832809448244, 0.025894336700439453, 0.02596963119506836, 0.02588876724243164, 0.02614681625366211, 0.025957599639892578, 0.026101728439331055, 0.02586092758178711, 0.026175487518310548, 0.02594367980957031, 0.026102144241333006, 0.025903104782104492, 0.026543296813964844, 0.02607391929626465, 0.026224063873291015, 0.02604640007019043, 0.025946495056152343, 0.025929632186889647, 0.02606934356689453, 0.025972736358642577, 0.026687488555908204, 0.026177536010742186, 0.02608332824707031, 0.026045984268188476, 0.025853759765625, 0.026097440719604494, 0.025994112014770508, 0.02619385528564453, 0.02612361526489258, 0.02617353630065918, 0.026051071166992186, 0.026034080505371093, 0.02595574378967285, 0.02608620834350586, 0.026017311096191407, 0.026021408081054687, 0.025990079879760743, 0.025991167068481445, 0.027230207443237304, 0.028763935089111327, 0.02607049560546875, 0.025973503112792968, 0.02614035224914551, 0.025921056747436524, 0.02609984016418457, 0.025995935440063477, 0.02600720024108887, 0.026089279174804688, 0.025949760437011717, 0.026174432754516603, 0.02588467216491699, 0.025819040298461913, 0.025876575469970704, 0.026011648178100585, 0.025851839065551756, 0.025960512161254883, 0.02590924835205078, 0.026023935317993165, 0.025788511276245117, 0.026035680770874023, 0.025895200729370117, 0.026422687530517578, 0.02599398422241211, 0.026029056549072265, 0.026239999771118162, 0.02608064079284668, 0.02602009582519531, 0.026572864532470705, 0.026351200103759766, 0.025944063186645508, 0.0260283203125, 0.026064640045166017, 0.025924287796020507, 0.025968639373779297, 0.025861759185791016, 0.025901439666748047, 0.027066368103027344, 0.025959583282470704, 0.025971551895141602, 0.02593516731262207, 0.025977535247802733, 0.025970432281494142, 0.02601753616333008, 0.027279136657714843, 0.026013696670532226, 0.02586070442199707, 0.026060863494873045, 0.025937503814697265, 0.026826719284057617, 0.0262741756439209, 0.026136480331420898, 0.026484832763671876, 0.02612633514404297, 0.025923583984375, 0.026175615310668945, 0.02610367965698242, 0.026277887344360353, 0.026228736877441407, 0.026175487518310548, 0.026070079803466796, 0.02603718376159668, 0.026071039199829102, 0.026075071334838867, 0.02606035232543945, 0.02604921531677246, 0.025955135345458985, 0.026112255096435548, 0.02593459129333496, 0.02591948890686035, 0.025970367431640624, 0.02587091255187988, 0.026100479125976562, 0.02595123291015625, 0.0259289608001709, 0.025877248764038085, 0.026043903350830077, 0.026236640930175782, 0.02710812759399414, 0.026081279754638673, 0.026175199508666994, 0.02601807975769043, 0.02634307289123535, 0.026060319900512694, 0.026227455139160156, 0.026118207931518554, 0.026094688415527343, 0.02595033645629883, 0.026004159927368164, 0.02599888038635254, 0.027032127380371095, 0.02716864013671875, 0.02618288040161133, 0.025954975128173827, 0.02598080062866211, 0.026351999282836915, 0.025995071411132813, 0.026185792922973634, 0.02611827278137207, 0.026490560531616213, 0.026042688369750978, 0.026054655075073242, 0.025991167068481445, 0.026055904388427736, 0.02801744079589844, 0.026038272857666016, 0.026025983810424806, 0.02767660713195801, 0.026103424072265624, 0.02647436714172363, 0.026120607376098632, 0.026007583618164062, 0.025958816528320314, 0.025874303817749023, 0.025888128280639647, 0.025989919662475585, 0.026044031143188477, 0.025930047988891602, 0.026224000930786133, 0.025946815490722655, 0.025989120483398437, 0.026015743255615235, 0.0259451847076416, 
0.026373023986816405, 0.026011648178100585, 0.025987071990966795, 0.026098848342895508, 0.026256223678588868, 0.026056703567504884, 0.026003456115722655, 0.02598297691345215, 0.026255359649658205, 0.026005504608154296, 0.026050559997558592, 0.025885759353637697, 0.025924543380737304, 0.02597068786621094, 0.02596019172668457, 0.02586444854736328, 0.02595840072631836, 0.026223871231079103, 0.026090208053588866, 0.025952287673950195, 0.02603001594543457, 0.02584172821044922, 0.02586419105529785, 0.02614476776123047, 0.02601590347290039, 0.025971551895141602, 0.026323551177978514, 0.02602025604248047, 0.026097280502319336, 0.026285663604736328, 0.026083423614501954, 0.025998207092285158, 0.025952064514160156, 0.026273792266845702, 0.02597887992858887, 0.025945791244506834, 0.02619599914550781, 0.026042495727539063, 0.02611203193664551, 0.02594326400756836, 0.025996192932128907, 0.02596575927734375, 0.02604729652404785, 0.026007551193237305, 0.02609766387939453, 0.02593791961669922, 0.025992576599121093, 0.02665007972717285, 0.026071008682250978, 0.02597667121887207, 0.026089632034301757, 0.026188352584838866, 0.026066944122314452, 0.026210304260253905, 0.025942272186279296, 0.025965984344482423, 0.02602448081970215, 0.02593484878540039, 0.02607187271118164, 0.026385984420776366, 0.02590764808654785, 0.026327039718627928, 0.026081279754638673, 0.025892831802368163, 0.02587552070617676, 0.025991296768188475, 0.02621731185913086, 0.026111360549926757, 0.025930368423461914, 0.025923072814941408, 0.025833343505859373, 0.025920127868652342, 0.025987071990966795, 0.02591334342956543, 0.025892255783081054, 0.025929887771606444, 0.026058528900146486, 0.02591948890686035, 0.02578019142150879, 0.025840320587158204, 0.025871456146240233, 0.025797536849975586, 0.02573516845703125, 0.025857088088989257, 0.025770015716552734, 0.02547599983215332, 0.025475200653076173, 0.02536025619506836, 0.025328767776489257, 0.025201440811157227, 0.025091232299804686, 0.025045536041259767, 0.0253853759765625, 0.025298303604125976, 0.02522175979614258, 0.0251146240234375, 0.02514739227294922, 0.025214975357055663, 0.025241472244262694, 0.025447616577148436, 0.025473215103149413, 0.025244415283203123, 0.0252227840423584, 0.025205120086669922, 0.025558752059936525, 0.025352479934692383, 0.025169919967651368, 0.02525388717651367, 0.025362239837646485, 0.02516371154785156, 0.026527616500854494, 0.025696895599365235, 0.025430015563964844, 0.02531328010559082, 0.025196128845214844, 0.025378591537475587, 0.025315071105957033, 0.025311296463012695, 0.026397504806518556, 0.025375839233398437, 0.02558412742614746, 0.025256351470947267, 0.025325567245483398, 0.02517398452758789, 0.025262111663818358, 0.025165824890136718, 0.025132991790771483, 0.02517919921875, 0.025358688354492186, 0.025225439071655274, 0.02520627212524414, 0.02526905632019043, 0.02513267135620117, 0.025330175399780275, 0.025419551849365233, 0.025259552001953126, 0.025223871231079102, 0.025247711181640625, 0.02515727996826172, 0.025364864349365236, 0.025189855575561523, 0.025202207565307617, 0.025332639694213867, 0.025409215927124022, 0.025561248779296875, 0.025405887603759766, 0.025339712142944337, 0.025134336471557616, 0.025254655838012695, 0.0255565128326416, 0.02523948860168457, 0.025487903594970704, 0.02531043243408203, 0.025172256469726564, 0.025214879989624024, 0.025320032119750976, 0.02527542304992676, 0.02523958396911621, 0.025174976348876953, 0.025204736709594725, 0.0251944637298584, 0.02520003128051758, 0.025172672271728515, 0.025284000396728516, 
0.02514998435974121, 0.02543600082397461, 0.025181856155395508, 0.02520460891723633, 0.02521766471862793, 0.025225215911865235, 0.025157632827758788, 0.0252388801574707, 0.025166271209716796, 0.026402816772460938, 0.025707551956176758, 0.025844703674316405, 0.02529484748840332, 0.02525388717651367, 0.0253439998626709, 0.02531532859802246, 0.025194496154785157, 0.025126335144042968, 0.025227359771728516, 0.02518057632446289, 0.025133119583129884, 0.025188352584838865, 0.02523721694946289, 0.025096223831176757, 0.02554070472717285, 0.025136735916137694, 0.025047359466552736, 0.025106271743774413, 0.02515177536010742, 0.025106367111206056, 0.025120031356811522, 0.02509507179260254, 0.02679622459411621, 0.025143104553222655, 0.025456672668457032, 0.025232959747314453, 0.02518467140197754, 0.02507708740234375, 0.02559152030944824, 0.025174623489379884, 0.025403583526611328, 0.025168031692504884, 0.02575155258178711, 0.025384960174560548, 0.02530508804321289, 0.02527846336364746, 0.025269983291625976, 0.02854547119140625, 0.025859487533569335, 0.025893312454223633, 0.026068511962890624, 0.025987552642822265, 0.025786367416381836, 0.025915168762207032, 0.025845535278320314, 0.026296960830688478, 0.025802080154418944, 0.025629152297973634, 0.02553856086730957, 0.02549456024169922, 0.025414623260498048, 0.02517180824279785, 0.02523971176147461, 0.025192447662353516, 0.025411584854125976, 0.025243167877197267, 0.02522159957885742, 0.025259744644165038, 0.025071552276611328, 0.025262432098388674, 0.02521638488769531, 0.025282943725585937, 0.026314559936523436, 0.02559347152709961, 0.025415679931640626, 0.025395103454589844, 0.025399776458740236, 0.02528998374938965, 0.025252607345581053, 0.025393152236938478, 0.025796607971191408, 0.025380863189697265, 0.025208831787109375, 0.025536512374877928, 0.02532966423034668, 0.02525388717651367, 0.025292224884033203, 0.025463167190551757, 0.025305280685424803, 0.02535628890991211, 0.02528665542602539, 0.025653247833251954, 0.02541324806213379, 0.025244319915771484, 0.02520649528503418, 0.025499040603637696, 0.025352575302124022, 0.025387231826782226, 0.025210880279541017, 0.025413631439208984, 0.025251840591430662, 0.02530713653564453, 0.02530031967163086, 0.025383424758911134, 0.025395360946655274, 0.025636863708496094, 0.025167871475219726, 0.02532329559326172, 0.025159296035766603, 0.02523401641845703, 0.025185440063476564, 0.025359231948852538, 0.025130752563476563, 0.025267839431762695, 0.025303647994995116, 0.025298944473266603, 0.025325567245483398, 0.025247743606567383, 0.02519209671020508, 0.025186656951904297, 0.025226911544799804, 0.025377119064331054, 0.025212032318115234, 0.025792959213256837, 0.02510073661804199, 0.02527241516113281, 0.026357503890991212, 0.026554527282714843, 0.025329631805419923, 0.025285856246948242, 0.026348352432250977, 0.025608192443847655, 0.025106143951416016, 0.025323360443115235, 0.025135488510131837]",tokens/s,38.69474563661675,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 
2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 336168 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ 
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 286.12 MiB is free. Process 344252 has 14.46 GiB memory in use. Of the allocated memory 14.30 GiB is allocated by PyTorch, and 41.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,905.609216,1910.3744,0.0,1507.852288,1469.840384,s,1,7.4902998046875,7.4902998046875,0.0,7.4902998046875,7.4902998046875,7.4902998046875,7.4902998046875,[7.4902998046875],,kWh,5.198127316619623e-06,5.6625570156614e-07,2.0541683100061547e-06,7.818551328191917e-06,,MB,1357.627392,1952.31744,0.0,1537.212416,1426.274304,s,10,0.28543609619140625,0.028543609619140625,0.00014708506792359384,0.028536895751953124,0.02871155529022217,0.028764417552947997,0.028806707363128663,"[0.02869980812072754, 0.028581024169921875, 0.02839276885986328, 0.02854979133605957, 0.02852400016784668, 0.028450271606445313, 0.028372959136962892, 0.02868822479248047, 0.028359968185424803, 0.02881727981567383]",tokens/s,8968.732525977823,kWh,8.480818945531061e-07,9.351078392349782e-08,5.606091441391283e-07,1.5022018226157322e-06,tokens/kWh,170416515.3749022,MB,1390.497792,1964.900352,0.0,1547.698176,1426.276864,s,10,15.816748046874999,1.5816748046874998,0.0075148625056441955,1.5778760986328124,1.5931241943359375,1.5933386474609375,1.5935102099609375,"[1.57582568359375, 1.57372412109375, 1.588531005859375, 1.577494140625, 1.575567626953125, 1.5866553955078124, 1.578258056640625, 1.5740623779296874, 1.5935531005859376, 1.5930765380859375]",tokens/s,39.83119653502179,kWh,4.6850416000860814e-05,5.16725974547564e-06,2.0083018481860008e-05,7.210069422819645e-05,tokens/kWh,873777.9944338257,,s,630,15.814703456878645,0.0251027038998074,0.0003515752785932424,0.02501262378692627,0.025413993453979492,0.025632842636108398,0.02684667814254764,"[0.02542540740966797, 0.02522585678100586, 0.025183712005615234, 0.024955295562744142, 0.024928255081176756, 0.025024160385131836, 0.02494499206542969, 0.024936447143554686, 0.02527248001098633, 0.025048927307128908, 0.025143295288085937, 0.025432096481323243, 0.02509775924682617, 0.024987903594970703, 0.025093696594238282, 0.0251695671081543, 0.02498863983154297, 0.02494063949584961, 0.025678783416748046, 0.024922304153442383, 0.0249003849029541, 0.02491187286376953, 0.024877056121826172, 
0.024925312042236327, 0.02481455993652344, 0.024825759887695312, 0.024860448837280273, 0.02485843276977539, 0.025086271286010743, 0.025107999801635743, 0.025016544342041015, 0.024995967864990233, 0.02505062484741211, 0.02502934455871582, 0.025083871841430665, 0.024963008880615235, 0.024872447967529295, 0.024935007095336914, 0.024922111511230468, 0.024964799880981447, 0.02494233512878418, 0.024858240127563477, 0.024959007263183595, 0.02485340881347656, 0.024825952529907228, 0.024879007339477538, 0.024870208740234375, 0.02485113525390625, 0.024813024520874024, 0.024832544326782228, 0.025053184509277345, 0.02525763130187988, 0.025180511474609375, 0.025020416259765626, 0.02490563201904297, 0.025130720138549806, 0.024950559616088868, 0.024944255828857422, 0.02558665657043457, 0.02491801643371582, 0.02482899284362793, 0.024789920806884767, 0.024978944778442383, 0.02507776069641113, 0.025030656814575194, 0.024943775177001953, 0.025043807983398437, 0.024893440246582032, 0.025798112869262695, 0.024910367965698243, 0.02489753532409668, 0.02491187286376953, 0.024874496459960937, 0.024994304656982422, 0.02488697624206543, 0.024840448379516603, 0.024937952041625976, 0.024883808135986327, 0.024943679809570313, 0.024945600509643555, 0.02485196876525879, 0.02493008041381836, 0.024840576171875, 0.02489583969116211, 0.024862592697143554, 0.024995967864990233, 0.02485798454284668, 0.024814207077026366, 0.024893440246582032, 0.02484396743774414, 0.024936128616333007, 0.02523980712890625, 0.02503868865966797, 0.024885791778564453, 0.025026559829711914, 0.024823999404907225, 0.02488096046447754, 0.025595903396606445, 0.025024511337280272, 0.02494259262084961, 0.02491529655456543, 0.025047712326049805, 0.024928096771240235, 0.024940704345703123, 0.024878271102905275, 0.025073984146118163, 0.024772224426269532, 0.024906400680541993, 0.025024736404418945, 0.02495078468322754, 0.024939872741699218, 0.024863391876220702, 0.024836095809936523, 0.02490777587890625, 0.024825983047485352, 0.025054719924926756, 0.024844383239746092, 0.025042623519897462, 0.025043327331542967, 0.025116895675659178, 0.025026559829711914, 0.025010175704956054, 0.02499500846862793, 0.024922367095947265, 0.025016895294189455, 0.025636255264282228, 0.02494259262084961, 0.024863136291503905, 0.025464832305908205, 0.024895456314086913, 0.025645088195800782, 0.025151327133178712, 0.025104543685913087, 0.025034751892089844, 0.024886655807495116, 0.025129600524902342, 0.02570444869995117, 0.025044992446899415, 0.024892864227294923, 0.02492406463623047, 0.024865440368652344, 0.025294527053833008, 0.02514262390136719, 0.02503104019165039, 0.025305055618286134, 0.024990079879760742, 0.02496940803527832, 0.02503891181945801, 0.025001983642578125, 0.025191648483276367, 0.025070240020751953, 0.024977535247802735, 0.025018016815185548, 0.024989984512329103, 0.02495699119567871, 0.025233184814453125, 0.02509436798095703, 0.02509004783630371, 0.025065376281738282, 0.025010112762451173, 0.02748624038696289, 0.02536460876464844, 0.028440736770629884, 0.025433887481689454, 0.025266239166259766, 0.02513100814819336, 0.02509823989868164, 0.025032384872436524, 0.02510860824584961, 0.025157983779907227, 0.025546112060546876, 0.02531551933288574, 0.025350624084472657, 0.025063232421875, 0.02501171112060547, 0.025047679901123047, 0.025696128845214845, 0.02497475242614746, 0.024989408493041994, 0.024969152450561524, 0.02495155143737793, 0.025643199920654298, 0.025025823593139648, 0.025012351989746093, 0.02505104064941406, 0.025043647766113283, 0.024948511123657226, 
0.025198816299438476, 0.02494374465942383, 0.024975360870361327, 0.024913919448852538, 0.025046432495117187, 0.024975967407226563, 0.025008127212524413, 0.025061151504516602, 0.025096416473388672, 0.024961023330688475, 0.024949888229370117, 0.025367200851440428, 0.024828128814697266, 0.024900896072387695, 0.0248941764831543, 0.025628671646118165, 0.024895488739013674, 0.024888383865356446, 0.024888256072998046, 0.02488902473449707, 0.024922143936157225, 0.02502070426940918, 0.025454591751098633, 0.025079744338989258, 0.025231424331665038, 0.02510643196105957, 0.02495414352416992, 0.02492243194580078, 0.025715103149414064, 0.024997888565063478, 0.0249487361907959, 0.02484048080444336, 0.025433919906616212, 0.025021440505981447, 0.024883392333984376, 0.02541436767578125, 0.02489257621765137, 0.024990175247192385, 0.02495644760131836, 0.02495484733581543, 0.02592051124572754, 0.025057151794433595, 0.025052192687988282, 0.02488374328613281, 0.02490617561340332, 0.02491129684448242, 0.024936384201049804, 0.024940832138061524, 0.025049215316772462, 0.025028831481933595, 0.024995840072631836, 0.025010175704956054, 0.024862720489501954, 0.02498080062866211, 0.024840896606445312, 0.02512281608581543, 0.02498067283630371, 0.02493631935119629, 0.02490185546875, 0.024946975708007812, 0.024885696411132814, 0.024840192794799806, 0.025182207107543944, 0.02512076759338379, 0.025095296859741212, 0.024912256240844727, 0.024840192794799806, 0.025627904891967774, 0.02501260757446289, 0.02495692825317383, 0.025364864349365236, 0.025012224197387696, 0.024875200271606446, 0.024825664520263673, 0.02490982437133789, 0.02492624092102051, 0.02488265609741211, 0.024776191711425782, 0.025179391860961915, 0.0249562873840332, 0.025421632766723632, 0.024963647842407226, 0.025016319274902343, 0.025175519943237305, 0.025076255798339844, 0.02492006492614746, 0.024960351943969727, 0.025156255722045898, 0.02524883270263672, 0.025031391143798827, 0.0252860164642334, 0.024970079421997072, 0.025075231552124023, 0.024866432189941407, 0.024991647720336914, 0.024916927337646486, 0.02489084815979004, 0.024838687896728516, 0.024928255081176756, 0.024829248428344726, 0.024832416534423828, 0.024860960006713867, 0.025554943084716796, 0.025194496154785157, 0.02487295913696289, 0.02489958381652832, 0.024841951370239257, 0.025662912368774413, 0.02493427276611328, 0.024832992553710936, 0.024813343048095703, 0.025449888229370117, 0.024988479614257812, 0.02482697677612305, 0.0252825927734375, 0.02486502456665039, 0.02483827209472656, 0.024764032363891603, 0.025043264389038086, 0.025018527984619142, 0.0250281925201416, 0.024951616287231446, 0.025059328079223633, 0.02487615966796875, 0.02480988883972168, 0.024867136001586913, 0.024873119354248047, 0.0249036808013916, 0.024809375762939453, 0.02494076728820801, 0.02491596794128418, 0.024937664031982422, 0.024809375762939453, 0.02507868766784668, 0.025214975357055663, 0.024986976623535155, 0.024838815689086913, 0.024747264862060546, 0.024824800491333007, 0.0248502082824707, 0.024766559600830077, 0.025347999572753906, 0.025253568649291992, 0.02495065689086914, 0.025010623931884766, 0.02730188751220703, 0.02737558364868164, 0.025159391403198242, 0.02504275131225586, 0.02500044822692871, 0.025052223205566406, 0.024959936141967773, 0.025026079177856445, 0.02505132865905762, 0.025071903228759764, 0.02490163230895996, 0.025487360000610353, 0.02508185577392578, 0.027076608657836915, 0.025923583984375, 0.025308319091796875, 0.025612607955932617, 0.02488374328613281, 0.024913919448852538, 
0.02491756820678711, 0.0251396484375, 0.024997888565063478, 0.024923839569091798, 0.024935840606689453, 0.024977312088012696, 0.025234432220458985, 0.025030656814575194, 0.025353824615478516, 0.025012639999389647, 0.02496512031555176, 0.024808544158935547, 0.02485545539855957, 0.024995071411132812, 0.024861440658569336, 0.024857759475708008, 0.02476678466796875, 0.025473119735717774, 0.024936607360839844, 0.024922399520874022, 0.02476851272583008, 0.025083904266357423, 0.027133951187133788, 0.02570854377746582, 0.024956512451171874, 0.025325983047485352, 0.02499135971069336, 0.024901983261108398, 0.024876991271972657, 0.024926271438598633, 0.0248668155670166, 0.02506060791015625, 0.024836864471435547, 0.02525971221923828, 0.024985727310180665, 0.024858816146850586, 0.02494259262084961, 0.02483404731750488, 0.025005472183227538, 0.024823999404907225, 0.024914560317993165, 0.02486249542236328, 0.02515558433532715, 0.024848384857177733, 0.0248668155670166, 0.024850528717041017, 0.025072832107543946, 0.025012960433959962, 0.026156288146972656, 0.025301759719848632, 0.02515724754333496, 0.024865087509155274, 0.024975423812866212, 0.02491334342956543, 0.02498412895202637, 0.024831775665283204, 0.025175264358520508, 0.025127935409545898, 0.025028608322143556, 0.024832000732421877, 0.024951871871948243, 0.024894399642944334, 0.02548531150817871, 0.025370624542236327, 0.025143104553222655, 0.025216447830200196, 0.025115392684936524, 0.02494441604614258, 0.024989919662475588, 0.02509619140625, 0.025057279586791992, 0.02520377540588379, 0.025314111709594727, 0.024947967529296875, 0.024970048904418944, 0.024875072479248046, 0.024868127822875976, 0.024832351684570313, 0.025026079177856445, 0.024797727584838867, 0.024909151077270507, 0.024894432067871095, 0.025143135070800782, 0.024966463088989258, 0.024864704132080077, 0.02574224090576172, 0.024999935150146483, 0.025083904266357423, 0.025761791229248047, 0.025470495223999023, 0.025210975646972656, 0.025087968826293945, 0.025105600357055665, 0.024861536026000976, 0.02491187286376953, 0.025060928344726563, 0.02489132881164551, 0.024922624588012695, 0.024974815368652342, 0.024994335174560546, 0.024870912551879884, 0.025036352157592773, 0.025012672424316405, 0.025310560226440428, 0.0250533447265625, 0.025038976669311524, 0.024871295928955078, 0.024892704010009765, 0.024867551803588867, 0.02507776069641113, 0.025241600036621094, 0.02514739227294922, 0.024970272064208984, 0.024885536193847656, 0.02498745536804199, 0.02515238380432129, 0.025075008392333984, 0.02503340721130371, 0.02498543930053711, 0.02499331283569336, 0.02494108772277832, 0.02491606330871582, 0.024946592330932618, 0.02493040084838867, 0.02484000015258789, 0.024887199401855468, 0.02489151954650879, 0.025034175872802735, 0.02503753662109375, 0.024970560073852538, 0.025112512588500977, 0.025051904678344727, 0.024993600845336913, 0.0249366397857666, 0.024938495635986328, 0.02506243133544922, 0.02494976043701172, 0.02496828842163086, 0.02494553565979004, 0.024938848495483397, 0.024866464614868165, 0.024900991439819335, 0.024906368255615235, 0.024958784103393555, 0.024903615951538085, 0.024922367095947265, 0.024864767074584963, 0.024917823791503906, 0.02505132865905762, 0.02487295913696289, 0.025059328079223633, 0.02487500762939453, 0.024915584564208983, 0.024987424850463867, 0.024847551345825194, 0.024908287048339844, 0.024828224182128905, 0.024864160537719726, 0.024844415664672853, 0.02493894386291504, 0.024790431976318358, 0.024910463333129882, 0.02481113624572754, 0.024998144149780275, 
0.02489766311645508, 0.025390207290649416, 0.02493529510498047, 0.0248668155670166, 0.02512428855895996, 0.025910079956054686, 0.025206527709960937, 0.024979455947875977, 0.025173696517944336, 0.0250263671875, 0.025059167861938476, 0.025062047958374023, 0.02528563117980957, 0.025209375381469726, 0.02521718406677246, 0.025210592269897462, 0.02511846351623535, 0.025103200912475587, 0.02553388786315918, 0.025217599868774414, 0.026040319442749024, 0.025421184539794923, 0.025548736572265626, 0.025373376846313477, 0.02529484748840332, 0.02545033645629883, 0.025526432037353514, 0.025503744125366212, 0.02537993621826172, 0.02535740852355957, 0.025336864471435547, 0.02552911949157715, 0.025366527557373047, 0.025345375061035156, 0.025336479187011717, 0.025390623092651367, 0.025330144882202147, 0.02531942367553711, 0.027864191055297853, 0.02575811195373535, 0.025360864639282228, 0.025421823501586914, 0.02535219192504883, 0.02527027130126953, 0.025546207427978515, 0.025413951873779296, 0.02534543991088867, 0.02532374382019043, 0.025179840087890624, 0.025408031463623047, 0.02517030334472656, 0.025824607849121092, 0.026013376235961914, 0.02520460891723633, 0.025135839462280273, 0.02508768081665039, 0.025055551528930665, 0.025026559829711914, 0.025114368438720704, 0.025131263732910157, 0.025165023803710936, 0.02513545608520508, 0.02515193557739258, 0.025458688735961913, 0.025233407974243165, 0.025321279525756836, 0.02519647979736328, 0.02522137641906738, 0.02524569511413574, 0.025221120834350585, 0.025157632827758788, 0.025281600952148438, 0.025268287658691407, 0.025121120452880858, 0.025274303436279295, 0.025208799362182618, 0.02519308853149414, 0.025268512725830077, 0.025255647659301758, 0.025249792098999024, 0.02520243263244629, 0.025204767227172853, 0.02533964729309082, 0.025233888626098634, 0.025122783660888673, 0.02535807991027832, 0.02519478416442871, 0.025318752288818358, 0.02513577651977539, 0.025134527206420898, 0.025799232482910155, 0.02536857604980469, 0.026005504608154296, 0.02536038398742676, 0.025243648529052733, 0.025226591110229492, 0.0255164794921875, 0.025215200424194336, 0.025257120132446288, 0.025242464065551758, 0.02537676811218262, 0.025266176223754884, 0.025228639602661133, 0.02628374481201172, 0.025768863677978517, 0.02530646324157715, 0.025176767349243165, 0.025200639724731445, 0.02511631965637207, 0.025166175842285157, 0.02570150375366211, 0.025068416595458984, 0.02507161521911621, 0.025099872589111328, 0.025170015335083007]",tokens/s,39.83634607615606,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File 
""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 335810 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 52.12 MiB is free. Process 334311 has 14.69 GiB memory in use. Of the allocated memory 14.47 GiB is allocated by PyTorch, and 108.93 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1203, in __init__ self.model = Qwen2MoeModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 750, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 349, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 347749 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() 
File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 341119 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,906.30144,4682.809344,0.0,4280.287232,4115.121152,s,1,7.5672734375,7.5672734375,0.0,7.5672734375,7.5672734375,7.5672734375,7.5672734375,[7.5672734375],,kWh,6.013593016647671e-06,6.458906988459622e-07,1.9627793479970457e-06,8.622263063490679e-06,,MB,1357.848576,4991.090688,0.0,4575.985664,4408.410112,s,10,0.405246150970459,0.0405246150970459,0.0010754830454049622,0.04023990440368652,0.04088377304077149,0.0422539192199707,0.04335003616333008,"[0.04362406539916992, 0.040110816955566404, 0.03954483032226563, 0.04005523300170898, 0.04057929611206055, 0.04048505783081055, 0.03991904067993164, 0.04006403350830078, 0.04036899185180664, 0.04049478530883789]",tokens/s,6317.148216878721,kWh,1.4438124508620374e-06,1.5914527630567144e-07,9.513855449063385e-07,2.5543432720740473e-06,tokens/kWh,100221455.27532639,MB,1390.780416,4993.18784,0.0,4575.985664,4408.412672,s,10,16.754051147460938,1.6754051147460938,0.02049981832845731,1.686553955078125,1.690725634765625,1.6939338623046876,1.6965004443359375,"[1.64600732421875, 1.64257080078125, 1.644931884765625, 1.684254638671875, 1.68716357421875, 1.6900126953125, 1.69714208984375, 1.687793701171875, 1.6882301025390625, 1.6859443359375]",tokens/s,37.602845691173385,kWh,4.75280311370544e-05,5.242102310028096e-06,3.0587750797494797e-05,8.335788424457728e-05,tokens/kWh,755777.3397313448,,s,630,16.751672105789176,0.0265899557234749,0.00046375070685417873,0.0266900634765625,0.026951702308654787,0.02712471675872803,0.027999211711883545,"[0.025983999252319336, 0.025923583984375, 0.025911584854125976, 0.02589651107788086, 0.026959552764892578, 0.027025888442993164, 0.026009599685668947, 0.025971744537353517, 0.02619696044921875, 0.025866239547729493, 0.02581292724609375, 0.025869728088378906, 0.02618025588989258, 0.025839103698730468, 0.025938432693481447, 0.025886175155639648, 0.02611459159851074, 0.026095232009887694, 0.02642777633666992, 0.026555839538574218, 0.02632512092590332, 0.026056671142578126, 0.026010080337524413, 0.026088991165161134, 0.025878816604614257, 0.02599772834777832, 0.025838752746582032, 0.025944320678710938, 0.025874656677246095, 0.025958560943603517, 0.025839616775512695, 0.025968639373779297, 0.025819135665893556, 0.025851295471191405, 0.02583407974243164, 0.025960447311401368, 0.025899007797241212, 0.025797664642333986, 0.025837568283081053, 0.02596348762512207, 0.025927328109741212, 0.025968992233276367, 0.026187776565551758, 0.025874431610107423, 0.02588057518005371, 0.025774080276489256, 0.028279935836791992, 0.0262696647644043, 0.026151456832885743, 0.025987455368041992, 0.026109344482421876, 0.025926240921020506, 0.026060096740722655, 0.025952959060668947, 0.02599648094177246, 
0.025944896697998047, 0.02613043212890625, 0.026396671295166017, 0.026789823532104493, 0.028964927673339844, 0.02591334342956543, 0.026183679580688478, 0.02592767906188965, 0.026233312606811523, 0.026172447204589843, 0.026051551818847656, 0.026051776885986328, 0.02594054412841797, 0.02604047966003418, 0.025900991439819335, 0.025966751098632813, 0.026064895629882814, 0.02594611167907715, 0.02592972755432129, 0.026480640411376953, 0.025991167068481445, 0.025924928665161134, 0.02607379150390625, 0.025964544296264647, 0.026072704315185546, 0.025923999786376953, 0.025937887191772462, 0.02596659278869629, 0.02740838432312012, 0.027314176559448244, 0.026003456115722655, 0.02617241668701172, 0.026407936096191405, 0.0261724796295166, 0.025883007049560546, 0.025862592697143555, 0.02600726318359375, 0.026081695556640624, 0.026180959701538085, 0.026088096618652343, 0.02672640037536621, 0.02630451202392578, 0.026015071868896483, 0.025992095947265623, 0.025904895782470704, 0.025935199737548827, 0.02596726417541504, 0.026136608123779298, 0.025874399185180665, 0.02595408058166504, 0.026009824752807616, 0.025976831436157227, 0.02589027214050293, 0.026055200576782227, 0.025959840774536135, 0.026074848175048827, 0.025983871459960936, 0.02601923179626465, 0.025973344802856447, 0.025851808547973632, 0.025744640350341796, 0.025966623306274413, 0.026129215240478516, 0.026042367935180662, 0.025886720657348632, 0.025874431610107423, 0.025935871124267578, 0.02588057518005371, 0.02594780731201172, 0.026043807983398438, 0.026089408874511718, 0.026154592514038087, 0.02612816047668457, 0.02611052894592285, 0.026013280868530272, 0.026257888793945312, 0.026203744888305663, 0.026658239364624022, 0.026422239303588866, 0.026220544815063477, 0.026107263565063477, 0.026003679275512694, 0.026278303146362304, 0.025999359130859375, 0.02596659278869629, 0.026099552154541017, 0.02605401611328125, 0.026004255294799803, 0.026038015365600586, 0.025925888061523437, 0.025994911193847656, 0.025975135803222655, 0.026044416427612304, 0.026011295318603515, 0.02618547248840332, 0.026135135650634765, 0.02634364891052246, 0.026086496353149413, 0.025977439880371093, 0.026112096786499023, 0.02612019157409668, 0.025993215560913087, 0.02596803283691406, 0.025903711318969725, 0.026111967086791994, 0.026025632858276367, 0.026177248001098632, 0.026696096420288085, 0.026265472412109376, 0.026194303512573243, 0.026248767852783204, 0.026159551620483397, 0.02607923126220703, 0.02604204750061035, 0.026115808486938476, 0.026352224349975587, 0.026388383865356444, 0.02613257598876953, 0.02617344093322754, 0.025965856552124023, 0.02588153648376465, 0.02598886489868164, 0.02592723274230957, 0.025857919692993163, 0.025946559906005858, 0.02587049674987793, 0.02633318328857422, 0.025845344543457032, 0.026136640548706055, 0.026155359268188478, 0.02609971237182617, 0.026036224365234374, 0.026076799392700196, 0.025948543548583985, 0.02604022407531738, 0.026019840240478515, 0.026191871643066408, 0.02611097526550293, 0.02611916732788086, 0.026068416595458985, 0.025996063232421877, 0.02605558395385742, 0.02641334342956543, 0.026732351303100584, 0.026761375427246093, 0.026782112121582033, 0.026781631469726563, 0.026744415283203125, 0.02785708808898926, 0.0267326717376709, 0.026918752670288086, 0.027048479080200194, 0.027346624374389648, 0.026935615539550782, 0.026951295852661133, 0.027717344284057616, 0.027277984619140626, 0.026771455764770507, 0.026808319091796876, 0.02694758415222168, 0.026631616592407228, 0.026757408142089843, 0.02665193557739258, 
0.0269752311706543, 0.026746879577636717, 0.026703296661376955, 0.02675564765930176, 0.02693734359741211, 0.026914304733276367, 0.026868223190307617, 0.026671104431152344, 0.026812671661376953, 0.026722047805786135, 0.026736640930175783, 0.02677302360534668, 0.026737119674682615, 0.026789888381958008, 0.02663734436035156, 0.026735551834106447, 0.02665679931640625, 0.0265850887298584, 0.02730803108215332, 0.026703872680664063, 0.026488832473754883, 0.026578048706054687, 0.026624895095825194, 0.026549375534057618, 0.026654752731323242, 0.026702688217163085, 0.027146240234375, 0.026901920318603514, 0.026565216064453126, 0.026666080474853516, 0.02687887954711914, 0.026889247894287108, 0.026729183197021486, 0.02669491195678711, 0.027070783615112306, 0.02677395248413086, 0.026873823165893554, 0.026750751495361328, 0.026714368820190428, 0.026768831253051757, 0.026792768478393555, 0.026739744186401366, 0.02670252799987793, 0.02682035255432129, 0.026915103912353515, 0.026664735794067383, 0.02689811134338379, 0.02676543998718262, 0.02679974365234375, 0.026795808792114257, 0.026936319351196288, 0.02676121520996094, 0.026732351303100584, 0.02674073600769043, 0.026878143310546877, 0.026847232818603517, 0.02681772804260254, 0.02675974464416504, 0.0268351993560791, 0.02674608039855957, 0.026831647872924805, 0.02672640037536621, 0.02707196807861328, 0.026859615325927736, 0.02674732780456543, 0.02675916862487793, 0.026718112945556642, 0.02692633628845215, 0.026821216583251952, 0.026855680465698244, 0.026892288208007813, 0.026644479751586913, 0.026615808486938477, 0.02666700744628906, 0.026703872680664063, 0.02669718360900879, 0.026681888580322267, 0.026678911209106447, 0.02669401550292969, 0.02672630310058594, 0.026646623611450194, 0.02670796775817871, 0.02672230339050293, 0.026750911712646486, 0.026673215866088867, 0.026828800201416016, 0.02667286491394043, 0.026718496322631836, 0.026736640930175783, 0.026840576171875, 0.026716672897338867, 0.02698195266723633, 0.026900928497314455, 0.026627424240112305, 0.026694143295288086, 0.02668560028076172, 0.026788095474243163, 0.02701312065124512, 0.02715238380432129, 0.027971584320068358, 0.02999295997619629, 0.027022720336914063, 0.027279712677001952, 0.02677564811706543, 0.026963327407836916, 0.02670809555053711, 0.026817216873168945, 0.026779647827148437, 0.026703231811523436, 0.026794624328613282, 0.0267509765625, 0.02674835205078125, 0.026769983291625978, 0.027045888900756834, 0.026748479843139647, 0.02670636749267578, 0.026861568450927735, 0.026810016632080078, 0.026697504043579103, 0.026896223068237305, 0.026656543731689453, 0.0268185920715332, 0.026604448318481445, 0.02664838409423828, 0.026627679824829102, 0.026729055404663086, 0.026830848693847657, 0.026665983200073243, 0.026674175262451173, 0.02660700798034668, 0.0266778564453125, 0.026627487182617187, 0.02691747283935547, 0.026730079650878907, 0.026765727996826173, 0.027027456283569336, 0.026793983459472655, 0.026656448364257814, 0.02676153564453125, 0.026733983993530275, 0.026581600189208986, 0.026644479751586913, 0.026828800201416016, 0.02668339157104492, 0.02671615982055664, 0.026662176132202148, 0.0266060791015625, 0.02670614433288574, 0.02670796775817871, 0.026638336181640625, 0.026677248001098632, 0.02657689666748047, 0.02666700744628906, 0.026597375869750976, 0.02674483108520508, 0.026650592803955077, 0.026566688537597655, 0.026660863876342773, 0.02663862419128418, 0.02662575912475586, 0.02705433654785156, 0.02681875228881836, 0.026812255859375, 0.027418655395507814, 0.02687433624267578, 
0.026868736267089844, 0.02688102340698242, 0.026709823608398436, 0.026988256454467775, 0.026732736587524415, 0.02745065689086914, 0.02676367950439453, 0.02688172721862793, 0.026762144088745117, 0.0267775993347168, 0.02674668884277344, 0.026767551422119142, 0.02676870346069336, 0.026691295623779296, 0.026874847412109375, 0.02686566352844238, 0.026944799423217772, 0.02680431938171387, 0.026894655227661133, 0.026707616806030274, 0.02674959945678711, 0.026632192611694337, 0.026608991622924804, 0.02675779151916504, 0.026845184326171875, 0.02662598419189453, 0.026673215866088867, 0.02658095932006836, 0.026798112869262695, 0.026712064743041993, 0.02717215919494629, 0.029690559387207032, 0.026955360412597655, 0.02661417579650879, 0.026793983459472655, 0.0269036808013916, 0.026718271255493163, 0.026710847854614257, 0.026688831329345703, 0.026759872436523436, 0.026806272506713868, 0.026662912368774414, 0.02749235153198242, 0.02834016036987305, 0.026851423263549806, 0.02674627113342285, 0.02676799964904785, 0.027180992126464843, 0.026814464569091798, 0.02666700744628906, 0.026879072189331055, 0.026766239166259767, 0.02676531219482422, 0.026889440536499023, 0.026984575271606446, 0.028850496292114256, 0.02688649559020996, 0.026684640884399414, 0.027252767562866213, 0.026791967391967774, 0.02680147171020508, 0.026678112030029295, 0.026781824111938475, 0.026684928894042968, 0.02680227279663086, 0.02679030418395996, 0.026927104949951174, 0.028010496139526365, 0.02746953582763672, 0.027584800720214842, 0.027049983978271484, 0.026860544204711914, 0.02679910469055176, 0.026679136276245116, 0.02678390312194824, 0.02674627113342285, 0.026553951263427734, 0.026698944091796874, 0.027002368927001953, 0.027117887496948243, 0.026806272506713868, 0.027064319610595702, 0.026853376388549805, 0.026721567153930665, 0.026608224868774413, 0.02652297592163086, 0.026634143829345702, 0.02659820747375488, 0.02660563278198242, 0.02655232048034668, 0.026712064743041993, 0.026913888931274416, 0.026749631881713868, 0.02687164878845215, 0.026662879943847657, 0.02657321548461914, 0.026615680694580077, 0.02659119987487793, 0.02663235282897949, 0.026725664138793945, 0.02687068748474121, 0.026588672637939452, 0.026468223571777343, 0.02661625671386719, 0.026488832473754883, 0.026767328262329103, 0.026550304412841796, 0.02654207992553711, 0.026583040237426758, 0.02662544059753418, 0.02657254409790039, 0.026820768356323244, 0.02667795181274414, 0.02662940788269043, 0.02660544013977051, 0.027130304336547853, 0.026587135314941408, 0.026667360305786134, 0.02705177688598633, 0.026997055053710937, 0.026826751708984374, 0.027187936782836913, 0.02695782470703125, 0.026720256805419923, 0.027031551361083983, 0.02661507225036621, 0.026857280731201173, 0.026827680587768556, 0.026902528762817384, 0.026869760513305665, 0.02684035110473633, 0.02673846435546875, 0.026780223846435545, 0.026593664169311523, 0.02674892807006836, 0.02669059181213379, 0.026872608184814455, 0.026697919845581054, 0.02656595230102539, 0.026716575622558594, 0.02662428855895996, 0.026754079818725587, 0.0268720645904541, 0.026643264770507814, 0.027097152709960937, 0.026602752685546877, 0.02675071907043457, 0.02679417610168457, 0.026815135955810546, 0.02694963264465332, 0.02735660743713379, 0.027617855072021483, 0.026755071640014647, 0.02680569648742676, 0.026810943603515627, 0.026681343078613282, 0.02697420883178711, 0.02675712013244629, 0.026658815383911134, 0.026752416610717773, 0.02670220756530762, 0.026685407638549805, 0.026716415405273437, 0.026698816299438478, 
0.02668227195739746, 0.026705663681030275, 0.02702672004699707, 0.026887168884277345, 0.026736640930175783, 0.026620031356811524, 0.02655014419555664, 0.026695680618286134, 0.026636287689208983, 0.02717900848388672, 0.026755071640014647, 0.02680847930908203, 0.026668895721435548, 0.02676736068725586, 0.026821983337402343, 0.02669635200500488, 0.02648624038696289, 0.026696224212646485, 0.02674892807006836, 0.026707296371459962, 0.02726092720031738, 0.02676736068725586, 0.026916864395141602, 0.027047264099121095, 0.026624671936035155, 0.026689504623413084, 0.02668070411682129, 0.026956447601318358, 0.026843135833740234, 0.02674483108520508, 0.026785791397094725, 0.026712064743041993, 0.026863616943359377, 0.026697727203369142, 0.026643999099731447, 0.02670230484008789, 0.02676940727233887, 0.026632192611694337, 0.026685440063476562, 0.02674892807006836, 0.026624000549316407, 0.02671536064147949, 0.02666716766357422, 0.02676348876953125, 0.02664694404602051, 0.026976255416870116, 0.02675302314758301, 0.026664960861206056, 0.026685440063476562, 0.026654720306396484, 0.026677248001098632, 0.026685440063476562, 0.026635295867919923, 0.026926048278808595, 0.026621952056884765, 0.026724288940429688, 0.02677356719970703, 0.02675916862487793, 0.026582143783569337, 0.026722240447998046, 0.02701817512512207, 0.026908672332763672, 0.02671820831298828, 0.02675446319580078, 0.026600032806396483, 0.02690876770019531, 0.02673859214782715, 0.02672640037536621, 0.026625280380249024, 0.02679475212097168, 0.026714111328125, 0.026735776901245116, 0.026569568634033203, 0.026839040756225587, 0.026689535140991212, 0.026808319091796876, 0.026748672485351562, 0.02689833641052246, 0.0269069766998291, 0.026867712020874023, 0.026673152923583986, 0.026836095809936525, 0.026577791213989257]",tokens/s,37.60818597817941,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1203, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 750, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 349, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 346244 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1032, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.16 GiB. GPU 0 has a total capacity of 14.74 GiB of which 774.12 MiB is free. Process 332719 has 13.98 GiB memory in use. Of the allocated memory 13.72 GiB is allocated by PyTorch, and 148.73 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ 
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 339965 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1203, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 
977, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 977, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 750, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 349, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 347353 has 14.73 GiB memory in use. Of the allocated memory 12.32 GiB is allocated by PyTorch, and 2.30 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,905.490432,4682.809344,0.0,4280.287232,4115.121152,s,1,7.299376953125,7.299376953125,0.0,7.299376953125,7.299376953125,7.299376953125,7.299376953125,[7.299376953125],,kWh,5.660697145928376e-06,6.171931436701053e-07,2.241112903977882e-06,8.519003193576364e-06,,MB,1238.880256,4988.993536,0.0,4575.985664,4408.410112,s,10,2.7811087951660154,0.2781108795166015,0.0015564051158022385,0.2778040008544922,0.2798568664550781,0.28081182556152345,0.28157579284667966,"[0.28176678466796873, 0.2768359985351562, 0.2776362609863281, 0.27609292602539065, 0.27669635009765625, 0.2787339172363281, 0.2778561401367188, 0.2796446533203125, 0.2777518615722656, 0.27809390258789063]",tokens/s,920.4961720482365,kWh,8.427887187499436e-06,9.294501469193105e-07,5.566988580571872e-06,1.492432591499062e-05,tokens/kWh,17153203.532151684,MB,1263.86176,4988.993536,0.0,4575.985664,4408.412672,s,10,14.965840576171875,1.4965840576171874,0.005196321061467178,1.4935886840820314,1.50277080078125,1.5044365478515624,1.5057691455078124,"[1.4908408203125, 1.506102294921875, 1.5012957763671875, 1.4939710693359376, 1.4923868408203125, 1.492239990234375, 1.492476318359375, 1.493206298828125, 1.5009205322265624, 
1.502400634765625]",tokens/s,42.09586469891077,kWh,4.339269693374718e-05,4.786014609931979e-06,2.7918677096827204e-05,7.609738864050637e-05,tokens/kWh,827886.4902660447,,s,630,14.96376889801025,0.023752014123825802,0.00048624687901072545,0.023658416748046876,0.02402248954772949,0.024244022846221923,0.025313516693115237,"[0.0253253116607666, 0.02402124786376953, 0.023854848861694335, 0.02368451118469238, 0.023726943969726563, 0.023615488052368162, 0.023562240600585937, 0.023564287185668945, 0.023666688919067383, 0.023502815246582032, 0.02350067138671875, 0.023537376403808593, 0.023556543350219727, 0.0235798397064209, 0.023513919830322267, 0.02355561637878418, 0.02409622383117676, 0.02349273681640625, 0.023493087768554688, 0.02355036735534668, 0.02347007942199707, 0.02364825630187988, 0.02350694465637207, 0.023498367309570313, 0.02346214485168457, 0.02395523262023926, 0.02365679931640625, 0.023521120071411133, 0.023606784820556642, 0.02355471992492676, 0.023447551727294923, 0.023521280288696288, 0.023440767288208007, 0.02361612892150879, 0.023586816787719726, 0.023658496856689453, 0.023642112731933593, 0.023543296813964845, 0.023668928146362303, 0.02350057601928711, 0.023775775909423827, 0.023631296157836913, 0.02384339141845703, 0.023742048263549805, 0.023877824783325195, 0.02361280059814453, 0.023783872604370117, 0.02358518409729004, 0.023588191986083983, 0.023601823806762696, 0.023803903579711915, 0.023584863662719727, 0.023775136947631836, 0.023566112518310547, 0.023603424072265625, 0.023584768295288085, 0.02346700859069824, 0.02353459167480469, 0.023879680633544922, 0.023629823684692384, 0.023601152420043944, 0.023520288467407228, 0.024144319534301757, 0.024041471481323243, 0.025219007492065428, 0.024535072326660155, 0.02402060890197754, 0.024043167114257812, 0.024339168548583985, 0.023749759674072266, 0.02355289649963379, 0.02342092704772949, 0.02357414436340332, 0.023513471603393555, 0.02368716812133789, 0.023803903579711915, 0.023640352249145506, 0.023773151397705077, 0.023698623657226563, 0.027341375350952147, 0.023768064498901367, 0.023962015151977538, 0.02359119987487793, 0.023587135314941405, 0.023566335678100587, 0.023879264831542967, 0.023582752227783204, 0.023769472122192385, 0.02409267234802246, 0.024217599868774413, 0.023666688919067383, 0.023619583129882812, 0.023764991760253908, 0.025331071853637695, 0.024922143936157225, 0.02386764717102051, 0.023723552703857422, 0.024191999435424806, 0.02372787284851074, 0.023678783416748048, 0.02362598419189453, 0.023623680114746092, 0.02371379280090332, 0.023817888259887697, 0.0237238712310791, 0.023654783248901367, 0.023936992645263673, 0.023678335189819336, 0.02371664047241211, 0.023619583129882812, 0.02379123115539551, 0.023736352920532226, 0.023755008697509766, 0.023646303176879883, 0.02362931251525879, 0.023618047714233398, 0.023698720932006836, 0.02382307243347168, 0.023744096755981447, 0.023716192245483398, 0.02370902442932129, 0.023813919067382814, 0.023738847732543946, 0.023852895736694336, 0.02403366470336914, 0.024008352279663085, 0.02478473663330078, 0.024101184844970702, 0.023824384689331055, 0.023597055435180665, 0.02372377586364746, 0.023759103775024413, 0.024039424896240235, 0.02393087959289551, 0.023875583648681642, 0.023767040252685546, 0.02408448028564453, 0.023862464904785156, 0.023838720321655273, 0.024045503616333008, 0.02388038444519043, 0.024045759201049805, 0.024123392105102538, 0.0239749755859375, 0.023720895767211914, 0.02373219108581543, 0.02367081642150879, 0.02366464042663574, 0.02378246307373047, 
0.02383305549621582, 0.023863712310791017, 0.023636032104492187, 0.02374684715270996, 0.023684831619262697, 0.023731584548950194, 0.02370569610595703, 0.023642656326293945, 0.02371321678161621, 0.023788095474243164, 0.02381395149230957, 0.023877567291259765, 0.024047872543334962, 0.02369740867614746, 0.02370150375366211, 0.02373206329345703, 0.023711008071899416, 0.023681568145751952, 0.023715360641479492, 0.02381216049194336, 0.024115936279296875, 0.023791135787963866, 0.023683584213256836, 0.023631776809692383, 0.02372617530822754, 0.0235883846282959, 0.0238002872467041, 0.024244224548339844, 0.02392678451538086, 0.023877119064331053, 0.023910783767700197, 0.023959680557250975, 0.024209407806396483, 0.0236844482421875, 0.023736160278320314, 0.023567167282104493, 0.023600896835327147, 0.02368115234375, 0.023703424453735352, 0.023693248748779296, 0.024558528900146485, 0.023905984878540038, 0.024035648345947267, 0.023723552703857422, 0.023759328842163085, 0.023648096084594727, 0.02359926414489746, 0.023611391067504883, 0.023578624725341796, 0.023556095123291015, 0.02368511962890625, 0.023516416549682617, 0.023630367279052735, 0.023560415267944335, 0.023595232009887696, 0.023557920455932618, 0.023619007110595704, 0.02361196708679199, 0.02366582489013672, 0.023692127227783202, 0.023783424377441405, 0.02370150375366211, 0.02371583938598633, 0.023629823684692384, 0.023525375366210938, 0.023577983856201173, 0.023654336929321288, 0.023718175888061525, 0.023686847686767577, 0.02381488037109375, 0.02368889617919922, 0.023494752883911132, 0.0238853759765625, 0.023837663650512694, 0.023766048431396486, 0.02377795219421387, 0.023625152587890625, 0.023605823516845703, 0.023576576232910155, 0.024458303451538085, 0.02372006416320801, 0.023646400451660155, 0.023689855575561525, 0.023560192108154295, 0.023734272003173826, 0.024243776321411132, 0.023811935424804687, 0.023658336639404295, 0.023654144287109376, 0.023612415313720703, 0.023654272079467773, 0.023611520767211912, 0.02365145683288574, 0.024296319961547852, 0.023737695693969725, 0.02367535972595215, 0.023586271286010742, 0.02357872009277344, 0.02353420829772949, 0.023547903060913086, 0.02356012725830078, 0.023613407135009767, 0.023756032943725587, 0.02472175979614258, 0.024002559661865236, 0.023735519409179687, 0.023735071182250978, 0.02369740867614746, 0.023658496856689453, 0.023609344482421874, 0.023588863372802735, 0.023646207809448243, 0.02349567985534668, 0.02347315216064453, 0.023598175048828125, 0.02376495933532715, 0.02358982467651367, 0.023611391067504883, 0.023521280288696288, 0.023627519607543945, 0.02349635124206543, 0.023571039199829103, 0.023557695388793945, 0.02357823944091797, 0.02373651123046875, 0.02378816032409668, 0.023633920669555664, 0.023821632385253907, 0.02371766471862793, 0.023804832458496093, 0.023786592483520507, 0.02383523178100586, 0.02398854446411133, 0.02384828758239746, 0.023540128707885744, 0.023611648559570313, 0.025032064437866212, 0.023720575332641602, 0.023558143615722657, 0.0235100154876709, 0.023444480895996093, 0.023482112884521483, 0.023505023956298828, 0.023568511962890625, 0.02371993637084961, 0.02370902442932129, 0.023689888000488282, 0.02361907196044922, 0.02365216064453125, 0.023579328536987305, 0.023739936828613282, 0.023589344024658204, 0.023547136306762695, 0.02353638458251953, 0.023556447982788085, 0.02358176040649414, 0.02358460807800293, 0.023927295684814453, 0.02380620765686035, 0.02370470428466797, 0.02352012825012207, 0.023535327911376955, 0.023501087188720703, 0.023504896163940428, 
0.023721567153930666, 0.02358108711242676, 0.024629791259765624, 0.02524310493469238, 0.02376963233947754, 0.02358505630493164, 0.023889984130859375, 0.02369055938720703, 0.023680896759033204, 0.02359663963317871, 0.023607328414916993, 0.02345587158203125, 0.023949216842651368, 0.023689855575561525, 0.023664831161499023, 0.02369126319885254, 0.02364825630187988, 0.02369126319885254, 0.023580671310424805, 0.023533056259155274, 0.02364863967895508, 0.023631999969482422, 0.023617536544799804, 0.02366873550415039, 0.02367487907409668, 0.023584768295288085, 0.023451648712158202, 0.023542911529541015, 0.023573375701904296, 0.023504896163940428, 0.023578624725341796, 0.023719808578491212, 0.023539808273315428, 0.023465055465698242, 0.02353657531738281, 0.023609088897705077, 0.023463712692260743, 0.023536096572875975, 0.023516511917114256, 0.023618207931518555, 0.023545791625976562, 0.023533952713012694, 0.023710432052612303, 0.023641056060791015, 0.023777280807495117, 0.023558143615722657, 0.023590496063232422, 0.023773056030273437, 0.0235864315032959, 0.023638784408569338, 0.025907360076904296, 0.02367487907409668, 0.02349260711669922, 0.023414783477783203, 0.02351103973388672, 0.023705791473388672, 0.023729984283447265, 0.023673088073730468, 0.023469823837280274, 0.023613119125366212, 0.02351136016845703, 0.023441024780273437, 0.023480255126953124, 0.02346028709411621, 0.02352067184448242, 0.02426470375061035, 0.023767040252685546, 0.02358246421813965, 0.0234532470703125, 0.023501472473144533, 0.023511072158813477, 0.023799360275268553, 0.02357088088989258, 0.02347360038757324, 0.02371174430847168, 0.023494495391845702, 0.023472864151000975, 0.023480319976806642, 0.023640064239501952, 0.023529727935791014, 0.02355129623413086, 0.02345619201660156, 0.023613439559936524, 0.02349260711669922, 0.02371379280090332, 0.02349056053161621, 0.023641984939575197, 0.023624895095825195, 0.02363488006591797, 0.02372198486328125, 0.023562240600585937, 0.023514976501464845, 0.023379135131835937, 0.023470367431640625, 0.023525087356567383, 0.023505088806152343, 0.023704351425170897, 0.02431795120239258, 0.023719839096069336, 0.02372822380065918, 0.02533087921142578, 0.024037567138671875, 0.02367679977416992, 0.023638784408569338, 0.023523008346557617, 0.024108800888061523, 0.02357689666748047, 0.02361574363708496, 0.0237957763671875, 0.023637184143066405, 0.023707551956176756, 0.023701408386230468, 0.02379395294189453, 0.023760927200317382, 0.024857152938842775, 0.023721536636352538, 0.023649824142456054, 0.023548896789550782, 0.023504896163940428, 0.023619583129882812, 0.023670783996582033, 0.023543359756469727, 0.02350320053100586, 0.023582815170288086, 0.02358025550842285, 0.023488927841186523, 0.023899263381958007, 0.023581567764282226, 0.02457382392883301, 0.023885663986206056, 0.02372003173828125, 0.026579488754272462, 0.024807167053222657, 0.02360291290283203, 0.02362828826904297, 0.02463747215270996, 0.023740415573120118, 0.023449855804443358, 0.023452896118164063, 0.023429567337036133, 0.023498847961425783, 0.023488512039184572, 0.0234368953704834, 0.02340822410583496, 0.0235928955078125, 0.023452543258666993, 0.02350035285949707, 0.023619487762451173, 0.023674400329589843, 0.023727327346801757, 0.023641216278076173, 0.02360963249206543, 0.023597280502319337, 0.023699615478515627, 0.02363107109069824, 0.023575328826904298, 0.023591936111450194, 0.02366771125793457, 0.023472127914428712, 0.02348441505432129, 0.023609344482421874, 0.023755807876586914, 0.023698400497436524, 0.02370150375366211, 
0.02366873550415039, 0.0234899845123291, 0.02350249671936035, 0.023530399322509766, 0.023916543960571288, 0.023584768295288085, 0.02366054344177246, 0.02354694366455078, 0.023526079177856447, 0.02352761650085449, 0.02356982421875, 0.02368783950805664, 0.023655807495117188, 0.023660768508911134, 0.02368953514099121, 0.023592704772949218, 0.023660768508911134, 0.02360697555541992, 0.023818687438964845, 0.023646207809448243, 0.023525344848632813, 0.023598175048828125, 0.02355670356750488, 0.023492959976196288, 0.02354380798339844, 0.023586048126220702, 0.023518976211547853, 0.02528463935852051, 0.023973440170288084, 0.023628192901611327, 0.023545856475830077, 0.023754751205444336, 0.0236810245513916, 0.02355580711364746, 0.023728416442871093, 0.024057088851928712, 0.024136255264282227, 0.02373184013366699, 0.023832639694213866, 0.023949151992797853, 0.023812448501586914, 0.023912864685058592, 0.024069248199462892, 0.023953664779663087, 0.024565696716308594, 0.024124000549316408, 0.023820287704467775, 0.02386944007873535, 0.02383180809020996, 0.02367519950866699, 0.023673280715942383, 0.023642112731933593, 0.02492416000366211, 0.02405948829650879, 0.024002975463867187, 0.024043455123901367, 0.024415456771850585, 0.024050527572631836, 0.023853023529052733, 0.02382771110534668, 0.02393907165527344, 0.02372483253479004, 0.023664255142211914, 0.023570175170898437, 0.023677568435668945, 0.023547199249267577, 0.023550655364990233, 0.023590272903442382, 0.023992959976196288, 0.02400364875793457, 0.02396460723876953, 0.02376265525817871, 0.023767616271972655, 0.023529184341430663, 0.023858240127563476, 0.023675039291381837, 0.023624479293823244, 0.02359609603881836, 0.02372003173828125, 0.023440223693847656, 0.023468000411987305, 0.023598976135253906, 0.023668895721435546, 0.023496223449707032, 0.023594591140747072, 0.023716543197631838, 0.02350230407714844, 0.02343190383911133, 0.023520544052124025, 0.023542495727539064, 0.024832000732421877, 0.023791616439819335, 0.023645887374877928, 0.0235864315032959, 0.02403398323059082, 0.02372198486328125, 0.023529472351074218, 0.023588863372802735, 0.023406591415405274, 0.02369740867614746, 0.023610815048217774, 0.02364259147644043, 0.02357801628112793, 0.023724735260009764, 0.02360028839111328, 0.023692127227783202, 0.023791040420532227, 0.02370207977294922, 0.023602752685546874, 0.023870208740234374, 0.023520959854125976, 0.023646207809448243, 0.023541759490966797, 0.02352742385864258, 0.024037376403808593, 0.023500799179077148, 0.024002559661865236, 0.023808000564575195, 0.023793664932250977, 0.023658496856689453, 0.023568384170532225, 0.02358428764343262, 0.023484832763671876, 0.02361071968078613, 0.023552736282348632, 0.02367228889465332, 0.023673376083374022, 0.023605247497558594, 0.02346188735961914, 0.023564287185668945, 0.023478271484375, 0.02358822441101074, 0.023580703735351562, 0.023583328247070313, 0.023631872177124022, 0.02359712028503418, 0.02359440040588379, 0.02415056037902832, 0.032349822998046875, 0.023669120788574218, 0.023633920669555664, 0.02379520034790039, 0.0238023681640625, 0.023883775711059572, 0.023875680923461914, 0.023770336151123048, 0.023575008392333983, 0.02358255958557129, 0.02362611198425293, 0.023859199523925782, 0.023975936889648438, 0.024276992797851563, 0.023843999862670898]",tokens/s,42.101692714846166,, 
float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,909.725696,3361.603584,0.0,2959.081472,2957.493248,s,1,7.60452490234375,7.60452490234375,0.0,7.60452490234375,7.60452490234375,7.60452490234375,7.60452490234375,[7.60452490234375],,kWh,5.595597087494752e-06,6.044805812134499e-07,1.8147236740262418e-06,8.014801342734444e-06,,MB,1250.607104,3430.8096,0.0,3017.801728,2552.887296,s,10,0.5818296318054199,0.05818296318054199,0.0012714890764313255,0.05797870445251465,0.05876977081298828,0.06024178085327148,0.06141938888549805,"[0.061713790893554686, 0.05771535873413086, 0.05802668762207031, 0.058192222595214844, 0.05762656021118164, 0.056909217834472656, 0.05793072128295899, 0.05700534439086914, 0.05826707077026367, 0.058442657470703124]",tokens/s,4399.913411175551,kWh,2.0635161706864815e-06,2.2756753727651738e-07,1.3624052370280678e-06,3.6534889449910666e-06,tokens/kWh,70070008.10854402,MB,1262.440448,3430.8096,0.0,3017.801728,2552.889856,s,10,14.633812255859375,1.4633812255859375,0.004870545399077899,1.4631318969726563,1.4688968383789063,1.4703733459472657,1.471554552001953,"[1.4611138916015625, 1.465158203125, 1.464537353515625, 1.454272705078125, 1.471849853515625, 1.4617264404296875, 1.467177490234375, 1.458755859375, 1.4685687255859374, 1.4606517333984375]",tokens/s,43.050982818762634,kWh,4.277376780681585e-05,4.717587946911047e-06,2.1461363021972387e-05,6.895271877569928e-05,tokens/kWh,913669.5567427404,,s,630,14.631768081665042,0.023225028701055618,0.00037203471789034475,0.02311768054962158,0.023592613601684572,0.0238322922706604,0.024739179592132572,"[0.023212032318115236, 0.02456163215637207, 0.023625759124755858, 0.023035903930664063, 0.02314854431152344, 0.02320902442932129, 0.023266239166259764, 0.023021408081054687, 0.023113119125366212, 0.024306432723999023, 0.023354623794555666, 0.023168960571289064, 0.023437215805053712, 0.02302864074707031, 0.023052288055419923, 0.02310051155090332, 0.023112607955932618, 0.023043136596679687, 0.02306662368774414, 0.023119808197021485, 0.023193920135498047, 0.02305094337463379, 0.023068031311035155, 0.02305196762084961, 0.023006143569946288, 0.023037824630737305, 0.02302521514892578, 0.023128639221191405, 0.02309119987487793, 0.023068256378173828, 0.023050559997558593, 0.02309744071960449, 0.023064704895019533, 0.0230828800201416, 0.02308710479736328, 0.023281087875366212, 0.023214048385620117, 0.02316041564941406, 0.02304921531677246, 0.02307891273498535, 0.023111679077148437, 0.023096511840820313, 0.02303878402709961, 0.023641439437866212, 0.023206016540527345, 0.02306265640258789, 0.02314179229736328, 0.02307583999633789, 0.023090431213378906, 0.023087263107299805, 0.023079103469848632, 0.02307244873046875, 0.023603904724121095, 0.023168575286865233, 0.023368160247802736, 0.023261119842529297, 0.023111520767211916, 0.02324265670776367, 0.023095359802246095, 0.023111648559570312, 0.02315907287597656, 0.02305023956298828, 
0.023172128677368165, 0.0235098876953125, 0.023302112579345703, 0.023060480117797853, 0.023040288925170897, 0.023114496231079102, 0.023054431915283204, 0.023142400741577147, 0.02306732749938965, 0.02302790451049805, 0.023187648773193358, 0.02304800033569336, 0.02302566337585449, 0.023032960891723634, 0.02538585662841797, 0.02454528045654297, 0.02316464042663574, 0.023132448196411134, 0.02299888038635254, 0.02302137565612793, 0.023095552444458007, 0.02309129524230957, 0.023057823181152345, 0.023515743255615236, 0.025898847579956055, 0.023040159225463867, 0.02300284767150879, 0.023033727645874025, 0.023067039489746095, 0.023087039947509765, 0.023420543670654298, 0.0232412166595459, 0.02316431999206543, 0.023128511428833008, 0.023111328125, 0.02307267189025879, 0.023124767303466798, 0.023142208099365236, 0.023070655822753906, 0.023017471313476562, 0.023020927429199218, 0.02309939193725586, 0.022976160049438477, 0.023024608612060547, 0.023173120498657225, 0.023592575073242188, 0.023156160354614257, 0.023268320083618163, 0.023043487548828127, 0.023001535415649414, 0.02304627227783203, 0.02322163200378418, 0.023064992904663087, 0.023072288513183593, 0.02305504035949707, 0.023121919631958008, 0.022965696334838866, 0.023179840087890625, 0.023126016616821288, 0.023193632125854492, 0.02296953582763672, 0.02441267204284668, 0.02366841506958008, 0.023256704330444335, 0.02343280029296875, 0.023048864364624024, 0.022841119766235353, 0.02293516731262207, 0.023027872085571287, 0.022980831146240235, 0.02290278434753418, 0.02299443244934082, 0.02377289581298828, 0.024763168334960936, 0.023130111694335938, 0.02311577606201172, 0.024206464767456054, 0.023040895462036134, 0.022996320724487304, 0.02306934356689453, 0.022983840942382813, 0.022911296844482423, 0.022984832763671876, 0.02314896011352539, 0.023121471405029296, 0.023091648101806642, 0.023107295989990236, 0.023148832321166993, 0.022955520629882813, 0.024435295104980468, 0.023062400817871094, 0.023064672470092775, 0.023362783432006835, 0.023132896423339842, 0.023040000915527343, 0.023111679077148437, 0.023021568298339845, 0.0232857608795166, 0.023013376235961915, 0.023138303756713868, 0.023093248367309572, 0.0230645751953125, 0.024680448532104493, 0.023932928085327147, 0.023217248916625976, 0.023200672149658205, 0.024135488510131836, 0.023548095703125, 0.023152639389038086, 0.02306972885131836, 0.023070783615112306, 0.023159711837768555, 0.023035327911376954, 0.02390278434753418, 0.023134208679199218, 0.023291711807250978, 0.02316624069213867, 0.023165855407714844, 0.023168352127075194, 0.023155616760253905, 0.023082752227783204, 0.023179264068603517, 0.023113727569580078, 0.023073055267333983, 0.023051584243774414, 0.02306892776489258, 0.02304630470275879, 0.023300128936767577, 0.02317673683166504, 0.024014976501464842, 0.0230894718170166, 0.023023616790771483, 0.023031808853149413, 0.022962175369262695, 0.023023616790771483, 0.023035999298095702, 0.02309724807739258, 0.022988800048828126, 0.023145727157592774, 0.023011104583740234, 0.02308799934387207, 0.02308073616027832, 0.0231296329498291, 0.023044031143188478, 0.02303651237487793, 0.02299833679199219, 0.02322220802307129, 0.023125055313110352, 0.02315667152404785, 0.023040128707885743, 0.02310531234741211, 0.022994752883911132, 0.023101280212402344, 0.02296467208862305, 0.02296953582763672, 0.023470527648925783, 0.02300761604309082, 0.023002399444580077, 0.022897375106811522, 0.02297612762451172, 0.023181695938110352, 0.023005184173583985, 0.022900703430175782, 0.022949344635009767, 
0.022958656311035157, 0.022883903503417968, 0.02296028709411621, 0.022900800704956054, 0.022863136291503907, 0.02294688034057617, 0.02286755180358887, 0.022988672256469726, 0.022933984756469728, 0.023097280502319337, 0.022994304656982423, 0.02298307228088379, 0.0230699520111084, 0.02297340774536133, 0.023355392456054686, 0.02310108757019043, 0.023085535049438475, 0.023230112075805665, 0.023087039947509765, 0.023099679946899414, 0.022976512908935546, 0.023064159393310548, 0.02300284767150879, 0.0230479679107666, 0.02322115135192871, 0.023980031967163085, 0.024158559799194335, 0.023592960357666014, 0.023603200912475586, 0.023629024505615236, 0.023759647369384764, 0.02355996894836426, 0.02351532745361328, 0.02359622383117676, 0.02333782386779785, 0.02330828857421875, 0.023100927352905275, 0.023093759536743166, 0.023053728103637695, 0.023254655838012697, 0.023154943466186524, 0.023212448120117187, 0.023099712371826172, 0.02307276725769043, 0.023242015838623047, 0.023227104187011717, 0.023461248397827147, 0.023794303894042967, 0.023647327423095704, 0.02373110389709473, 0.023826431274414063, 0.023988224029541014, 0.02404262351989746, 0.023767648696899416, 0.023803552627563476, 0.023716480255126952, 0.02379155158996582, 0.023875232696533202, 0.023837087631225586, 0.02386944007873535, 0.02365235137939453, 0.02350249671936035, 0.023125503540039064, 0.023120735168457033, 0.02308710479736328, 0.023115711212158205, 0.02298236846923828, 0.023262752532958984, 0.02324684715270996, 0.023116607666015625, 0.023188831329345704, 0.02310211181640625, 0.02317430305480957, 0.0231309757232666, 0.02311894416809082, 0.02305116844177246, 0.023048128128051758, 0.02312812805175781, 0.02307072067260742, 0.023011327743530274, 0.023259136199951173, 0.02311529541015625, 0.023101760864257814, 0.023135679244995117, 0.023009824752807616, 0.023001279830932617, 0.023085056304931642, 0.022990848541259764, 0.022992895126342772, 0.023666688919067383, 0.0230248966217041, 0.023036672592163087, 0.02306800079345703, 0.023482463836669923, 0.02304876708984375, 0.022953983306884765, 0.02348236846923828, 0.023104768753051758, 0.023436031341552734, 0.023092384338378905, 0.023112192153930664, 0.02314886474609375, 0.023070272445678712, 0.023046623229980467, 0.02299110412597656, 0.023000831604003905, 0.022951391220092772, 0.02332111930847168, 0.02333286476135254, 0.023009279251098632, 0.02355513572692871, 0.02303481674194336, 0.02299510383605957, 0.023012384414672852, 0.02308572769165039, 0.022986911773681642, 0.02326915168762207, 0.023230688095092773, 0.026355712890625, 0.023236576080322265, 0.023100543975830078, 0.023091487884521485, 0.023175039291381837, 0.02295459175109863, 0.02304751968383789, 0.023130943298339843, 0.023185184478759766, 0.02304636764526367, 0.023291040420532226, 0.023116640090942383, 0.023060287475585937, 0.023077056884765624, 0.023045856475830077, 0.023464223861694337, 0.023152639389038086, 0.023098495483398436, 0.02312895965576172, 0.02308844757080078, 0.023245119094848634, 0.023124351501464843, 0.023079072952270508, 0.023283391952514648, 0.023099552154541014, 0.023117536544799804, 0.02309267234802246, 0.02319651222229004, 0.023118911743164064, 0.023073728561401368, 0.02335468864440918, 0.023106239318847657, 0.02304204750061035, 0.023195648193359376, 0.023286239624023437, 0.023005088806152343, 0.02294588851928711, 0.023129791259765626, 0.022988447189331053, 0.023353504180908202, 0.022932031631469726, 0.022998592376708985, 0.023050624847412108, 0.023019519805908203, 0.02295542335510254, 0.02295180892944336, 
0.023100128173828127, 0.02306662368774414, 0.023162399291992188, 0.023112159729003906, 0.023152639389038086, 0.022948928833007812, 0.022962272644042967, 0.023118688583374025, 0.023212160110473633, 0.02353548812866211, 0.023631391525268556, 0.023726303100585936, 0.023756799697875978, 0.023678943634033202, 0.023676511764526367, 0.023708351135253908, 0.024188928604125977, 0.02401443290710449, 0.023625408172607422, 0.023497440338134765, 0.023356672286987304, 0.023585567474365233, 0.023388128280639648, 0.023199071884155275, 0.023216800689697267, 0.02330838394165039, 0.023207263946533205, 0.023740447998046876, 0.023247039794921875, 0.023142656326293944, 0.023310239791870118, 0.023148735046386718, 0.02329599952697754, 0.023224319458007812, 0.02326736068725586, 0.024670175552368164, 0.023114912033081053, 0.023047008514404298, 0.023089183807373046, 0.023089311599731446, 0.02308691215515137, 0.02312771224975586, 0.02324720001220703, 0.023199743270874023, 0.02309529685974121, 0.023193599700927735, 0.023103168487548828, 0.02327814483642578, 0.023121088027954102, 0.023089727401733397, 0.023295648574829103, 0.023337535858154297, 0.02327961540222168, 0.023082399368286134, 0.023194175720214844, 0.02316700744628906, 0.023222272872924804, 0.02320524787902832, 0.02316147232055664, 0.02317478370666504, 0.02315056037902832, 0.023093664169311523, 0.02304204750061035, 0.023125600814819337, 0.02316739273071289, 0.023041055679321288, 0.023218368530273436, 0.023155040740966797, 0.023212160110473633, 0.02310905647277832, 0.023065439224243166, 0.023025087356567383, 0.023038047790527344, 0.023048095703125, 0.02307676887512207, 0.023068479537963867, 0.023034751892089842, 0.023350400924682616, 0.023085567474365236, 0.023058816909790038, 0.023041471481323243, 0.023443359375, 0.023005439758300782, 0.02313398361206055, 0.022961856842041016, 0.02294425582885742, 0.022986719131469727, 0.023165088653564453, 0.02314271926879883, 0.02294169616699219, 0.022986303329467772, 0.023012928009033203, 0.0229671688079834, 0.023158784866333007, 0.02291302490234375, 0.023205888748168944, 0.02305843162536621, 0.0230581111907959, 0.02310335922241211, 0.023144895553588868, 0.023182720184326173, 0.023394943237304688, 0.023197696685791015, 0.023262720108032226, 0.023222496032714843, 0.023275903701782227, 0.023166431427001952, 0.023089599609375, 0.023154687881469727, 0.023558143615722657, 0.023604639053344728, 0.023532127380371092, 0.023357440948486328, 0.023193599700927735, 0.023417024612426757, 0.02314896011352539, 0.02319561576843262, 0.023480255126953124, 0.023327072143554686, 0.023656736373901366, 0.023140064239501955, 0.023142400741577147, 0.023074560165405274, 0.023156864166259766, 0.02305615997314453, 0.02302601623535156, 0.022971519470214842, 0.023036800384521484, 0.022970367431640625, 0.02303513526916504, 0.02301145553588867, 0.02294438362121582, 0.02304719924926758, 0.023132448196411134, 0.023124671936035155, 0.023088287353515625, 0.023104352951049803, 0.023166976928710937, 0.02313216018676758, 0.02303385543823242, 0.02307411193847656, 0.023152576446533204, 0.023316736221313476, 0.02317513656616211, 0.023169343948364257, 0.0231549129486084, 0.02512076759338379, 0.023887231826782228, 0.023286239624023437, 0.023349279403686522, 0.023330047607421876, 0.02335014343261719, 0.02346940803527832, 0.023212703704833984, 0.023232511520385742, 0.02317027282714844, 0.02324764823913574, 0.02310246467590332, 0.02374684715270996, 0.023145183563232422, 0.024278112411499023, 0.02412771224975586, 0.023399040222167967, 0.023290176391601563, 
0.02326092720031738, 0.02327142333984375, 0.02314854431152344, 0.023093151092529296, 0.02322470474243164, 0.02321379280090332, 0.025935871124267578, 0.023400640487670897, 0.02316841506958008, 0.023037696838378908, 0.023058816909790038, 0.02316726493835449, 0.0229736328125, 0.02325446319580078, 0.023038848876953125, 0.02302329635620117, 0.022976512908935546, 0.0230645751953125, 0.02306662368774414, 0.023003135681152344, 0.022992895126342772, 0.023002527236938478, 0.023150527954101562, 0.0239069766998291, 0.025839456558227537, 0.0231876163482666, 0.023209983825683594, 0.023256576538085938, 0.023271936416625977, 0.02303705596923828, 0.023019744873046876, 0.022888736724853517, 0.023103744506835937, 0.022999168395996094, 0.02307891273498535, 0.022966304779052735, 0.023289823532104494, 0.023101343154907226, 0.022972511291503905, 0.022992607116699218, 0.023466272354125975, 0.02305622482299805, 0.022997184753417967, 0.023056352615356445, 0.0231014404296875, 0.02309939193725586, 0.02307481575012207, 0.02295327949523926, 0.02300547218322754, 0.02318172836303711, 0.023158784866333007, 0.02296118354797363, 0.02306355285644531, 0.02319152069091797, 0.023178752899169923, 0.02319206428527832, 0.023068384170532228, 0.023068960189819337, 0.023248287200927736, 0.02301923179626465, 0.02312895965576172, 0.023171072006225587, 0.023218175888061524, 0.023117824554443358, 0.023113088607788088, 0.02332326316833496, 0.02323846435546875, 0.023371967315673828, 0.023242752075195314, 0.02321766471862793, 0.02328528022766113, 0.02326995277404785, 0.02333123207092285, 0.023109632492065428, 0.023191551208496093, 0.023267328262329103]",tokens/s,43.056997382937496,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,906.031104,3361.603584,0.0,2959.081472,2957.493248,s,1,7.21464892578125,7.21464892578125,0.0,7.21464892578125,7.21464892578125,7.21464892578125,7.21464892578125,[7.21464892578125],,kWh,5.374969395825246e-06,5.860204329073072e-07,2.3177796320272837e-06,8.278769460759836e-06,,MB,1220.407296,3430.8096,0.0,3017.801728,2552.887296,s,10,0.549932891845703,0.05499328918457032,0.0016117664661280922,0.0546092472076416,0.05577651634216308,0.05769388980865478,0.05922778858184815,"[0.05961126327514649, 0.05474163055419922, 0.05410879898071289, 0.0546627197265625, 0.053973342895507814, 0.05355558395385742, 0.055350433349609374, 0.05489039993286133, 0.0545557746887207, 0.05448294448852539]",tokens/s,4655.113447402723,kWh,2.003595348344697e-06,2.2096169278822058e-07,1.3338081446710974e-06,3.558365185804015e-06,tokens/kWh,71943149.91089275,MB,1232.510976,3430.8096,0.0,3017.801728,2552.889856,s,10,13.23736279296875,1.323736279296875,0.010672340699614544,1.3242601928710938,1.3355204467773438,1.3371098937988282,1.3383814514160157,"[1.3077044677734375, 1.30366748046875, 1.3195208740234374, 1.330189453125, 1.3316053466796876, 1.3222882080078124, 1.335167236328125, 1.3248397216796874, 1.3386993408203125, 1.3236806640625]",tokens/s,47.59256128680217,kWh,3.7483884743741126e-05,4.1340707900157125e-06,1.9684958670332486e-05,6.130291420408933e-05,tokens/kWh,1027683.6071815564,,s,630,13.23518908882142,0.021008236648922873,0.0003998959464638511,0.020983903884887696,0.02139603786468506,0.021494023513793946,0.022258285713195804,"[0.020736480712890627, 0.020707328796386718, 0.020539007186889648, 0.020635839462280273, 0.020662464141845704, 0.021396671295166016, 0.020678592681884767, 0.02057436752319336, 0.020654111862182616, 
0.020767423629760744, 0.02062348747253418, 0.020661760330200195, 0.020670015335083006, 0.02054844856262207, 0.02059996795654297, 0.020898624420166014, 0.0205883846282959, 0.021434335708618163, 0.020678848266601563, 0.020563743591308595, 0.020505952835083007, 0.020675680160522462, 0.0206661434173584, 0.02061516761779785, 0.020648063659667967, 0.020591903686523437, 0.020597055435180665, 0.021014432907104492, 0.020822399139404296, 0.020600671768188476, 0.02051411247253418, 0.020570655822753907, 0.020788639068603516, 0.02063657569885254, 0.020723712921142577, 0.02083020782470703, 0.020692991256713866, 0.02057382392883301, 0.02137740707397461, 0.020633600234985353, 0.020719392776489258, 0.020689151763916017, 0.020524768829345702, 0.020748544692993164, 0.021184576034545897, 0.02058847999572754, 0.020671903610229494, 0.02059324836730957, 0.021533695220947266, 0.020644863128662108, 0.02063155174255371, 0.02064588737487793, 0.020723295211791993, 0.021422048568725587, 0.022194623947143555, 0.020696544647216798, 0.02065679931640625, 0.020620607376098634, 0.0207794246673584, 0.020697248458862304, 0.020630592346191405, 0.020587583541870118, 0.020625215530395508, 0.02143235206604004, 0.02064384078979492, 0.020653343200683592, 0.020720544815063476, 0.02125312042236328, 0.02055046463012695, 0.02052876853942871, 0.020590656280517577, 0.020573631286621093, 0.020595584869384766, 0.020690944671630858, 0.020486143112182616, 0.020535295486450195, 0.020481664657592772, 0.020474239349365233, 0.0205533447265625, 0.020492671966552734, 0.020531200408935548, 0.020551008224487306, 0.020565696716308594, 0.02052511978149414, 0.0206529598236084, 0.020518911361694335, 0.020526687622070314, 0.020642208099365233, 0.02052681541442871, 0.020492576599121095, 0.020575647354125978, 0.020546144485473632, 0.020574207305908202, 0.020541439056396483, 0.02052409553527832, 0.020570112228393556, 0.020585504531860352, 0.02047689628601074, 0.020521535873413085, 0.020879423141479492, 0.02068284797668457, 0.02050480079650879, 0.02059382438659668, 0.021113151550292968, 0.020622079849243163, 0.020711200714111328, 0.02056108856201172, 0.020558080673217773, 0.020601408004760742, 0.021651456832885742, 0.02067046356201172, 0.021302528381347656, 0.02064031982421875, 0.0206231689453125, 0.020652416229248047, 0.020731647491455077, 0.020611328125, 0.02061849594116211, 0.02096614456176758, 0.02228428840637207, 0.020762752532958985, 0.020971391677856447, 0.02059878349304199, 0.020649984359741212, 0.02063564872741699, 0.020570112228393556, 0.021186559677124024, 0.021397504806518555, 0.021555200576782226, 0.021204992294311522, 0.021094400405883788, 0.02118556785583496, 0.020984159469604493, 0.021194847106933593, 0.02128950309753418, 0.021214239120483397, 0.021179359436035158, 0.021212736129760743, 0.02115011215209961, 0.02109443283081055, 0.021159008026123048, 0.021088832855224608, 0.02108201599121094, 0.021043712615966798, 0.020999616622924804, 0.02099456024169922, 0.020961280822753905, 0.0209836483001709, 0.02093052864074707, 0.020880767822265625, 0.020865440368652344, 0.02077241516113281, 0.02057916831970215, 0.020565727233886718, 0.020558111190795897, 0.020941856384277344, 0.020646047592163087, 0.020810400009155273, 0.02096348762512207, 0.020862335205078124, 0.021011072158813475, 0.020694143295288087, 0.0208720645904541, 0.020894975662231446, 0.02068252754211426, 0.02069808006286621, 0.02107596778869629, 0.020699424743652342, 0.02058415985107422, 0.020526527404785156, 0.0206177921295166, 0.020602336883544924, 0.020457120895385743, 
0.02079532814025879, 0.02096019172668457, 0.021063072204589844, 0.02096726417541504, 0.020951808929443358, 0.02088755226135254, 0.020670047760009767, 0.020685216903686524, 0.02143164825439453, 0.02092639923095703, 0.02093657684326172, 0.021188959121704102, 0.02097817611694336, 0.020969472885131835, 0.021036991119384764, 0.02081990432739258, 0.021123071670532227, 0.020883455276489257, 0.02074403190612793, 0.020800703048706053, 0.02070832061767578, 0.020899391174316405, 0.020836799621582032, 0.02083430480957031, 0.020748287200927733, 0.02071900749206543, 0.02103356742858887, 0.020785152435302736, 0.020791296005249024, 0.020883071899414064, 0.020765056610107423, 0.020854911804199218, 0.02084646415710449, 0.020596736907958983, 0.020543231964111328, 0.020664159774780273, 0.02078761672973633, 0.020931903839111327, 0.021002784729003906, 0.02140585517883301, 0.021354496002197267, 0.02118668746948242, 0.02123353576660156, 0.02127872085571289, 0.02127462387084961, 0.02144256019592285, 0.02121232032775879, 0.021154655456542968, 0.021342208862304687, 0.02132512092590332, 0.021129919052124024, 0.02115113639831543, 0.021131872177124023, 0.021372928619384765, 0.021233728408813476, 0.02122723197937012, 0.02138319969177246, 0.02123139190673828, 0.021985599517822266, 0.023000991821289063, 0.02133987236022949, 0.021494239807128907, 0.021344255447387696, 0.02126848030090332, 0.021243263244628906, 0.021160064697265626, 0.02117683219909668, 0.021164031982421876, 0.021123071670532227, 0.021014047622680665, 0.020890079498291015, 0.021024768829345702, 0.020841983795166014, 0.02156800079345703, 0.021912895202636718, 0.02098588752746582, 0.02084931182861328, 0.02085683250427246, 0.020889663696289064, 0.02081443214416504, 0.02074825668334961, 0.020725759506225586, 0.020780799865722656, 0.021022975921630858, 0.020938335418701173, 0.02146512031555176, 0.021096832275390626, 0.020972991943359377, 0.021129791259765624, 0.021087743759155272, 0.021219839096069337, 0.02120809555053711, 0.021050336837768555, 0.021063392639160156, 0.021047584533691405, 0.020964927673339843, 0.02097593688964844, 0.02083033561706543, 0.020676607131958007, 0.02066022491455078, 0.020527103424072265, 0.020622783660888672, 0.020562112808227537, 0.02059507179260254, 0.02305433654785156, 0.02166169548034668, 0.02077257537841797, 0.02071785545349121, 0.020750591278076172, 0.02132307243347168, 0.020862592697143554, 0.021004831314086914, 0.02113974380493164, 0.0211680965423584, 0.020997535705566405, 0.02122313690185547, 0.021212064743041992, 0.02123321533203125, 0.021315071105957033, 0.021486528396606447, 0.021259328842163087, 0.021304256439208986, 0.021235328674316406, 0.021311744689941407, 0.02129520034790039, 0.021520416259765626, 0.02124412727355957, 0.021339935302734377, 0.021216352462768553, 0.02119772720336914, 0.02117193603515625, 0.02150739288330078, 0.02130143928527832, 0.02119526481628418, 0.021374271392822265, 0.021216224670410157, 0.02134012794494629, 0.021352479934692383, 0.021534719467163087, 0.021112127304077147, 0.021338783264160156, 0.02124188804626465, 0.021807104110717773, 0.021448703765869142, 0.021073280334472658, 0.021092384338378907, 0.020912736892700196, 0.02069068717956543, 0.0205928955078125, 0.0205883846282959, 0.0206399040222168, 0.020760576248168947, 0.020591808319091798, 0.02070147132873535, 0.02072835159301758, 0.02072915267944336, 0.020742080688476563, 0.020848608016967772, 0.020773664474487304, 0.02090611267089844, 0.020963199615478517, 0.02088502311706543, 0.02081430435180664, 0.021493759155273438, 0.020962560653686523, 
0.02091494369506836, 0.021073440551757812, 0.020722143173217772, 0.02062848091125488, 0.02079795265197754, 0.02103536033630371, 0.02057436752319336, 0.020829376220703126, 0.021062719345092774, 0.021272319793701172, 0.021358591079711914, 0.021487648010253907, 0.021311456680297852, 0.02126438331604004, 0.021289087295532225, 0.021284736633300782, 0.021387264251708983, 0.021364736557006835, 0.021497856140136717, 0.021331968307495116, 0.021480575561523437, 0.021292959213256836, 0.021298143386840822, 0.021192703247070312, 0.021338111877441408, 0.02115167999267578, 0.021553247451782227, 0.021016223907470703, 0.020820512771606445, 0.020671295166015624, 0.02061008071899414, 0.02065763282775879, 0.020555231094360353, 0.02065001678466797, 0.020814815521240235, 0.020787200927734374, 0.020928512573242186, 0.02066431999206543, 0.020686496734619142, 0.020674911499023438, 0.02085686492919922, 0.02077241516113281, 0.020687263488769533, 0.020996095657348633, 0.020846015930175783, 0.020852319717407225, 0.020689376831054686, 0.020885631561279296, 0.02094118309020996, 0.021139455795288087, 0.02103500747680664, 0.0211778564453125, 0.02124236869812012, 0.02128860855102539, 0.02144700813293457, 0.021325183868408204, 0.021383808135986327, 0.02126233673095703, 0.021344160079956053, 0.02122537612915039, 0.021287103652954102, 0.021323104858398438, 0.021203039169311523, 0.021240480422973634, 0.02124995231628418, 0.021450143814086914, 0.02120467185974121, 0.0213799991607666, 0.021407743453979493, 0.02125209617614746, 0.02124799919128418, 0.021499456405639647, 0.021232032775878908, 0.02143008041381836, 0.02133331108093262, 0.02118320083618164, 0.02133011245727539, 0.021379072189331053, 0.02125619125366211, 0.0213515510559082, 0.021311840057373046, 0.021395967483520507, 0.021360319137573244, 0.021135168075561525, 0.021176864624023437, 0.0212992000579834, 0.02127462387084961, 0.02120419120788574, 0.021250848770141603, 0.021141504287719725, 0.021322879791259765, 0.021220096588134764, 0.021291135787963867, 0.021374303817749022, 0.021141664505004883, 0.021369344711303712, 0.021352447509765626, 0.02104902458190918, 0.021072191238403322, 0.020972896575927734, 0.02089766311645508, 0.020895967483520506, 0.020802112579345704, 0.02118275260925293, 0.020981311798095703, 0.020859743118286134, 0.020924224853515624, 0.0208437442779541, 0.02089472007751465, 0.021440511703491212, 0.020889120101928713, 0.02099452781677246, 0.021020671844482423, 0.020973567962646485, 0.020930559158325195, 0.021172000885009767, 0.02114156723022461, 0.0210761604309082, 0.02123075294494629, 0.020996320724487306, 0.020839231491088867, 0.020622495651245118, 0.02059942436218262, 0.020573408126831054, 0.020511520385742187, 0.020878591537475587, 0.020723840713500977, 0.020628095626831055, 0.020686656951904296, 0.020596063613891602, 0.02059062385559082, 0.020568704605102538, 0.02063363265991211, 0.0205817928314209, 0.020484832763671874, 0.020508703231811524, 0.020996095657348633, 0.020533248901367186, 0.020497631072998047, 0.020560543060302736, 0.020727935791015624, 0.02057222366333008, 0.020869056701660157, 0.020602655410766602, 0.020572479248046876, 0.020726688385009767, 0.020517087936401366, 0.020570911407470704, 0.02077052879333496, 0.021203231811523438, 0.02128691291809082, 0.023742464065551756, 0.02329987144470215, 0.021486944198608398, 0.021342655181884766, 0.021971391677856444, 0.021356000900268554, 0.021486112594604492, 0.021970943450927736, 0.021489599227905273, 0.021381439208984374, 0.02129280090332031, 0.02123980712890625, 0.021301248550415038, 
0.021420032501220702, 0.021219327926635743, 0.02195612716674805, 0.021373119354248047, 0.02124172782897949, 0.02142243194580078, 0.02130745506286621, 0.02144451141357422, 0.02413372802734375, 0.021342208862304687, 0.0215285758972168, 0.021432640075683594, 0.02125164794921875, 0.0212741756439209, 0.021151327133178712, 0.021220096588134764, 0.021274944305419922, 0.02113644790649414, 0.02120742416381836, 0.021329792022705078, 0.02117487907409668, 0.021184352874755858, 0.02145193672180176, 0.021140480041503908, 0.021196800231933592, 0.021323776245117186, 0.02118377685546875, 0.02117091178894043, 0.021200895309448242, 0.021145599365234375, 0.02115519905090332, 0.021109376907348633, 0.021802400588989256, 0.02110915184020996, 0.0211212158203125, 0.021135135650634764, 0.021026496887207032, 0.02102239990234375, 0.021025632858276366, 0.020905439376831054, 0.02089423942565918, 0.020935840606689453, 0.02096009635925293, 0.021040384292602538, 0.02110540771484375, 0.020866207122802734, 0.020859743118286134, 0.02082592010498047, 0.02086521530151367, 0.02086092758178711, 0.020975616455078124, 0.02099203109741211, 0.021044479370117188, 0.021231456756591795, 0.02113212776184082, 0.02112441635131836, 0.021007072448730468, 0.021245504379272463, 0.021305791854858397, 0.021518047332763673, 0.021408031463623047, 0.021016672134399415, 0.02081590461730957, 0.021470752716064453, 0.022376575469970704, 0.020891679763793945, 0.02096636772155762, 0.0207061767578125, 0.020622751235961915, 0.020509408950805663, 0.02067865562438965, 0.020539007186889648, 0.020488607406616212, 0.02050864028930664, 0.02048204803466797, 0.020415807723999025, 0.020545888900756835, 0.02048031997680664, 0.020505632400512695, 0.020603776931762696, 0.020578432083129882, 0.02057369613647461, 0.02087715148925781, 0.020572832107543945, 0.020582399368286132, 0.020535295486450195, 0.020596927642822265, 0.02058998489379883, 0.020602272033691405, 0.020738271713256835, 0.020978208541870116, 0.021225183486938477, 0.021205535888671877, 0.021217184066772463, 0.021262527465820313, 0.021292768478393554, 0.0213090877532959, 0.021518367767333985, 0.021384864807128905, 0.021283008575439452, 0.02119923210144043, 0.021375263214111328, 0.02123366355895996, 0.0212807674407959, 0.021376256942749024, 0.02155187225341797, 0.021424095153808595, 0.02126646423339844, 0.021307392120361326, 0.021828800201416015, 0.02136147117614746, 0.021292863845825197, 0.021450271606445314, 0.021371551513671875, 0.021279872894287108, 0.02126095962524414, 0.02123081588745117, 0.021283296585083007, 0.021236032485961915, 0.021176544189453125, 0.021090303421020508, 0.021364736557006835, 0.021159936904907226, 0.021123071670532227, 0.02105926322937012, 0.021061952590942384, 0.02100223922729492, 0.020946847915649415]",tokens/s,47.60037773333402,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,906.002432,1910.3744,0.0,1507.852288,1469.840384,s,1,7.28424365234375,7.28424365234375,0.0,7.28424365234375,7.28424365234375,7.28424365234375,7.28424365234375,[7.28424365234375],,kWh,5.408250662488474e-06,5.89228687531843e-07,9.252785179991729e-07,6.9227578680194895e-06,,MB,1360.83456,1954.414592,0.0,1539.309568,1426.274304,s,10,0.2544509754180908,0.02544509754180908,0.0005515788628767666,0.02536892795562744,0.025756505012512206,0.026344140529632566,0.02681424894332886,"[0.025625919342041014, 0.025457664489746092, 0.02693177604675293, 0.02528019142150879, 0.025159263610839845, 0.024800832748413087, 0.02508608055114746, 0.025494272232055665, 0.02507708740234375, 0.02553788757324219]",tokens/s,10060.877132790078,kWh,7.363674453713914e-07,8.120830954315963e-08,4.817384206549257e-07,1.2993141755694766e-06,tokens/kWh,197027019.9567381,MB,1393.913856,1962.8032,0.0,1547.698176,1426.276864,s,10,14.106373046875001,1.4106373046875,0.011375361685033053,1.4105760498046875,1.4243697387695313,1.4252024475097655,1.425868614501953,"[1.4201143798828124, 1.404700439453125, 1.411345458984375, 1.41626611328125, 1.40358642578125, 1.409806640625, 1.4048375244140625, 1.3854962158203126, 1.42603515625, 1.4241846923828125]",tokens/s,44.66066492829385,kWh,4.037607458962015e-05,4.453052244279772e-06,1.812097090594343e-05,6.295009773984336e-05,tokens/kWh,1000792.7272863479,,s,630,14.10402961921693,0.022387348601931617,0.00040483494046313735,0.022430831909179688,0.022682092666625978,0.02284711217880249,0.02387084428787232,"[0.02267407989501953, 0.022390527725219725, 0.0224597110748291, 0.022456575393676757, 0.02236182403564453, 0.02256585693359375, 0.022382591247558595, 0.022401023864746093, 0.022419456481933595, 0.02267158317565918, 0.02257004737854004, 0.022464799880981445, 0.02251206398010254, 0.022558080673217774, 0.022528095245361326, 0.022450719833374023, 0.022508768081665038, 0.022446432113647462, 0.02260639953613281, 0.022660991668701173, 0.02249910354614258, 0.0224749755859375, 0.022460416793823244, 0.022767648696899415, 0.022529727935791017, 0.02332419204711914, 0.02248691177368164, 0.022765951156616213, 0.022600191116333008, 0.022540288925170897, 0.022575103759765625, 0.022468608856201173, 0.022671295166015626, 0.022495296478271483, 0.022666528701782228, 0.022422239303588866, 0.02255462455749512, 0.02312396812438965, 0.023023616790771483, 0.022555679321289063, 0.022434783935546876, 0.0224718074798584, 0.022604576110839845, 0.022448223114013673, 0.022540288925170897, 0.022614015579223632, 0.02246643257141113, 0.022530176162719726, 0.02242355155944824, 0.022542335510253905, 0.02244812774658203, 0.022409215927124023, 0.022536191940307617, 0.02239606475830078, 0.023012384414672852, 0.022503231048583986, 0.02245337677001953, 0.022382976531982422, 0.022441471099853515, 0.02229145622253418, 0.022405120849609376, 0.022257471084594728, 0.022187904357910158, 0.02237081527709961, 0.02225017547607422, 0.022168352127075196, 0.022219711303710938, 0.022425600051879883, 0.022190080642700196, 0.022147071838378905, 0.02206934356689453, 0.022138784408569336, 0.022114303588867186, 0.022071296691894532, 0.021966047286987304, 0.021951263427734374, 0.02206924819946289, 0.021934080123901366, 0.021975040435791016, 0.02186240005493164, 0.021991424560546875, 0.022034143447875975, 0.024895776748657228, 0.02224742317199707, 0.022177791595458983, 0.02229814338684082, 0.02221718406677246, 0.022212575912475586, 0.022153087615966797, 0.022167167663574218, 
0.022178335189819334, 0.02224947166442871, 0.02210633659362793, 0.022063936233520508, 0.022004703521728515, 0.022146272659301757, 0.02203487968444824, 0.022165855407714843, 0.022177791595458983, 0.022192127227783204, 0.021966848373413086, 0.022098016738891602, 0.022109567642211916, 0.022285888671875, 0.022282751083374023, 0.022190559387207032, 0.022147071838378905, 0.022128639221191407, 0.02213667106628418, 0.02220252799987793, 0.022370304107666016, 0.022310752868652344, 0.022716512680053712, 0.022569023132324218, 0.022943744659423827, 0.022548479080200197, 0.022777856826782225, 0.022540512084960936, 0.022396703720092774, 0.022503423690795898, 0.022511615753173828, 0.022597696304321287, 0.022458303451538087, 0.022374399185180666, 0.022552223205566407, 0.02303420829772949, 0.022540224075317382, 0.022541919708251954, 0.022620576858520508, 0.022540288925170897, 0.022449951171875, 0.02250499153137207, 0.022360767364501953, 0.022361440658569334, 0.022294208526611327, 0.022618175506591797, 0.022367136001586914, 0.022359840393066405, 0.022349279403686525, 0.022225664138793944, 0.02240716743469238, 0.022413312911987306, 0.02206073570251465, 0.02204876708984375, 0.02209414482116699, 0.021937599182128908, 0.022313535690307616, 0.02205286407470703, 0.021931488037109374, 0.022034975051879884, 0.02201545524597168, 0.022048479080200196, 0.02198201560974121, 0.022071296691894532, 0.022148544311523438, 0.022254144668579102, 0.02211840057373047, 0.022181888580322266, 0.02272489547729492, 0.0223251838684082, 0.02230771255493164, 0.022358943939208984, 0.02244371223449707, 0.022357343673706054, 0.022522848129272462, 0.022622207641601562, 0.02242905616760254, 0.0225982723236084, 0.022458368301391602, 0.02265088081359863, 0.022460416793823244, 0.022457792282104493, 0.022599552154541017, 0.0230263671875, 0.02256438446044922, 0.022522335052490235, 0.02253129577636719, 0.022457120895385742, 0.022564863204956053, 0.022618112564086915, 0.022600927352905274, 0.02301113510131836, 0.022574047088623046, 0.02255459213256836, 0.022464544296264648, 0.022378528594970703, 0.022650144577026368, 0.022604480743408203, 0.022425600051879883, 0.022323968887329102, 0.022433887481689452, 0.022440767288208006, 0.022356895446777342, 0.022345727920532226, 0.022372352600097657, 0.0226278076171875, 0.02237308883666992, 0.0223940486907959, 0.02254649543762207, 0.022600255966186523, 0.02265433692932129, 0.02259008026123047, 0.02275065612792969, 0.022618751525878906, 0.022589632034301758, 0.022467807769775392, 0.022554752349853515, 0.022626720428466796, 0.022590944290161133, 0.02252262306213379, 0.0225314884185791, 0.022462047576904298, 0.022998847961425782, 0.022635295867919923, 0.022412832260131837, 0.02252662467956543, 0.022378496170043945, 0.022501375198364256, 0.022456575393676757, 0.022469728469848634, 0.022370399475097655, 0.0223176326751709, 0.02239923286437988, 0.02245123291015625, 0.022348255157470704, 0.02237468719482422, 0.022394176483154296, 0.022431968688964844, 0.02228895950317383, 0.022703775405883787, 0.022382816314697265, 0.02222489547729492, 0.022239231109619142, 0.022208511352539064, 0.022161407470703123, 0.022330528259277345, 0.02233839988708496, 0.022378015518188476, 0.022344160079956054, 0.022289920806884765, 0.022473215103149414, 0.022450176239013672, 0.024268287658691406, 0.02258995246887207, 0.02250054359436035, 0.02255081558227539, 0.02254902458190918, 0.022387935638427735, 0.02231705665588379, 0.02232352066040039, 0.022206943511962892, 0.02227939224243164, 0.022214656829833986, 0.02234163284301758, 
0.022215839385986327, 0.02453590393066406, 0.023750656127929686, 0.02224732780456543, 0.022331487655639647, 0.021909151077270508, 0.021977439880371093, 0.02203647994995117, 0.021975040435791016, 0.021900928497314454, 0.021934240341186524, 0.022046943664550782, 0.022155263900756835, 0.021938175201416017, 0.021992671966552736, 0.023730911254882813, 0.02391993522644043, 0.022316064834594727, 0.022338560104370117, 0.022474687576293947, 0.022252256393432618, 0.02235807991027832, 0.02226790428161621, 0.022245119094848633, 0.022248767852783204, 0.022332351684570314, 0.0223023681640625, 0.022540639877319336, 0.02262835121154785, 0.022474143981933595, 0.022352479934692384, 0.022511615753173828, 0.02361356735229492, 0.023639711380004882, 0.02244630432128906, 0.022347455978393556, 0.023216224670410155, 0.02235603141784668, 0.022184064865112305, 0.0219169921875, 0.021819391250610352, 0.021892831802368163, 0.021752672195434572, 0.021847679138183595, 0.02184441566467285, 0.021862495422363282, 0.02205801582336426, 0.02187548828125, 0.021844224929809572, 0.021761983871459962, 0.021826784133911134, 0.021932832717895506, 0.02197324752807617, 0.021982303619384767, 0.02179539108276367, 0.021851808547973632, 0.021808767318725587, 0.021807935714721678, 0.021768192291259765, 0.021772287368774415, 0.021794815063476563, 0.021835168838500976, 0.02181193542480469, 0.021804479598999022, 0.02185260772705078, 0.02185398483276367, 0.02176633644104004, 0.021696319580078127, 0.021696735382080078, 0.021866304397583008, 0.02188822364807129, 0.021908447265625, 0.021864416122436524, 0.021818592071533204, 0.02190412712097168, 0.021891103744506837, 0.02201398468017578, 0.02233353614807129, 0.022652864456176758, 0.022595552444458007, 0.02253753662109375, 0.02255462455749512, 0.022620288848876954, 0.022659648895263673, 0.022742944717407225, 0.022570783615112305, 0.0226856632232666, 0.02275321578979492, 0.022709760665893555, 0.022560928344726564, 0.02294655990600586, 0.02265497589111328, 0.022690847396850587, 0.023141151428222657, 0.02254252815246582, 0.022738943099975584, 0.022646976470947267, 0.022646591186523436, 0.02251571273803711, 0.022581024169921873, 0.022515840530395508, 0.022463712692260742, 0.022412160873413085, 0.022403072357177735, 0.02239251136779785, 0.022397087097167968, 0.022368160247802735, 0.02224924850463867, 0.02237487983703613, 0.022548479080200197, 0.022464384078979494, 0.022470176696777342, 0.022573663711547853, 0.02240643119812012, 0.022493215560913087, 0.022481056213378908, 0.022714912414550783, 0.022665216445922853, 0.022607872009277344, 0.02262835121154785, 0.022531520843505858, 0.022333951950073243, 0.022280511856079103, 0.022253311157226563, 0.022449983596801757, 0.02197222328186035, 0.02214297676086426, 0.021998559951782227, 0.022079456329345704, 0.021982879638671876, 0.02188323211669922, 0.022408256530761717, 0.021952512741088868, 0.021987520217895507, 0.022147167205810548, 0.022134912490844726, 0.022123039245605467, 0.022298431396484374, 0.02235379219055176, 0.022573375701904298, 0.022427648544311524, 0.022457408905029296, 0.02243270492553711, 0.022583295822143554, 0.022503423690795898, 0.022580799102783204, 0.02268169593811035, 0.02249558448791504, 0.022718463897705078, 0.022618112564086915, 0.022633920669555663, 0.022821632385253907, 0.022603584289550782, 0.022663423538208008, 0.022609216690063477, 0.022478687286376954, 0.023683679580688476, 0.022560192108154298, 0.02245484733581543, 0.02244313621520996, 0.02209993553161621, 0.022079456329345704, 0.0219465274810791, 0.021766944885253905, 
0.02171494483947754, 0.021909503936767577, 0.021817344665527344, 0.021835775375366212, 0.02183750343322754, 0.022040767669677733, 0.021935680389404296, 0.021793312072753906, 0.02185772705078125, 0.02178927993774414, 0.022091167449951172, 0.02205558395385742, 0.022342655181884767, 0.02238355255126953, 0.022470367431640624, 0.02256924819946289, 0.0223538875579834, 0.022493120193481445, 0.022374048233032226, 0.022632831573486327, 0.022496831893920897, 0.02257744026184082, 0.022399200439453124, 0.02240559959411621, 0.022483327865600585, 0.022247520446777344, 0.022255615234375, 0.02218601608276367, 0.02200595283508301, 0.02197203254699707, 0.022058847427368165, 0.02236476707458496, 0.02183363151550293, 0.022901119232177733, 0.021811199188232423, 0.021702655792236326, 0.021825536727905274, 0.021698368072509765, 0.021835712432861327, 0.022121759414672853, 0.02176304054260254, 0.021751808166503905, 0.02177987289428711, 0.02182819175720215, 0.021744863510131836, 0.02168502426147461, 0.021733375549316408, 0.021769407272338868, 0.021825408935546874, 0.022117311477661133, 0.021778432846069336, 0.02185830307006836, 0.02184726333618164, 0.021768991470336913, 0.021681247711181642, 0.021908607482910156, 0.021839616775512695, 0.02174076843261719, 0.02175632095336914, 0.02171536064147949, 0.021776639938354492, 0.021849855422973633, 0.021850112915039063, 0.021773759841918944, 0.021897695541381837, 0.021938079833984374, 0.021825727462768556, 0.02177769660949707, 0.02180784034729004, 0.021742879867553713, 0.021744352340698242, 0.021777887344360352, 0.02172332763671875, 0.021919103622436525, 0.022051807403564452, 0.022194175720214843, 0.022328895568847658, 0.022456159591674806, 0.022312959671020507, 0.022338144302368163, 0.022872064590454103, 0.022429695129394533, 0.022396928787231447, 0.022410560607910156, 0.022152000427246094, 0.022326208114624022, 0.02185798454284668, 0.02189958381652832, 0.021798912048339843, 0.022206464767456056, 0.022261119842529296, 0.022394752502441405, 0.02241551971435547, 0.022557279586791993, 0.02249241638183594, 0.022532640457153322, 0.022548704147338866, 0.02262156867980957, 0.022585535049438478, 0.022648832321166993, 0.022552703857421873, 0.02267091178894043, 0.022633216857910157, 0.02247270393371582, 0.022609920501708985, 0.02351923179626465, 0.02485043144226074, 0.02259984016418457, 0.022617952346801758, 0.022529375076293944, 0.02269366455078125, 0.022591455459594727, 0.022432607650756838, 0.022674976348876955, 0.022643327713012695, 0.022541376113891603, 0.022575967788696288, 0.02269699287414551, 0.0225515193939209, 0.02251366424560547, 0.022779359817504882, 0.022622432708740235, 0.022703903198242188, 0.022654943466186524, 0.0225152645111084, 0.022473087310791017, 0.02262444877624512, 0.022639039993286134, 0.022759424209594727, 0.02257868766784668, 0.022501888275146483, 0.0226376953125, 0.02256287956237793, 0.022680383682250976, 0.022554719924926758, 0.022571008682250978, 0.022558048248291017, 0.022854240417480468, 0.022556480407714845, 0.0226345272064209, 0.022722688674926758, 0.022800384521484376, 0.022795520782470702, 0.02270083236694336, 0.022484575271606445, 0.022509504318237304, 0.022916704177856444, 0.024004480361938477, 0.022606208801269533, 0.022704191207885742, 0.022560352325439452, 0.022641056060791014, 0.022495231628417968, 0.02252739143371582, 0.025383167266845703, 0.02324323272705078, 0.022601600646972655, 0.02283839988708496, 0.02238332748413086, 0.022786272048950194, 0.02240425682067871, 0.022866336822509766, 0.022747520446777345, 0.022777503967285156, 
0.02261027145385742, 0.022468320846557616, 0.02262224006652832, 0.022423807144165038, 0.02249113655090332, 0.02249920082092285, 0.022413440704345703, 0.022525951385498046, 0.022389984130859374, 0.02244278335571289, 0.022441408157348634, 0.022347551345825195, 0.022543136596679687, 0.02248089599609375, 0.0225086727142334, 0.022536352157592775, 0.02260041618347168, 0.022552576065063477, 0.02244607925415039, 0.022418495178222655, 0.02258016014099121, 0.022433696746826173, 0.022560863494873046, 0.022828416824340822, 0.02250553512573242, 0.0223887996673584, 0.0229071044921875, 0.022524192810058595, 0.022566911697387695, 0.022605823516845702, 0.02246659278869629, 0.02253206443786621, 0.02247270393371582, 0.022478464126586915, 0.02248742485046387, 0.02250547218322754, 0.022534143447875975, 0.02249692726135254, 0.02244233512878418, 0.0224849910736084, 0.022665216445922853, 0.022380544662475587, 0.022483232498168946, 0.022541343688964845, 0.0226495361328125, 0.022621408462524414, 0.022510368347167967, 0.022562816619873048]",tokens/s,44.6680854343653,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File 
""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 536.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 452.12 MiB is free. Process 336563 has 14.30 GiB memory in use. Of the allocated memory 14.18 GiB is allocated by PyTorch, and 1.57 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 217, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 258.12 MiB is free. Process 354215 has 14.49 GiB memory in use. Of the allocated memory 14.32 GiB is allocated by PyTorch, and 53.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 219, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ 
self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.50 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.30 GiB is free. Process 343877 has 13.44 GiB memory in use. Of the allocated memory 13.33 GiB is allocated by PyTorch, and 1.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,906.080256,4682.809344,0.0,4280.287232,4115.121152,s,1,7.39337451171875,7.39337451171875,0.0,7.39337451171875,7.39337451171875,7.39337451171875,7.39337451171875,[7.39337451171875],,kWh,5.965517250039435e-06,6.390820866174008e-07,2.2255573360052683e-06,8.830156672662103e-06,,MB,1331.154944,4991.090688,0.0,4575.985664,4408.410112,s,10,0.3831572151184083,0.038315721511840824,0.001204510387043785,0.037967903137207035,0.03868256492614746,0.04025579509735107,0.04151437923431396,"[0.041829025268554684, 0.03759516906738281, 0.037496673583984376, 0.0376416015625, 0.038230751037597654, 0.03807823944091797, 0.037857566833496094, 0.038332958221435544, 0.03827465438842773, 0.03782057571411133]",tokens/s,6681.330532191271,kWh,1.3707022514868733e-06,1.5116455265426165e-07,9.113048500657927e-07,2.433171654206928e-06,tokens/kWh,105212470.1343527,MB,1363.693568,4991.090688,0.0,4575.985664,4408.412672,s,10,14.590572387695312,1.4590572387695313,0.00872155139799272,1.4599227294921875,1.46907197265625,1.4700561279296875,1.4708434521484375,"[1.471040283203125, 1.4590107421875, 1.460834716796875, 1.467773681640625, 1.4632822265625, 1.468853271484375, 1.45364697265625, 1.44453125, 1.4459283447265625, 1.4556708984375]",tokens/s,43.17856649210683,kWh,4.2464448113928273e-05,4.683467779594264e-06,2.7374301111734504e-05,7.452221700525703e-05,tokens/kWh,845385.4774014008,,s,630,14.588541658401486,0.023156415330796015,0.0003313964716630725,0.02310084819793701,0.023453717613220213,0.023605227279663084,0.024203889961242685,"[0.02379417610168457, 0.023718048095703125, 0.02343462371826172, 0.023302175521850585, 0.023485023498535155, 0.023399776458740234, 0.023453535079956053, 0.023550527572631836, 0.023453184127807617, 0.02380467224121094, 0.023500799179077148, 0.023452959060668944, 0.023439456939697265, 0.023455360412597655, 0.02340671920776367, 0.023603519439697265, 0.02351161575317383, 0.023230464935302734, 0.02324684715270996, 0.023678335189819336, 0.02314713668823242, 0.023121791839599608, 0.023220703125, 0.023284927368164062, 
0.023325151443481445, 0.023496543884277344, 0.023121503829956053, 0.023556671142578124, 0.02353971290588379, 0.02310348892211914, 0.023007232666015624, 0.023043296813964845, 0.023286848068237304, 0.023039039611816407, 0.023184032440185548, 0.023154495239257812, 0.02303558349609375, 0.023103200912475585, 0.023124767303466798, 0.023276927947998047, 0.023336864471435546, 0.023266016006469728, 0.02325676727294922, 0.02336390495300293, 0.023236608505249022, 0.023449344635009764, 0.023398656845092774, 0.023609344482421874, 0.023385311126708986, 0.023609888076782225, 0.024019136428833007, 0.023279136657714843, 0.02352387237548828, 0.023113727569580078, 0.02327347183227539, 0.023111167907714843, 0.02339302444458008, 0.023215103149414062, 0.023257919311523437, 0.02317843246459961, 0.023109888076782225, 0.023128032684326172, 0.023224863052368164, 0.023377151489257814, 0.023217920303344727, 0.02316963195800781, 0.023089567184448243, 0.02303126335144043, 0.023357376098632813, 0.0231694393157959, 0.023206079483032226, 0.023232383728027342, 0.023102720260620116, 0.023782400131225585, 0.023279487609863283, 0.02302479934692383, 0.023026527404785155, 0.023029056549072266, 0.02309190368652344, 0.023037952423095705, 0.023014720916748048, 0.023001632690429687, 0.02322243118286133, 0.02305638313293457, 0.022943744659423827, 0.0230645751953125, 0.023047487258911134, 0.023153343200683595, 0.02328985595703125, 0.02313590431213379, 0.02305878448486328, 0.023118080139160155, 0.02313599967956543, 0.022980384826660157, 0.023032032012939452, 0.023137952804565428, 0.023138656616210937, 0.02309939193725586, 0.02306662368774414, 0.023070112228393554, 0.023123935699462892, 0.023007200241088866, 0.023089824676513673, 0.02332467269897461, 0.023569887161254882, 0.023208736419677734, 0.02315567970275879, 0.02309404754638672, 0.02332876777648926, 0.023165023803710938, 0.023199647903442384, 0.023439359664916993, 0.02313216018676758, 0.023468032836914062, 0.023040000915527343, 0.022956031799316406, 0.023813440322875978, 0.02312668800354004, 0.023089183807373046, 0.022983680725097655, 0.0231147518157959, 0.023056575775146484, 0.02318921661376953, 0.023005279541015625, 0.022950944900512697, 0.023188447952270506, 0.02354351997375488, 0.023083295822143555, 0.023085056304931642, 0.02307276725769043, 0.023119647979736327, 0.02333718490600586, 0.023183584213256836, 0.02303683280944824, 0.022960575103759765, 0.02332512092590332, 0.0231395206451416, 0.02304217529296875, 0.023114431381225587, 0.02290483283996582, 0.02309939193725586, 0.023219743728637696, 0.023089599609375, 0.023138336181640625, 0.023104799270629882, 0.023314943313598634, 0.023056608200073242, 0.023109632492065428, 0.023138303756713868, 0.022972415924072266, 0.023164928436279295, 0.022990848541259764, 0.023678432464599608, 0.023040544509887694, 0.023216352462768555, 0.02311759948730469, 0.023001087188720702, 0.022964223861694336, 0.023848960876464844, 0.023183359146118163, 0.02323356819152832, 0.02313315200805664, 0.02298579216003418, 0.02296633529663086, 0.02297126388549805, 0.022977855682373045, 0.023151296615600586, 0.023113536834716796, 0.02301296043395996, 0.023019903182983397, 0.02292745590209961, 0.02299635124206543, 0.023019615173339843, 0.023127840042114257, 0.02317430305480957, 0.022977888107299806, 0.02326976013183594, 0.023071807861328127, 0.023100351333618162, 0.023388160705566406, 0.023447551727294923, 0.023867391586303712, 0.02341587257385254, 0.02361440086364746, 0.023473215103149415, 0.023501760482788087, 0.023379968643188476, 0.023359487533569336, 
0.023545856475830077, 0.023456159591674804, 0.02342127990722656, 0.023504735946655274, 0.023383935928344726, 0.02335670471191406, 0.02347929573059082, 0.02338559913635254, 0.023342815399169922, 0.02321075248718262, 0.023220256805419923, 0.023100799560546875, 0.02334783935546875, 0.023363584518432616, 0.023082143783569337, 0.02311897659301758, 0.02313596725463867, 0.02318345642089844, 0.023201887130737304, 0.023225568771362306, 0.023335519790649413, 0.023098880767822266, 0.023349056243896483, 0.023226879119873048, 0.02319174385070801, 0.023117631912231446, 0.023381824493408202, 0.023418655395507814, 0.02326585578918457, 0.023089183807373046, 0.0230830078125, 0.023651519775390626, 0.02367296028137207, 0.023411392211914062, 0.023332639694213866, 0.024443103790283204, 0.02324684715270996, 0.0230830078125, 0.023357440948486328, 0.023093088150024414, 0.023185567855834963, 0.023547903060913086, 0.023226367950439454, 0.02313804817199707, 0.023146080017089843, 0.023179744720458983, 0.023264671325683595, 0.02326176071166992, 0.023206111907958984, 0.023365631103515624, 0.02326118469238281, 0.02353561592102051, 0.023302143096923827, 0.023313695907592774, 0.023293888092041016, 0.023464736938476564, 0.023295007705688476, 0.02313315200805664, 0.023148160934448242, 0.02339062309265137, 0.023191135406494142, 0.022905216217041016, 0.023164928436279295, 0.023273120880126952, 0.023222272872924804, 0.02328780746459961, 0.023099359512329103, 0.023221567153930665, 0.022956768035888673, 0.022984703063964843, 0.026842912673950194, 0.023998624801635744, 0.023121984481811523, 0.023449855804443358, 0.023312128067016602, 0.023109632492065428, 0.0233919677734375, 0.023042335510253906, 0.023088191986083983, 0.023087583541870117, 0.0231810245513916, 0.02317184066772461, 0.02326937675476074, 0.023126016616821288, 0.023174272537231446, 0.02313100814819336, 0.02311520004272461, 0.023007360458374024, 0.023129791259765626, 0.023100128173828127, 0.023441375732421874, 0.023317600250244142, 0.023079103469848632, 0.023202112197875976, 0.022944223403930663, 0.023023616790771483, 0.023770143508911133, 0.023172063827514647, 0.02307046318054199, 0.02302297592163086, 0.02292620849609375, 0.023202911376953125, 0.022991775512695312, 0.02291916847229004, 0.022896799087524414, 0.022911903381347656, 0.0230248966217041, 0.023359167098999024, 0.02309097671508789, 0.023100896835327147, 0.023341695785522462, 0.023214208602905274, 0.02325856018066406, 0.023145280838012695, 0.02319651222229004, 0.023083168029785155, 0.023266143798828125, 0.023156639099121093, 0.023377887725830077, 0.02345782470703125, 0.023104896545410158, 0.023079103469848632, 0.0231080322265625, 0.023125696182250976, 0.022968704223632813, 0.022967391967773438, 0.02312716865539551, 0.023218399047851564, 0.023023263931274414, 0.02306230354309082, 0.02305286407470703, 0.02303081512451172, 0.023567327499389647, 0.023102527618408204, 0.023473087310791015, 0.022996992111206056, 0.022937599182128905, 0.023019039154052734, 0.023769567489624024, 0.023330816268920897, 0.022956031799316406, 0.023052576065063477, 0.022971967697143554, 0.024276895523071287, 0.02461212730407715, 0.023051231384277344, 0.0231496639251709, 0.02303887939453125, 0.0230230712890625, 0.022931711196899414, 0.023008768081665038, 0.022977216720581055, 0.023007328033447266, 0.02302566337585449, 0.023006975173950197, 0.023009536743164062, 0.023117151260375977, 0.023457632064819336, 0.025158464431762697, 0.023148160934448242, 0.02327779197692871, 0.02315484809875488, 0.023197696685791015, 0.023078975677490236, 
0.023097280502319337, 0.023068031311035155, 0.023105951309204103, 0.023211744308471678, 0.023128864288330078, 0.023224031448364258, 0.023350591659545897, 0.023151296615600586, 0.02321343994140625, 0.023230335235595704, 0.023607200622558593, 0.023204992294311524, 0.02325049591064453, 0.02348192024230957, 0.02322287940979004, 0.023201343536376953, 0.0232739200592041, 0.0240251522064209, 0.023606624603271485, 0.02341734313964844, 0.023439456939697265, 0.02329510307312012, 0.02335424041748047, 0.025091680526733398, 0.023873952865600585, 0.023250143051147462, 0.023489248275756835, 0.023187456130981447, 0.02317283248901367, 0.023246335983276366, 0.0231081600189209, 0.023033536911010743, 0.023222272872924804, 0.023056640625, 0.023419168472290038, 0.023154783248901366, 0.023132064819335937, 0.02328166389465332, 0.023188671112060546, 0.023110464096069337, 0.02327347183227539, 0.023166976928710937, 0.023240703582763672, 0.023373823165893554, 0.02320342445373535, 0.02333123207092285, 0.023185407638549805, 0.022931488037109374, 0.022874080657958984, 0.02290073585510254, 0.02277891159057617, 0.022851871490478515, 0.0229520320892334, 0.022798175811767577, 0.02312668800354004, 0.02290902328491211, 0.02284854316711426, 0.022727584838867186, 0.02373135948181152, 0.023108512878417968, 0.022982431411743165, 0.023208160400390625, 0.022957536697387697, 0.02308764839172363, 0.022963327407836916, 0.023100032806396484, 0.023095552444458007, 0.023049312591552733, 0.023140607833862306, 0.023014047622680663, 0.023001087188720702, 0.022928384780883788, 0.023048608779907227, 0.023036256790161132, 0.02299110412597656, 0.02296169662475586, 0.023065151214599608, 0.02323036766052246, 0.022960128784179686, 0.023134016036987306, 0.02287808036804199, 0.02295225524902344, 0.022974016189575196, 0.022938047409057617, 0.022935840606689455, 0.02304380798339844, 0.022880352020263672, 0.022935615539550782, 0.02283894348144531, 0.0232391357421875, 0.023050527572631836, 0.022889951705932617, 0.022820959091186522, 0.022766016006469728, 0.02290073585510254, 0.022929407119750975, 0.023057855606079102, 0.022938175201416014, 0.0228121280670166, 0.022871744155883788, 0.022913856506347655, 0.022997024536132813, 0.022957408905029297, 0.022936223983764648, 0.02287820816040039, 0.02286591911315918, 0.0229901123046875, 0.022960031509399414, 0.022954847335815428, 0.023047679901123046, 0.02308143997192383, 0.0230166072845459, 0.023257951736450195, 0.022783519744873047, 0.02285968017578125, 0.02301308822631836, 0.022882495880126953, 0.022799039840698244, 0.022839263916015626, 0.022906879425048828, 0.02328678321838379, 0.022912000656127928, 0.02289833641052246, 0.022722080230712892, 0.022915903091430663, 0.022938976287841795, 0.02340073585510254, 0.02274342346191406, 0.023175167083740233, 0.022974464416503908, 0.022879295349121094, 0.02283977508544922, 0.022780384063720703, 0.022849536895751952, 0.022784000396728517, 0.0229171199798584, 0.022851743698120118, 0.02273468780517578, 0.022839296340942384, 0.023013311386108397, 0.023034944534301757, 0.022907167434692382, 0.022840032577514647, 0.022783424377441405, 0.02303443145751953, 0.023024768829345704, 0.022846303939819335, 0.02283075141906738, 0.022944032669067384, 0.022780256271362306, 0.02286566352844238, 0.0227553596496582, 0.022914655685424806, 0.02301024055480957, 0.02281881523132324, 0.02287388801574707, 0.02274940872192383, 0.022752416610717772, 0.023026527404785155, 0.022787776947021485, 0.022914688110351564, 0.022828960418701173, 0.02286262321472168, 0.02284297561645508, 
0.02294211196899414, 0.02281062316894531, 0.0228287353515625, 0.022878528594970703, 0.02280243110656738, 0.022848960876464843, 0.022900703430175782, 0.022964832305908203, 0.023127199172973633, 0.022889312744140626, 0.022842527389526367, 0.023022432327270508, 0.022800384521484376, 0.02294972801208496, 0.022865888595581054, 0.022857919692993164, 0.022880191802978515, 0.02335750389099121, 0.023232511520385742, 0.0228721923828125, 0.022966144561767578, 0.02285158348083496, 0.02278313636779785, 0.02336854362487793, 0.022978559494018554, 0.022849536895751952, 0.022922880172729494, 0.02299907112121582, 0.02286566352844238, 0.02291974449157715, 0.022847391128540038, 0.022968448638916016, 0.023214080810546874, 0.022982656478881838, 0.022925056457519532, 0.022929664611816405, 0.022969696044921876, 0.02287068748474121, 0.022896255493164062, 0.022959903717041017, 0.02357308769226074, 0.023076864242553712, 0.022964223861694336, 0.022968320846557616, 0.02289619255065918, 0.02300160026550293, 0.023064319610595702, 0.02295827293395996, 0.02291916847229004, 0.023111679077148437, 0.02307276725769043, 0.023248895645141602, 0.026391616821289064, 0.023260095596313476, 0.023074239730834962, 0.022888063430786133, 0.022884288787841798, 0.02328678321838379, 0.023002496719360353, 0.023021503448486327, 0.02392255973815918, 0.023196479797363282, 0.023006624221801757, 0.022970752716064455, 0.022976160049438477, 0.023118080139160155, 0.023064512252807617, 0.022946176528930665, 0.023842815399169923, 0.022984672546386718, 0.023025312423706056, 0.02289900779724121, 0.022906944274902342, 0.022914911270141603, 0.023080480575561522, 0.022841983795166016, 0.022861824035644532, 0.02285977554321289, 0.022974464416503908, 0.022951648712158202, 0.023007104873657227, 0.022880352020263672, 0.02357689666748047, 0.023015424728393553, 0.022967679977416992, 0.023021408081054687, 0.022962432861328125, 0.022874656677246093, 0.02290483283996582, 0.02303385543823242, 0.023019519805908203, 0.022861120223999023, 0.023156768798828126, 0.022894880294799805, 0.022968704223632813, 0.022929632186889648, 0.02305001640319824, 0.023058528900146483, 0.023555999755859376, 0.02304614448547363, 0.02283100891113281, 0.023012832641601564, 0.023061119079589843, 0.023254367828369142, 0.02312668800354004, 0.023165023803710938, 0.022937503814697266, 0.022962175369262695, 0.023015392303466796, 0.023023775100708008, 0.022935039520263673, 0.022931648254394532, 0.023017663955688477, 0.023009376525878908]",tokens/s,43.18457696127462,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", 
line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 551, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 502, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 327, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 182, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1030, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 819, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 616, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 218, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight 
= Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 172.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 80.12 MiB is free. Process 330428 has 14.66 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 791.00 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in 
check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1121, in __init__ self.embed_out = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 592.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 300.12 MiB is free. Process 194013 has 14.45 GiB memory in use. Of the allocated memory 14.33 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. 
If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.347328,13880.918016,0.0,13478.395904,13476.849152,s,1,8.200345703125,8.200345703125,0.0,8.200345703125,8.200345703125,8.200345703125,8.200345703125,[8.200345703125],,kWh,8.42613969167966e-06,9.220577104685434e-07,5.037781807999686e-06,1.4385979210147888e-05,,MB,1297.227776,14115.79904,0.0,13700.694016,13671.637504,s,10,2.000564987182617,0.20005649871826173,0.002651883647587154,0.2005892105102539,0.20291226501464846,0.20375589141845704,0.2044307925415039,"[0.19606008911132813, 0.20130841064453125, 0.20459951782226563, 0.1963572235107422, 0.19692306518554686, 0.20092189025878907, 0.20049256896972656, 0.2004915771484375, 0.20068585205078124, 0.20272479248046876]",tokens/s,1279.6385103216423,kWh,5.931440040750052e-06,6.54130199023205e-07,3.959114278400122e-06,1.0544684518173378e-05,tokens/kWh,24277634.817693535,MB,1340.440576,14117.896192,0.0,13702.791168,13671.640064,s,10,37.27404223632813,3.7274042236328127,0.002686149675001048,3.72863525390625,3.7296634521484378,3.730451721191406,3.7310823364257812,"[3.7221123046875, 3.724919677734375, 3.725282958984375, 3.725269287109375, 3.729444091796875, 3.72901513671875, 3.72882958984375, 3.72948828125, 3.72844091796875, 3.731239990234375]",tokens/s,16.901842735639434,kWh,0.00010889586356425067,1.2011476166440925e-05,7.234036342779929e-05,0.00019324770315849087,tokens/kWh,326006.4620190127,,s,630,37.270204143524154,0.05915905419607011,0.0005287248736649628,0.05903779220581055,0.05944354400634765,0.05957615432739258,0.0627592488861084,"[0.06231404876708985, 0.05962351989746094, 0.05893564987182617, 0.05878988647460937, 0.058824256896972654, 0.05880867385864258, 0.058847038269042966, 0.058966304779052736, 0.058916862487792966, 0.058966014862060545, 0.05885472106933594, 0.058980991363525394, 0.058925407409667965, 0.05892694473266601, 0.0588199348449707, 0.05895977783203125, 0.05923904037475586, 0.05921772766113281, 0.059240673065185545, 0.05918511962890625, 0.05896559906005859, 0.058928638458251956, 0.05894236755371094, 0.05887385559082031, 0.05885542297363281, 0.05915238571166992, 0.058816062927246095, 0.05896646499633789, 0.0589185905456543, 0.058819038391113285, 0.058862625122070314, 0.058929214477539064, 0.05916339111328125, 0.05909052658081055, 0.05921958541870117, 0.05935388946533203, 0.05940838241577148, 0.059066497802734375, 0.05901811218261719, 0.058960224151611326, 0.05892572784423828, 0.058834144592285156, 0.05884188842773438, 0.05885030364990235, 0.05910748672485352, 0.05905263900756836, 0.058920833587646486, 0.05899001693725586, 0.0590629768371582, 0.059194942474365235, 0.05925481414794922, 0.05942547225952149, 0.05933776092529297, 0.059154464721679685, 0.059001758575439454, 
0.058987998962402345, 0.05903340911865235, 0.0589378547668457, 0.058873664855957034, 0.05891455841064453, 0.05915011215209961, 0.059054752349853516, 0.05918742370605469, 0.06282937622070313, 0.05976604843139648, 0.05902195358276367, 0.05896758270263672, 0.05887993621826172, 0.05898259353637696, 0.058966175079345706, 0.05899862289428711, 0.05891692733764648, 0.05881913757324219, 0.058997726440429686, 0.05879030227661133, 0.058808032989501956, 0.058810367584228515, 0.05889817428588867, 0.05899574279785156, 0.05926287841796875, 0.05943910217285156, 0.05917875289916992, 0.05918540954589844, 0.05908588790893555, 0.05894022369384765, 0.05888748931884766, 0.05897097778320313, 0.058836959838867185, 0.05885337448120117, 0.058912094116210935, 0.05894633483886719, 0.05898227310180664, 0.059146240234375, 0.05877910232543945, 0.05888694381713867, 0.05900831985473633, 0.05922812652587891, 0.05931465530395508, 0.05928870391845703, 0.05941945648193359, 0.059103294372558596, 0.05910713577270508, 0.05905427169799805, 0.05904342269897461, 0.05897865676879883, 0.05905558395385742, 0.0590214729309082, 0.05905065536499023, 0.058908447265625, 0.05884928131103516, 0.058904319763183596, 0.059060192108154295, 0.059097377777099606, 0.059140094757080076, 0.05932796859741211, 0.05929833602905273, 0.05928345489501953, 0.05942643356323242, 0.05931865692138672, 0.05916592025756836, 0.05904188919067383, 0.05892985534667969, 0.0590233268737793, 0.05927324676513672, 0.05906441497802734, 0.058947486877441405, 0.06266175842285156, 0.059665279388427736, 0.059008895874023436, 0.05893132781982422, 0.0589035530090332, 0.059020286560058595, 0.0590333137512207, 0.05890835189819336, 0.05916723251342773, 0.0589312973022461, 0.059063648223876955, 0.058970783233642576, 0.0588155517578125, 0.058958782196044925, 0.05876531219482422, 0.05897356796264648, 0.05944339370727539, 0.05953545761108398, 0.059353439331054685, 0.059098430633544925, 0.05901587295532226, 0.058980255126953124, 0.05899657440185547, 0.05904409790039063, 0.05892879867553711, 0.05891721725463867, 0.05880134582519531, 0.05888083267211914, 0.058965217590332034, 0.05898524856567383, 0.05897420883178711, 0.05896131134033203, 0.0590772476196289, 0.059383617401123044, 0.059453182220458985, 0.059444896697998045, 0.059195297241210934, 0.059101760864257814, 0.0590456657409668, 0.05886003112792969, 0.058944862365722654, 0.0588581428527832, 0.0590561294555664, 0.05887385559082031, 0.05912985610961914, 0.05907769775390625, 0.05900751876831055, 0.05903196716308594, 0.059186622619628905, 0.059164447784423826, 0.05928768157958984, 0.05935315322875977, 0.05925948715209961, 0.05902531051635742, 0.05913398361206055, 0.05903571319580078, 0.059057727813720706, 0.05902995300292969, 0.059004417419433595, 0.05893331146240234, 0.05906000137329102, 0.05914080047607422, 0.05903478240966797, 0.06285516738891601, 0.059749824523925785, 0.05887443161010742, 0.05878169631958008, 0.05887977600097656, 0.0589189453125, 0.05891904067993164, 0.0589699821472168, 0.05904403305053711, 0.05898649597167969, 0.058985599517822264, 0.059008991241455075, 0.05888608169555664, 0.05887472152709961, 0.059031681060791014, 0.05912547302246094, 0.05943664169311524, 0.059472190856933595, 0.059228542327880856, 0.059017215728759766, 0.058929153442382816, 0.059018623352050784, 0.05887039947509766, 0.05900396728515625, 0.05896015930175781, 0.05893155288696289, 0.05890876770019531, 0.05905583953857422, 0.05888035202026367, 0.05882281494140625, 0.058998783111572264, 0.05905145645141602, 0.05946630477905274, 0.05940598297119141, 
0.05945119857788086, 0.059310623168945316, 0.0592132797241211, 0.05903766250610352, 0.059056289672851564, 0.05891328048706055, 0.05904912185668945, 0.059072673797607424, 0.058950241088867185, 0.0589027214050293, 0.05888390350341797, 0.059031551361083984, 0.058832191467285154, 0.058958526611328124, 0.05922409439086914, 0.05930390548706055, 0.05949849700927735, 0.05932022476196289, 0.0592119026184082, 0.059167743682861325, 0.05913699340820312, 0.05913190460205078, 0.05900243377685547, 0.05891321563720703, 0.059028575897216794, 0.059003807067871096, 0.058982398986816405, 0.05895372772216797, 0.059006175994873046, 0.06387811279296875, 0.06023088073730469, 0.05932860946655273, 0.059017311096191405, 0.05900348663330078, 0.05892300796508789, 0.05910860824584961, 0.05892182540893555, 0.058961406707763675, 0.0588636474609375, 0.05902070236206055, 0.058770591735839844, 0.05880198287963867, 0.05895372772216797, 0.058943489074707034, 0.059291648864746097, 0.059561214447021484, 0.05965865707397461, 0.05957984161376953, 0.05925481414794922, 0.05917103958129883, 0.05892300796508789, 0.05890467071533203, 0.05891727828979492, 0.05881052780151367, 0.058824512481689455, 0.05886995315551758, 0.05884467315673828, 0.059005088806152343, 0.058904769897460935, 0.058978145599365234, 0.059200862884521484, 0.059392990112304686, 0.05953740692138672, 0.05932646560668945, 0.059132991790771486, 0.05922636795043945, 0.05932463836669922, 0.059140575408935546, 0.05908070373535156, 0.058897537231445314, 0.05893795013427734, 0.05901052856445312, 0.058963905334472655, 0.059052928924560544, 0.059012832641601565, 0.05899446487426758, 0.0590013427734375, 0.05906025695800781, 0.05920764923095703, 0.05955382537841797, 0.05960291290283203, 0.059330078125, 0.05908659362792969, 0.05914492797851562, 0.05920758438110352, 0.059152095794677735, 0.05908915328979492, 0.05897638320922852, 0.05905817413330078, 0.05906227111816406, 0.059015167236328124, 0.05902867126464844, 0.06309891128540039, 0.05971292877197266, 0.05898092651367187, 0.05884272003173828, 0.058853790283203124, 0.05885152053833008, 0.05902105712890625, 0.05899862289428711, 0.058996543884277344, 0.058929729461669925, 0.059094879150390626, 0.05915212631225586, 0.058971393585205076, 0.058856353759765626, 0.05894316864013672, 0.0591069450378418, 0.059520801544189456, 0.05955481719970703, 0.05926630401611328, 0.059026111602783204, 0.05895788955688477, 0.05898678588867187, 0.059004478454589844, 0.05897772979736328, 0.058894081115722655, 0.05889878463745117, 0.05890316772460937, 0.05897324752807617, 0.058880191802978515, 0.05888828659057617, 0.058864288330078125, 0.05909708786010742, 0.05932032012939453, 0.059469825744628904, 0.059447135925292965, 0.05937715148925781, 0.05936092758178711, 0.059371871948242186, 0.059447135925292965, 0.059267040252685546, 0.059160415649414065, 0.0590299186706543, 0.059023967742919924, 0.05893711853027344, 0.05897817611694336, 0.059039199829101566, 0.05898739242553711, 0.05903744125366211, 0.059263233184814454, 0.059344894409179685, 0.05928310394287109, 0.059255134582519534, 0.05930937576293945, 0.05921011352539062, 0.05922659301757813, 0.059158367156982423, 0.05922611236572266, 0.05922611236572266, 0.05922140884399414, 0.05901548767089844, 0.05911759948730469, 0.059338367462158204, 0.059095680236816404, 0.06274252700805664, 0.059676673889160155, 0.05921996688842773, 0.05886566543579102, 0.05897011184692383, 0.05889843368530273, 0.058959873199462894, 0.0592072639465332, 0.05925296020507813, 0.059259071350097656, 0.0591541748046875, 0.05882905578613281, 
0.058931198120117184, 0.058875713348388675, 0.05895391845703125, 0.05914409637451172, 0.05959894561767578, 0.05957164764404297, 0.05948681640625, 0.05936729431152344, 0.05911561584472656, 0.05902102279663086, 0.05891100692749023, 0.058982398986816405, 0.05884928131103516, 0.05881958389282226, 0.05891993713378906, 0.05900006484985352, 0.05894630432128906, 0.058959873199462894, 0.058992641448974606, 0.059056159973144534, 0.0593152961730957, 0.059433856964111326, 0.05943910217285156, 0.059383201599121097, 0.05924518585205078, 0.059184192657470706, 0.05908163070678711, 0.05889043045043945, 0.058931007385253906, 0.05898236846923828, 0.058988574981689454, 0.05905817413330078, 0.059023391723632815, 0.059041759490966794, 0.059172031402587894, 0.059033504486083986, 0.059265953063964844, 0.059355136871337894, 0.05942393493652344, 0.05942076873779297, 0.059138782501220705, 0.059109375, 0.05918291091918945, 0.05922835159301758, 0.059172863006591796, 0.059084800720214846, 0.05899059295654297, 0.05903769683837891, 0.05902336120605469, 0.059114910125732424, 0.05914585494995117, 0.06335689544677735, 0.05994425582885742, 0.05913056182861328, 0.059146240234375, 0.058987743377685545, 0.058851585388183594, 0.05885756683349609, 0.05911318588256836, 0.05906655883789062, 0.058947166442871096, 0.05899103927612305, 0.05890918350219727, 0.05891619110107422, 0.058890975952148435, 0.05901017761230469, 0.05937004852294922, 0.05970355224609375, 0.059666431427001954, 0.05950838470458984, 0.05914972686767578, 0.05909139251708984, 0.0589007682800293, 0.058936607360839846, 0.05891372680664062, 0.0589411506652832, 0.05904412841796875, 0.05895782470703125, 0.05890252685546875, 0.058966014862060545, 0.059000831604003906, 0.05901055908203125, 0.059090431213378904, 0.05927423858642578, 0.059328510284423826, 0.05953279876708984, 0.05937203216552735, 0.0591723518371582, 0.05906304168701172, 0.059173728942871096, 0.05902428817749023, 0.058947296142578126, 0.05907689666748047, 0.05903142547607422, 0.05908902359008789, 0.05922611236572266, 0.059098270416259764, 0.05902761459350586, 0.05899744033813477, 0.05922198486328125, 0.059205406188964846, 0.05920793533325195, 0.059530879974365236, 0.05927299118041992, 0.059140705108642576, 0.05907804870605469, 0.059106975555419924, 0.05911852645874023, 0.05908684921264649, 0.059090335845947264, 0.05903788757324219, 0.05908220672607422, 0.0592289924621582, 0.058992767333984376, 0.06276607894897461, 0.05978374481201172, 0.05899292755126953, 0.0588260498046875, 0.05905081558227539, 0.05885955047607422, 0.05897356796264648, 0.05889811325073242, 0.0589813117980957, 0.05910678482055664, 0.05894118499755859, 0.05896860885620117, 0.05889772796630859, 0.05897721481323242, 0.05893753433227539, 0.05932831954956055, 0.05967257690429688, 0.05956198501586914, 0.05931126403808594, 0.05901913452148438, 0.05904883193969727, 0.05897951889038086, 0.058987422943115236, 0.058943359375, 0.059135807037353515, 0.059011390686035156, 0.05905926513671875, 0.05896633529663086, 0.05895436859130859, 0.058963329315185546, 0.05886835098266602, 0.05912124633789063, 0.05926339340209961, 0.059387294769287106, 0.05944790267944336, 0.05940019226074219, 0.059394046783447264, 0.059184288024902346, 0.059199840545654296, 0.05901363372802734, 0.0590458869934082, 0.05907401657104492, 0.05900278472900391, 0.05903219223022461, 0.05899468612670898, 0.05894688034057617, 0.05901129531860352, 0.05899055862426758, 0.05931372833251953, 0.059306079864501954, 0.05946249771118164, 0.05943500900268555, 0.05949542236328125, 0.05920044708251953, 
0.059152446746826175, 0.05920323181152344, 0.05914217758178711, 0.05897862243652344, 0.05899059295654297, 0.058985950469970704, 0.05907043075561524, 0.05897273635864258, 0.059061279296875, 0.06302950286865235, 0.05978112030029297, 0.059146240234375, 0.058961631774902344, 0.05904412841796875, 0.0591558723449707, 0.05904172897338867, 0.05896451187133789, 0.05907263946533203, 0.05902057647705078, 0.05901385498046875, 0.05881769561767578, 0.059250751495361326, 0.0589005126953125, 0.05891491317749024, 0.05918297576904297, 0.0596651840209961, 0.05969510269165039, 0.05949033737182617, 0.05926089477539063, 0.05915238571166992, 0.0589087028503418, 0.05894060897827148, 0.05892700958251953, 0.05901913452148438, 0.05883129501342774, 0.05893331146240234, 0.05891328048706055, 0.058912574768066404, 0.058920318603515626, 0.05895196914672852, 0.05911811065673828, 0.05954764938354492, 0.059486209869384764, 0.05947203063964844, 0.05958633422851563, 0.05961939239501953, 0.05930950546264648, 0.05913622283935547, 0.05899504089355469, 0.059007137298583985, 0.05907235336303711, 0.05900870513916016, 0.05914041519165039, 0.05902336120605469, 0.05900803375244141, 0.05900796890258789, 0.0591196174621582, 0.059272960662841795, 0.05951667022705078, 0.05949065780639649, 0.0597935676574707, 0.05937120056152344, 0.05949801635742188, 0.05917366409301758, 0.058966110229492184, 0.059045215606689454, 0.05900326538085938, 0.05909872055053711, 0.059042400360107425, 0.05908889770507812, 0.059006175994873046, 0.05903043365478516]",tokens/s,16.903583290661015,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, 
in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,838.36928,9637.39648,0.0,9242.148864,8603.568128,s,1,7.5771103515625,7.5771103515625,0.0,7.5771103515625,7.5771103515625,7.5771103515625,7.5771103515625,[7.5771103515625],,kWh,1.2212581470809407e-05,1.3398002122987354e-06,5.9325047460090374e-06,1.948488642911718e-05,,MB,1216.487424,9889.05472,0.0,9481.224192,8972.090368,s,10,1.0719497299194336,0.10719497299194336,0.008302012551692599,0.11059019088745117,0.11199024887084962,0.11226201972961426,0.11247943641662599,"[0.10413276672363281, 0.11084531402587891, 0.10690787506103516, 0.11083315277099609, 0.11034722900390626, 0.11118134307861328, 0.10990019226074219, 0.11253379058837891, 0.08333821105957032, 0.1119298553466797]",tokens/s,2388.171691775515,kWh,3.3949029875972787e-06,3.7433836348732785e-07,2.2527537401861285e-06,6.021995091270735e-06,tokens/kWh,42510828.408194534,MB,1244.884992,9893.249024,0.0,9485.418496,8972.092928,s,10,24.113931396484375,2.4113931396484376,0.0033347543767851566,2.411421142578125,2.4156163574218747,2.4166848876953124,2.4175397119140625,"[2.406288818359375, 2.40878857421875, 2.411273681640625, 2.406954345703125, 2.41148291015625, 2.412856201171875, 2.411359375, 2.41537890625, 2.411795166015625, 2.41775341796875]",tokens/s,26.12597629318333,kWh,7.044531578990048e-05,7.770005280472887e-06,4.676936881081512e-05,0.0001249846898811885,tokens/kWh,504061.7379607721,,s,630,24.110681304931674,0.038270922706240704,0.0008782322904293862,0.03810611152648926,0.03860705413818359,0.03883380527496338,0.04386638484954834,"[0.04344163131713867, 0.03990086364746094, 0.03855628967285156, 
0.038123390197753904, 0.03779580688476562, 0.03776681518554687, 0.03764499282836914, 0.03774457550048828, 0.03763347244262695, 0.037787296295166015, 0.03779804611206055, 0.03770825576782227, 0.03776956939697266, 0.037895584106445314, 0.037704288482666014, 0.03771360015869141, 0.03773443222045898, 0.03780624008178711, 0.03800179290771484, 0.03775743865966797, 0.03782447814941406, 0.037833248138427734, 0.037770721435546876, 0.03780230331420899, 0.038133983612060544, 0.0386899528503418, 0.03869168090820312, 0.03866624069213867, 0.038406143188476564, 0.03820697784423828, 0.037917182922363284, 0.03773952102661133, 0.03792588806152344, 0.03813369750976563, 0.03826646423339844, 0.03801545715332031, 0.03798992156982422, 0.038090431213378906, 0.03812432098388672, 0.03811459350585938, 0.038187744140625, 0.03824025726318359, 0.03788185501098633, 0.03928473663330078, 0.03786124801635742, 0.03809084701538086, 0.03799168014526367, 0.03794585418701172, 0.038320415496826174, 0.03808870315551758, 0.038282783508300784, 0.03838000106811523, 0.03863347244262695, 0.03860889434814453, 0.03859782409667969, 0.038529857635498044, 0.03821158218383789, 0.0379059829711914, 0.038162494659423826, 0.037953632354736325, 0.03805417633056641, 0.03805593490600586, 0.0380682258605957, 0.04390889739990234, 0.03971324920654297, 0.03872927856445312, 0.038076862335205075, 0.03775897598266602, 0.0377704963684082, 0.03778432083129883, 0.03791257476806641, 0.03793222427368164, 0.03827180862426758, 0.037850910186767575, 0.037694782257080076, 0.03772476959228516, 0.03781049728393555, 0.037781505584716796, 0.03811520004272461, 0.0379736328125, 0.03799091339111328, 0.037910526275634765, 0.037758430480957034, 0.037751327514648436, 0.03791820907592774, 0.03786393737792969, 0.03770556640625, 0.038459583282470705, 0.03836310577392578, 0.03826399993896484, 0.03842031860351563, 0.03825350570678711, 0.0383158073425293, 0.03794150543212891, 0.03809075164794922, 0.038069568634033206, 0.038202049255371094, 0.03831552124023437, 0.03814374542236328, 0.038228416442871095, 0.038031681060791016, 0.03806617736816406, 0.03789619064331055, 0.03790848159790039, 0.03806208038330078, 0.038324222564697266, 0.038340351104736325, 0.03784524917602539, 0.03795558547973633, 0.037969921112060545, 0.03797532653808594, 0.03799296188354492, 0.03820307159423828, 0.038447360992431644, 0.03876499176025391, 0.038854496002197265, 0.03846553421020508, 0.03823535919189453, 0.03832854461669922, 0.03810934448242188, 0.03809260940551758, 0.03854153442382813, 0.03883852767944336, 0.03807382583618164, 0.038261409759521484, 0.038088542938232425, 0.0468045768737793, 0.04062665557861328, 0.038690174102783206, 0.03782463836669922, 0.037985950469970706, 0.03812438583374023, 0.038061790466308594, 0.03796201705932617, 0.03796783828735351, 0.03765660858154297, 0.037697406768798826, 0.03789174270629883, 0.037742176055908204, 0.03782950210571289, 0.037904384613037106, 0.03765033721923828, 0.03793315124511719, 0.03793100738525391, 0.037720062255859374, 0.037787647247314454, 0.03810713577270508, 0.03825254440307617, 0.03788943862915039, 0.03799919891357422, 0.038076416015625, 0.03837055969238281, 0.038429439544677736, 0.03866995239257812, 0.038166175842285155, 0.03805667114257812, 0.03789158248901367, 0.03823462295532227, 0.03814153671264649, 0.03791689682006836, 0.03796604919433594, 0.03810464096069336, 0.03812598419189453, 0.03794112014770508, 0.03810508728027344, 0.03849843215942383, 0.03797401428222656, 0.03777536010742188, 0.03794729614257813, 0.03800870513916016, 
0.037988319396972656, 0.0380951042175293, 0.03796713638305664, 0.03801971054077148, 0.038186496734619144, 0.03796847915649414, 0.03827302551269531, 0.03829555130004883, 0.03845939254760742, 0.03837747192382813, 0.038526912689208985, 0.038417793273925784, 0.038357505798339846, 0.03851887893676758, 0.03874211120605469, 0.038250495910644534, 0.03805593490600586, 0.03789139175415039, 0.03811196899414063, 0.04447507095336914, 0.03966265487670898, 0.03868764877319336, 0.03808585739135742, 0.03790099334716797, 0.03779593658447265, 0.03775827026367187, 0.03802758407592773, 0.03782060623168945, 0.03771104049682617, 0.0377017936706543, 0.03806499099731445, 0.037682464599609375, 0.03784777450561523, 0.03781756973266601, 0.037767967224121096, 0.03786051177978516, 0.03818083190917969, 0.038122367858886716, 0.03812102508544922, 0.03800928115844727, 0.03802659225463867, 0.03798492813110352, 0.037875614166259765, 0.0378488655090332, 0.03817824172973633, 0.038214271545410156, 0.038246654510498045, 0.03845465469360351, 0.038402687072753905, 0.0379266242980957, 0.03790415954589844, 0.037935169219970706, 0.03810070419311523, 0.038034015655517575, 0.03796758270263672, 0.038230430603027346, 0.03840108871459961, 0.03806902313232422, 0.03790383911132812, 0.037986526489257814, 0.038088382720947264, 0.038091552734375, 0.03803327941894531, 0.03807859039306641, 0.03808870315551758, 0.03839091110229492, 0.03808345413208008, 0.03794124984741211, 0.03821353530883789, 0.03833206558227539, 0.03822022247314453, 0.038338558197021484, 0.03851651382446289, 0.03842816162109375, 0.03852975845336914, 0.03815628814697265, 0.037916576385498044, 0.03790652847290039, 0.038122943878173825, 0.03804339218139648, 0.03822041702270508, 0.038162624359130856, 0.04376230239868164, 0.03968368148803711, 0.0383656005859375, 0.0380549430847168, 0.03799043273925781, 0.03784758377075195, 0.03777753448486328, 0.03763024139404297, 0.03788780975341797, 0.03775279998779297, 0.03777763366699219, 0.03775897598266602, 0.0377262077331543, 0.037768447875976566, 0.03790719985961914, 0.03786038589477539, 0.037999584197998044, 0.03805388641357422, 0.0380682258605957, 0.03818086242675781, 0.03788595199584961, 0.037986305236816405, 0.0380948486328125, 0.038131614685058594, 0.038338558197021484, 0.03903881454467773, 0.03902899169921875, 0.038316032409667966, 0.03828902435302734, 0.038271358489990234, 0.038126655578613285, 0.038028224945068356, 0.03790777587890625, 0.038085311889648435, 0.03830108642578125, 0.03842108917236328, 0.03794739151000977, 0.038034622192382815, 0.03790111923217773, 0.037986305236816405, 0.038122718811035156, 0.03820131301879883, 0.0381407356262207, 0.03804569625854492, 0.03784662246704101, 0.03815670394897461, 0.03812761688232422, 0.038166080474853516, 0.037981697082519535, 0.038167488098144534, 0.038225887298583984, 0.03861507034301758, 0.0389769287109375, 0.0387625617980957, 0.038843936920166015, 0.03836419296264648, 0.038381534576416014, 0.03835903930664063, 0.03828531265258789, 0.0382479362487793, 0.03836886215209961, 0.03831286239624023, 0.03849132919311524, 0.044175838470458986, 0.03992620849609375, 0.03837958526611328, 0.03801417541503906, 0.03811203384399414, 0.037894142150878905, 0.03779302215576172, 0.03779865646362305, 0.0380579833984375, 0.03824156951904297, 0.037870304107666015, 0.03819472122192383, 0.03803388977050781, 0.0380682258605957, 0.038004737854003906, 0.03803340911865234, 0.0381781120300293, 0.038218433380126954, 0.038316032409667966, 0.03788595199584961, 0.03809049606323242, 0.038039806365966794, 
0.038040576934814455, 0.037993183135986326, 0.03829792022705078, 0.0387454719543457, 0.03862710571289062, 0.03836191940307617, 0.038257823944091794, 0.038394718170166015, 0.038065376281738283, 0.03788438415527344, 0.038226238250732424, 0.037822463989257815, 0.03806367874145508, 0.03813011169433594, 0.038125568389892575, 0.038284481048583986, 0.03798099136352539, 0.038029312133789066, 0.0380211181640625, 0.038125568389892575, 0.0380313606262207, 0.03829759979248047, 0.03801456069946289, 0.03792057418823242, 0.038037216186523434, 0.038198143005371096, 0.038117374420166016, 0.03817062377929688, 0.03831193542480469, 0.038376670837402344, 0.03879935836791992, 0.03885862350463867, 0.038613918304443356, 0.038430721282958984, 0.038221759796142576, 0.03830585479736328, 0.038268638610839845, 0.03809462356567383, 0.03805644989013672, 0.038109184265136715, 0.038449153900146485, 0.04441084671020508, 0.03985408020019531, 0.03848134231567383, 0.037970497131347654, 0.0378342399597168, 0.03787417602539062, 0.038096736907958985, 0.03808201599121094, 0.037695648193359375, 0.037810592651367186, 0.03783283233642578, 0.037770431518554685, 0.037826942443847655, 0.038334911346435546, 0.03800064086914062, 0.0383851203918457, 0.03796796798706055, 0.03775737762451172, 0.03802316665649414, 0.03806412887573242, 0.03793471908569336, 0.037889793395996095, 0.03784761428833008, 0.038039615631103516, 0.038141952514648435, 0.03843670272827148, 0.038469791412353516, 0.038502079010009765, 0.038326591491699216, 0.03824844741821289, 0.038182910919189454, 0.03808051300048828, 0.03824844741821289, 0.037961727142333986, 0.037918495178222655, 0.037991649627685545, 0.03834982299804687, 0.03807436752319336, 0.037904384613037106, 0.03811494445800781, 0.038349056243896486, 0.03792294311523438, 0.038299648284912106, 0.03787571334838867, 0.03802092742919922, 0.03821382522583008, 0.03803881454467774, 0.03793353652954102, 0.03806028747558594, 0.03834470367431641, 0.03837542343139649, 0.038335521697998046, 0.038585311889648435, 0.03833980941772461, 0.03839670562744141, 0.03845523071289063, 0.038413665771484376, 0.038447841644287106, 0.03828521728515625, 0.03837961578369141, 0.038563838958740236, 0.03826179122924805, 0.038388702392578126, 0.04575455856323242, 0.04010540771484375, 0.038629470825195314, 0.03806070327758789, 0.03787776184082031, 0.03807231903076172, 0.037748737335205076, 0.03782867050170898, 0.037957569122314454, 0.03783270263671875, 0.03788579177856445, 0.03774889755249024, 0.038072128295898434, 0.03791686248779297, 0.037793342590332034, 0.038154689788818356, 0.03767091369628906, 0.037867454528808596, 0.03785715103149414, 0.037873855590820314, 0.03808051300048828, 0.03801456069946289, 0.037891902923583985, 0.03821628952026367, 0.03853420639038086, 0.03838457489013672, 0.038438911437988284, 0.03874819183349609, 0.03888518524169922, 0.03860086441040039, 0.0381399040222168, 0.03827462387084961, 0.03827347183227539, 0.038352127075195315, 0.03808742523193359, 0.03794944000244141, 0.0380412483215332, 0.038037857055664065, 0.038166175842285155, 0.03801327896118164, 0.037969921112060545, 0.038133792877197266, 0.03827094268798828, 0.038250495910644534, 0.03799808120727539, 0.03804620742797851, 0.03798204803466797, 0.03801513671875, 0.038161792755126954, 0.03824294281005859, 0.03826483154296875, 0.038567615509033204, 0.038926559448242186, 0.03870115280151367, 0.03860684967041016, 0.038461505889892576, 0.038598590850830075, 0.03818038558959961, 0.038214111328125, 0.038422401428222654, 0.03829062271118164, 0.03818096160888672, 
0.03869782257080078, 0.04184486389160156, 0.042041439056396485, 0.04006908798217773, 0.03896319961547851, 0.03871654510498047, 0.037931903839111325, 0.037713920593261716, 0.037705726623535156, 0.03765657424926758, 0.03774457550048828, 0.03789574432373047, 0.037943809509277344, 0.03804569625854492, 0.03800806427001953, 0.03826969528198242, 0.038258689880371094, 0.03802067184448242, 0.037939647674560546, 0.038055614471435545, 0.0380173454284668, 0.037976062774658204, 0.03828883361816406, 0.0379172477722168, 0.03796105575561524, 0.03795011138916016, 0.03790959930419922, 0.03780905532836914, 0.03790643310546875, 0.038160385131835936, 0.038238239288330075, 0.03848944091796875, 0.038179454803466795, 0.038174720764160154, 0.03806208038330078, 0.03812351989746094, 0.03814604949951172, 0.038258689880371094, 0.038063232421875, 0.03809769439697266, 0.03869091033935547, 0.0384266242980957, 0.038147167205810545, 0.03794348907470703, 0.03813040161132813, 0.03817244720458984, 0.03833391952514648, 0.03825945663452148, 0.03827916717529297, 0.038174720764160154, 0.03794944000244141, 0.038063934326171875, 0.038295616149902345, 0.03825436782836914, 0.03814384078979492, 0.03833087921142578, 0.038319934844970704, 0.03822022247314453, 0.03812531280517578, 0.03801641464233398, 0.03783939361572266, 0.03833055877685547, 0.03829302215576172, 0.038231521606445315, 0.04493308639526367, 0.04011142349243164, 0.03869152069091797, 0.03813750457763672, 0.037970272064208985, 0.037720062255859374, 0.037774848937988284, 0.03792127990722656, 0.037891201019287106, 0.03780492782592773, 0.0379249267578125, 0.03816236877441406, 0.03813785552978516, 0.03807846450805664, 0.03804569625854492, 0.03799407958984375, 0.038080928802490234, 0.03804764938354492, 0.03792240142822265, 0.0382325439453125, 0.03834268951416016, 0.03805184173583984, 0.038093982696533205, 0.0380219841003418, 0.038365184783935545, 0.038481311798095705, 0.03863203048706055, 0.03865974426269531, 0.038702880859375, 0.03843337631225586, 0.038408161163330075, 0.038408191680908206, 0.03833446502685547, 0.03824025726318359, 0.03779135894775391, 0.0380863037109375, 0.03792300796508789, 0.03814217758178711, 0.038082111358642576, 0.03812838363647461, 0.038612991333007815, 0.038400001525878906, 0.03818905639648437, 0.038122943878173825, 0.03832889556884766, 0.03786751937866211, 0.037932289123535155, 0.037923583984375, 0.03830169677734375, 0.03853023910522461, 0.03845203018188476, 0.03862268829345703, 0.038468128204345704, 0.038828033447265625, 0.03868467330932617, 0.038542591094970706, 0.038437633514404296, 0.03843635177612305, 0.03851728057861328, 0.038481887817382814, 0.038356990814208985, 0.038288959503173826, 0.03814854431152344]",tokens/s,26.129498044136092,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch 
benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,882.249728,6232.604672,0.0,5830.08256,5627.341824,s,1,7.69474560546875,7.69474560546875,0.0,7.69474560546875,7.69474560546875,7.69474560546875,7.69474560546875,[7.69474560546875],,kWh,6.94612680413229e-06,7.587835732407286e-07,3.7275029819983097e-06,1.1432413359371327e-05,,MB,1299.0464,6295.519232,0.0,5880.414208,5763.868672,s,10,0.6898012619018554,0.06898012619018554,0.0019314298902433425,0.06932867431640624,0.07137216567993164,0.07170352363586426,0.07196861000061035,"[0.07004659271240235, 0.06594716644287109, 0.06973577880859375, 0.07129853057861328, 0.0680313949584961, 0.07203488159179687, 0.06789043426513672, 0.06987964630126953, 0.06601526641845704, 0.06892156982421875]",tokens/s,3711.2138544684713,kWh,2.234175531106668e-06,2.46386509635665e-07,1.4813582673587996e-06,3.961920308101133e-06,tokens/kWh,64615131.0707952,MB,1331.585024,6379.405312,0.0,5964.300288,5763.871232,s,10,17.636839965820315,1.7636839965820315,0.001726147159011865,1.7639865722656252,1.7653359375,1.7661005859375,1.7667123046875,"[1.761876953125, 1.7644449462890626, 1.7608316650390625, 1.764967041015625, 1.7640091552734376, 1.765166015625, 1.762084228515625, 1.766865234375, 1.7626307373046874, 1.7639639892578125]",tokens/s,35.72068472702149,kWh,5.1392405804730434e-05,5.666931981591902e-06,3.4110225761443005e-05,9.116956354776536e-05,tokens/kWh,691020.0899118397,,s,630,17.633591796875,0.02798982824900793,0.0006709935126169438,0.027814032554626465,0.028317695617675782,0.028527670192718506,0.03217824111938477,"[0.0311430721282959, 0.02903331184387207, 0.02814156723022461, 0.02772787284851074, 0.0275599365234375, 0.027503616333007814, 0.02764691162109375, 0.027666496276855468, 0.02760633659362793, 0.02770809555053711, 0.027634944915771485, 0.02782694435119629, 0.02766204833984375, 0.027863424301147462, 0.027647808074951173, 0.02760713577270508, 0.027643903732299805, 0.027743871688842774, 0.027586944580078127, 0.027763999938964844, 0.027610944747924804, 0.027808223724365234, 0.027908544540405273, 0.02760835266113281, 0.0276343994140625, 0.02772719955444336, 0.02759337615966797, 0.027778112411499023, 0.027607295989990236, 0.02769171142578125, 0.02777292823791504, 0.027580415725708008, 0.02775609588623047, 0.0277258243560791, 0.027768896102905272, 0.02841641616821289, 0.02846099281311035, 0.02863283157348633, 0.027672063827514647, 0.027464479446411134, 0.0276889591217041, 0.027801088333129883, 0.027918848037719726, 0.02817411231994629, 0.028275936126708985, 0.028269567489624024, 0.027834367752075196, 0.02757004737854004, 0.027879199981689452, 0.028260704040527343, 0.028298912048339845, 0.028020063400268556, 0.02774470329284668, 0.027882047653198242, 
0.028184576034545897, 0.028251455307006835, 0.03049951934814453, 0.027645887374877928, 0.02786092758178711, 0.028126976013183595, 0.02810870361328125, 0.028217824935913086, 0.028112096786499022, 0.032363838195800784, 0.02985759925842285, 0.02869478416442871, 0.028180192947387696, 0.02767299270629883, 0.027549888610839842, 0.027639904022216798, 0.027672800064086914, 0.02764988708496094, 0.02766819190979004, 0.027949151992797853, 0.027686527252197266, 0.027597536087036134, 0.027830272674560546, 0.02780086326599121, 0.027597536087036134, 0.027744255065917968, 0.027867136001586915, 0.02762723159790039, 0.027760927200317382, 0.02774015998840332, 0.027618560791015625, 0.027654912948608397, 0.027672576904296874, 0.027848703384399414, 0.027809791564941407, 0.0276375675201416, 0.027607231140136718, 0.027676671981811524, 0.027709152221679686, 0.027879648208618164, 0.027673887252807616, 0.02762348747253418, 0.027765024185180665, 0.02764361572265625, 0.027919071197509766, 0.0283504638671875, 0.028475391387939454, 0.02808185577392578, 0.028055871963500977, 0.02835251235961914, 0.028212543487548827, 0.02771219253540039, 0.027611135482788086, 0.027780576705932616, 0.02777142333984375, 0.027717023849487304, 0.027951711654663085, 0.028094463348388672, 0.02817433547973633, 0.028652671813964845, 0.028232576370239258, 0.028313600540161132, 0.028012479782104492, 0.027748416900634766, 0.027799295425415038, 0.028002752304077148, 0.028310623168945313, 0.028222175598144533, 0.02833785629272461, 0.0279837760925293, 0.028088735580444335, 0.02818662452697754, 0.03194252777099609, 0.029567136764526367, 0.028264255523681642, 0.027941055297851562, 0.02772777557373047, 0.027762239456176757, 0.02758505630493164, 0.02764998435974121, 0.027589887619018556, 0.027588544845581056, 0.027624639511108398, 0.027604639053344728, 0.027561599731445313, 0.027638463973999022, 0.027661376953125, 0.027533504486083986, 0.027588607788085938, 0.027690528869628906, 0.027771072387695314, 0.027654912948608397, 0.027744255065917968, 0.02775040054321289, 0.027750112533569335, 0.027623327255249023, 0.027912576675415038, 0.027643903732299805, 0.02770534324645996, 0.0277227840423584, 0.027654912948608397, 0.02777724838256836, 0.027728960037231444, 0.02761414337158203, 0.02776473617553711, 0.02778112030029297, 0.027753952026367188, 0.028194656372070314, 0.028443328857421873, 0.02817840003967285, 0.028018720626831056, 0.027915264129638673, 0.027783552169799806, 0.02791609573364258, 0.027718175888061525, 0.028158239364624024, 0.028282880783081055, 0.02820025634765625, 0.027840991973876954, 0.027646175384521486, 0.027930624008178712, 0.028531904220581054, 0.027972415924072267, 0.02772172737121582, 0.02785081672668457, 0.028087871551513672, 0.028311935424804688, 0.02831564712524414, 0.027791519165039063, 0.027705184936523436, 0.028012544631958007, 0.0280611515045166, 0.028149311065673827, 0.02806825637817383, 0.027824703216552733, 0.032860160827636715, 0.02992201614379883, 0.02852249526977539, 0.02817433547973633, 0.02803436851501465, 0.028407840728759765, 0.027888544082641603, 0.02781977653503418, 0.027650047302246093, 0.027686527252197266, 0.02759718322753906, 0.027624992370605467, 0.02773468780517578, 0.02756150436401367, 0.0276278076171875, 0.027618303298950195, 0.02772275161743164, 0.027662336349487306, 0.027938047409057618, 0.027900672912597655, 0.027860992431640624, 0.027572223663330078, 0.027744255065917968, 0.027893888473510743, 0.027653472900390625, 0.027716127395629883, 0.027785472869873047, 0.02774163246154785, 0.02780396842956543, 
0.027930624008178712, 0.027830272674560546, 0.027712799072265624, 0.027570911407470703, 0.027844608306884764, 0.027686784744262696, 0.027809919357299803, 0.028256256103515624, 0.02829516792297363, 0.028233728408813476, 0.027840511322021484, 0.027662336349487306, 0.027784543991088866, 0.028070560455322267, 0.028090368270874022, 0.028229183197021484, 0.028324287414550783, 0.027967487335205078, 0.027674144744873046, 0.027810272216796876, 0.028043264389038085, 0.028110847473144532, 0.02823302459716797, 0.02819171142578125, 0.027911487579345702, 0.02778153610229492, 0.02782972717285156, 0.028107295989990233, 0.02830745506286621, 0.02832294464111328, 0.02794697570800781, 0.02787830352783203, 0.027769855499267578, 0.02783443260192871, 0.03287263870239258, 0.029944992065429686, 0.02848975944519043, 0.028056224822998046, 0.02805097579956055, 0.02827052879333496, 0.027656896591186524, 0.02775606346130371, 0.02763974380493164, 0.027568672180175783, 0.02755379295349121, 0.027672256469726562, 0.027691328048706054, 0.027648000717163085, 0.027805696487426756, 0.027647104263305664, 0.027620223999023436, 0.027668479919433595, 0.027691007614135742, 0.028043167114257812, 0.027973247528076173, 0.02795724868774414, 0.027840255737304687, 0.027599136352539064, 0.027673023223876953, 0.027709375381469725, 0.027689023971557616, 0.027963392257690428, 0.027891231536865235, 0.02768556785583496, 0.02756790351867676, 0.027633888244628906, 0.027836191177368165, 0.027715583801269532, 0.027768768310546876, 0.02764192008972168, 0.027897855758666993, 0.02816204833984375, 0.02831155204772949, 0.028317695617675782, 0.02812259292602539, 0.028023296356201172, 0.02817171287536621, 0.028184160232543946, 0.028236799240112305, 0.028063360214233397, 0.027651968002319335, 0.027631935119628907, 0.027809247970581055, 0.02766694450378418, 0.027893856048583986, 0.028127359390258788, 0.028225536346435546, 0.02814566421508789, 0.028012544631958007, 0.027789024353027342, 0.027883039474487305, 0.028058368682861327, 0.028268320083618164, 0.028135648727416994, 0.02792825508117676, 0.027697471618652342, 0.02777497673034668, 0.032299007415771484, 0.029824928283691408, 0.02854457664489746, 0.02759939193725586, 0.027459583282470702, 0.027531072616577147, 0.02779078483581543, 0.027804256439208985, 0.027693216323852538, 0.02764575958251953, 0.027709632873535155, 0.028452512741088867, 0.027852672576904297, 0.02770787239074707, 0.027602272033691408, 0.027806367874145508, 0.027712608337402345, 0.02769603157043457, 0.027717632293701173, 0.027639808654785155, 0.027801536560058595, 0.027783199310302733, 0.0276091194152832, 0.027631616592407225, 0.027793407440185547, 0.027828224182128908, 0.028013599395751952, 0.028197792053222655, 0.028371007919311523, 0.027932704925537108, 0.02759676742553711, 0.027693056106567384, 0.027703039169311522, 0.027733856201171875, 0.027998624801635744, 0.028317695617675782, 0.028585248947143555, 0.028243967056274414, 0.028158079147338866, 0.027953760147094726, 0.02777292823791504, 0.027891744613647462, 0.028071136474609376, 0.028306175231933593, 0.028134719848632812, 0.027914943695068358, 0.027720800399780275, 0.02778995132446289, 0.02790982437133789, 0.027762304306030272, 0.027607551574707033, 0.027984352111816407, 0.028076032638549804, 0.02815180778503418, 0.028278783798217775, 0.02816201591491699, 0.027758176803588868, 0.027713983535766602, 0.02793244743347168, 0.02806153678894043, 0.028076416015625, 0.028317695617675782, 0.028417951583862306, 0.03245270538330078, 0.029742464065551758, 0.028654144287109374, 
0.028919519424438475, 0.027551744461059572, 0.027672544479370117, 0.02756345558166504, 0.02761414337158203, 0.027695104598999022, 0.02767001533508301, 0.02759321594238281, 0.027701248168945314, 0.027704416275024416, 0.02785152053833008, 0.027875455856323242, 0.02763903999328613, 0.027656768798828123, 0.027674751281738283, 0.027636991500854493, 0.027789632797241212, 0.027732511520385743, 0.02771331214904785, 0.027928800582885743, 0.02792857551574707, 0.02811235237121582, 0.027773471832275392, 0.02768076705932617, 0.027668479919433595, 0.027713151931762697, 0.027677055358886718, 0.027893760681152343, 0.027875328063964845, 0.02772991943359375, 0.027574176788330077, 0.027889759063720702, 0.027734079360961915, 0.02767660713195801, 0.027951135635375976, 0.028076000213623047, 0.028020736694335937, 0.028082048416137696, 0.028198591232299806, 0.02834592056274414, 0.02810969543457031, 0.027777023315429687, 0.027631616592407225, 0.027813888549804686, 0.027807743072509765, 0.027641311645507812, 0.0277806396484375, 0.02799228858947754, 0.02794576072692871, 0.027860992431640624, 0.027711488723754882, 0.02775449562072754, 0.027883232116699217, 0.02781417655944824, 0.027666431427001953, 0.028018688201904295, 0.028329919815063477, 0.02844473648071289, 0.02801363182067871, 0.028112831115722655, 0.03222492980957031, 0.029646848678588866, 0.02846873664855957, 0.02805232048034668, 0.028043264389038085, 0.028010496139526365, 0.028018688201904295, 0.028016639709472657, 0.028038303375244142, 0.028130144119262696, 0.028062816619873046, 0.02808310317993164, 0.02779136085510254, 0.027668479919433595, 0.02812723159790039, 0.02780364799499512, 0.027865087509155274, 0.027627647399902342, 0.02782195281982422, 0.02773196792602539, 0.027617279052734374, 0.02772991943359375, 0.02771353530883789, 0.027852800369262694, 0.027834367752075196, 0.02797772789001465, 0.02789990425109863, 0.02772377586364746, 0.027591840744018555, 0.027677343368530272, 0.027670080184936524, 0.027709407806396483, 0.0277490234375, 0.02786832046508789, 0.028090368270874022, 0.02848227119445801, 0.02827686309814453, 0.028147552490234377, 0.028186784744262696, 0.028057600021362306, 0.02773401641845703, 0.02785625648498535, 0.028114688873291015, 0.02835647964477539, 0.028247039794921876, 0.02779961585998535, 0.027645471572875977, 0.027734111785888672, 0.027823711395263673, 0.027712480545043945, 0.027746047973632813, 0.027922431945800782, 0.027880704879760743, 0.027620031356811525, 0.028782655715942383, 0.02800230407714844, 0.028311328887939455, 0.028305631637573242, 0.02790719985961914, 0.02775859260559082, 0.02776767921447754, 0.028063743591308594, 0.028349695205688478, 0.032063934326171876, 0.02974483108520508, 0.028563232421875, 0.027631807327270507, 0.027521408081054688, 0.027917984008789063, 0.027547840118408204, 0.02758406448364258, 0.027568735122680665, 0.027708831787109374, 0.027728031158447266, 0.02763155174255371, 0.02780620765686035, 0.027865087509155274, 0.02752227210998535, 0.027681568145751952, 0.027672544479370117, 0.02784668731689453, 0.02786457633972168, 0.027674591064453125, 0.02761782455444336, 0.027611135482788086, 0.02770534324645996, 0.027971584320068358, 0.027854848861694335, 0.027696287155151367, 0.028394271850585937, 0.02760300827026367, 0.027631616592407225, 0.027688287734985353, 0.027698944091796875, 0.027851680755615234, 0.027851968765258788, 0.02766521644592285, 0.027742048263549805, 0.028182687759399413, 0.028448768615722656, 0.028212543487548827, 0.028034912109375, 0.027939680099487305, 0.02785424041748047, 
0.027671136856079102, 0.027725183486938476, 0.02787596893310547, 0.027753631591796876, 0.027833183288574218, 0.028112895965576173, 0.02821664047241211, 0.028172544479370117, 0.027867008209228515, 0.027806432723999023, 0.027858783721923828, 0.028107872009277345, 0.028373920440673828, 0.028248064041137694, 0.028093631744384766, 0.028185407638549806, 0.028321727752685547, 0.02819692802429199, 0.02801424026489258, 0.027906368255615235, 0.027790943145751954, 0.027779520034790037, 0.03368198394775391, 0.03031804847717285, 0.028805280685424806, 0.028053728103637696, 0.027801759719848634, 0.027611391067504883, 0.02762726402282715, 0.027680479049682617, 0.027601184844970702, 0.027617279052734374, 0.02760704040527344, 0.027668479919433595, 0.027670528411865233, 0.027774688720703124, 0.02777731132507324, 0.027600223541259766, 0.02783817672729492, 0.02778985595703125, 0.02759878349304199, 0.027701311111450196, 0.027734432220458984, 0.02774015998840332, 0.027821439743041992, 0.027723743438720704, 0.0276015682220459, 0.027606143951416015, 0.02778335952758789, 0.027955551147460938, 0.028246368408203125, 0.02835660743713379, 0.027998144149780274, 0.027626752853393555, 0.02768160057067871, 0.027793312072753908, 0.027703039169311522, 0.027855199813842775, 0.028049407958984376, 0.02846080017089844, 0.028204736709594728, 0.027669023513793946, 0.027598335266113282, 0.027785408020019532, 0.027689311981201174, 0.02775449562072754, 0.027901952743530273, 0.027862239837646484, 0.027671327590942384, 0.027797056198120118, 0.028057472229003906, 0.028219968795776366, 0.028383232116699218, 0.02813337516784668, 0.02779257583618164, 0.02764473533630371, 0.027822080612182616, 0.028079423904418945, 0.02811359977722168, 0.028192768096923827, 0.02831564712524414, 0.028086271286010742, 0.02776678466796875, 0.027650047302246093, 0.027893760681152343]",tokens/s,35.72726460139833,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 353, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 153823 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,883.953664,3447.586816,0.0,3045.064704,2842.846208,s,1,8.02355419921875,8.02355419921875,0.0,8.02355419921875,8.02355419921875,8.02355419921875,8.02355419921875,[8.02355419921875],,kWh,6.202965183304817e-06,6.762853036365872e-07,1.9544460079928827e-06,8.833696494934286e-06,,MB,1300.054016,3623.747584,0.0,3208.64256,2982.452736,s,10,0.3353229446411133,0.03353229446411133,0.0010901052342009124,0.03330710411071777,0.033897095108032224,0.03529018039703369,0.036404648628234865,"[0.03668326568603516, 0.033587520599365234, 0.03342169570922852, 0.03348783874511719, 0.033108894348144534, 0.032782272338867186, 0.03283500671386719, 0.03276512145996094, 0.033192512512207034, 0.03345881652832031]",tokens/s,7634.431347189486,kWh,1.1790570660618e-06,1.3002797003030515e-07,7.822038067661773e-07,2.0912888428582826e-06,tokens/kWh,122412549.98047537,MB,1332.5312,3623.747584,0.0,3208.64256,2982.455296,s,10,12.766562988281251,1.2766562988281251,0.004098770278977627,1.2768309326171874,1.2813100463867186,1.2817324157714844,1.2820703112792968,"[1.28215478515625, 1.27374853515625, 1.2764273681640625, 1.2772344970703124, 1.2788096923828125, 1.2747169189453125, 1.2802906494140625, 1.2812161865234375, 1.274308837890625, 1.267655517578125]",tokens/s,49.3476592390836,kWh,3.7326319415185716e-05,4.116590090107799e-06,2.3951093757633725e-05,6.539400326292723e-05,tokens/kWh,963391.0887317641,,s,630,12.762303985595716,0.020257625373961433,0.00032702290583168614,0.020191136360168456,0.020447423362731934,0.02062287359237671,0.02176067825317383,"[0.020412416458129884, 0.020270816802978514, 0.02054569625854492, 0.020369535446166993, 0.020259872436523437, 0.020372320175170898, 0.020449151992797853, 0.020355039596557618, 0.02037379264831543, 0.020292703628540038, 0.02038262367248535, 0.02009219169616699, 0.02032918357849121, 0.020289024353027343, 0.0203855037689209, 0.020738048553466795, 0.020396831512451172, 0.02072550392150879, 0.020250879287719726, 0.02029363250732422, 0.020350976943969725, 0.020426752090454102, 0.020319936752319336, 0.020152544021606444, 0.020211807250976564, 0.020135551452636718, 0.020177024841308594, 0.020187040328979493, 0.020226400375366212, 0.020115455627441405, 0.020125728607177734, 0.0204202880859375, 0.020160800933837892, 0.020213184356689454, 0.0202225284576416, 0.020283008575439455, 0.020381792068481445, 0.020199680328369142, 0.020266016006469725, 0.0202926082611084, 0.020328128814697265, 0.020365631103515625, 0.020528224945068358, 0.020429248809814452, 0.020269535064697267, 0.020109312057495117, 0.020254335403442382, 0.020402559280395506, 0.0202127685546875, 0.020255712509155272, 0.02025881576538086, 0.021055200576782226, 0.020455711364746092, 
0.020428800582885744, 0.020537120819091797, 0.020379104614257813, 0.020415231704711913, 0.020636831283569336, 0.020534112930297853, 0.020381664276123045, 0.020299808502197265, 0.02025609588623047, 0.020483903884887696, 0.02063155174255371, 0.020447231292724608, 0.02040131187438965, 0.020269920349121093, 0.020254655838012694, 0.020236032485961914, 0.020222272872924805, 0.020246528625488282, 0.020393152236938477, 0.020212543487548827, 0.02015158462524414, 0.020095712661743165, 0.020025344848632814, 0.02015987205505371, 0.020085376739501955, 0.020178943634033202, 0.020118783950805665, 0.019987199783325197, 0.020205568313598633, 0.020176895141601564, 0.019992576599121094, 0.02016409683227539, 0.020320192337036132, 0.02003977584838867, 0.020113887786865233, 0.02007040023803711, 0.020244735717773438, 0.02053094482421875, 0.020135936737060548, 0.020086143493652345, 0.020266847610473634, 0.020007104873657228, 0.02023484802246094, 0.02021171188354492, 0.02034806442260742, 0.020400991439819337, 0.020287488937377928, 0.02039347267150879, 0.020283327102661133, 0.020212223052978515, 0.020284927368164063, 0.020163135528564455, 0.020223424911499022, 0.020208192825317384, 0.020413984298706056, 0.020106719970703124, 0.02003865623474121, 0.020067712783813477, 0.020275487899780273, 0.020054367065429686, 0.020131359100341795, 0.020308448791503907, 0.020154367446899413, 0.020170047760009767, 0.020134592056274415, 0.020063520431518555, 0.02024239921569824, 0.020380224227905273, 0.0201648006439209, 0.020033472061157225, 0.020312128067016603, 0.02015135955810547, 0.020410335540771485, 0.02054256057739258, 0.02013481521606445, 0.02004787254333496, 0.02040012741088867, 0.02009859275817871, 0.020154848098754882, 0.020242431640625, 0.02012099266052246, 0.02023484802246094, 0.019928768157958986, 0.019984703063964843, 0.01994099235534668, 0.020418943405151366, 0.020076543807983398, 0.019982208251953126, 0.019924863815307618, 0.02003379249572754, 0.020619264602661135, 0.02018217658996582, 0.020040224075317383, 0.020409759521484376, 0.020154272079467773, 0.02015279960632324, 0.02023049545288086, 0.02018937683105469, 0.020297727584838866, 0.02022809600830078, 0.020101119995117187, 0.020231231689453125, 0.020209695816040037, 0.020059040069580078, 0.020317472457885743, 0.020245216369628907, 0.020148223876953125, 0.020191232681274415, 0.020131839752197265, 0.020272287368774414, 0.02022649574279785, 0.020201887130737305, 0.020008960723876954, 0.020116735458374023, 0.020176864624023436, 0.02010736083984375, 0.020619775772094725, 0.02024019241333008, 0.020361600875854494, 0.020060159683227538, 0.020365312576293947, 0.02025267219543457, 0.020128799438476563, 0.022811391830444335, 0.020121664047241212, 0.020258975982666017, 0.020168703079223634, 0.020545536041259766, 0.020303136825561525, 0.020290271759033203, 0.021020416259765626, 0.020181055068969726, 0.020145824432373047, 0.020206111907958985, 0.02027926445007324, 0.020122751235961914, 0.020327775955200196, 0.020048288345336913, 0.020069984436035155, 0.02014476776123047, 0.020133888244628906, 0.02006220817565918, 0.020123647689819335, 0.02005606460571289, 0.020185087203979494, 0.019990528106689453, 0.020172800064086914, 0.0204083194732666, 0.02006937599182129, 0.02001203155517578, 0.020025344848632814, 0.020074495315551756, 0.02009814453125, 0.02034492874145508, 0.02020639991760254, 0.020090879440307616, 0.02010316848754883, 0.020599872589111327, 0.02058732795715332, 0.02044121551513672, 0.020346879959106445, 0.02022323226928711, 0.020073055267333984, 0.02015577507019043, 
0.020240543365478515, 0.02037811279296875, 0.020160640716552734, 0.02045939254760742, 0.020476032257080078, 0.020377599716186523, 0.020191232681274415, 0.02031001663208008, 0.020148223876953125, 0.020230335235595705, 0.020229951858520508, 0.020183040618896485, 0.020117504119873047, 0.020090015411376953, 0.020237152099609374, 0.02005401611328125, 0.021161983489990235, 0.020156415939331054, 0.02018675231933594, 0.01998067283630371, 0.02001286315917969, 0.020127935409545897, 0.022877695083618164, 0.021442975997924805, 0.0202957763671875, 0.020129791259765627, 0.020494335174560546, 0.02007843208312988, 0.020056224822998046, 0.020227968215942384, 0.020098848342895506, 0.020121952056884766, 0.020060159683227538, 0.020133888244628906, 0.020105215072631837, 0.020340736389160157, 0.020151552200317384, 0.02050307273864746, 0.020589792251586914, 0.02046601676940918, 0.02032236862182617, 0.020344959259033204, 0.020296159744262694, 0.020379648208618165, 0.02023219108581543, 0.02024448013305664, 0.02028339195251465, 0.0204902400970459, 0.020188800811767577, 0.020396352767944336, 0.020500543594360352, 0.02028544044494629, 0.020402175903320312, 0.020137983322143553, 0.019939327239990236, 0.02003763198852539, 0.020225215911865234, 0.02021049690246582, 0.020039680480957032, 0.020051967620849608, 0.020207616806030275, 0.020166656494140626, 0.02034819221496582, 0.020046560287475586, 0.0200581111907959, 0.020032608032226562, 0.020513696670532225, 0.020238336563110353, 0.020077983856201173, 0.020183168411254882, 0.020203231811523437, 0.02009164810180664, 0.020315135955810547, 0.020204544067382812, 0.020154367446899413, 0.020036863327026366, 0.02012179183959961, 0.020596384048461914, 0.021732255935668944, 0.0206376953125, 0.02034623908996582, 0.02019596862792969, 0.02020351982116699, 0.0200614070892334, 0.02010380744934082, 0.02029532814025879, 0.02025052833557129, 0.02037001609802246, 0.020305919647216796, 0.020338687896728515, 0.020948064804077147, 0.020263839721679687, 0.020178943634033202, 0.02044927978515625, 0.020332191467285158, 0.020214111328125, 0.02022400093078613, 0.02028544044494629, 0.02063052749633789, 0.020230144500732423, 0.0201910400390625, 0.020254240036010743, 0.020163232803344727, 0.02003763198852539, 0.020192384719848633, 0.02013043212890625, 0.02004774475097656, 0.020121055603027342, 0.020283552169799806, 0.02034764862060547, 0.02063974380493164, 0.020418560028076172, 0.02031001663208008, 0.02024448013305664, 0.020625408172607423, 0.02024038314819336, 0.02024985694885254, 0.02018115234375, 0.020381631851196288, 0.02059699249267578, 0.020175264358520507, 0.020185087203979494, 0.020047456741333007, 0.020085023880004882, 0.02016182327270508, 0.02015225601196289, 0.02016655921936035, 0.02015737533569336, 0.020162527084350585, 0.02012771224975586, 0.020074560165405275, 0.020147327423095704, 0.020250911712646483, 0.020320383071899414, 0.02024297523498535, 0.020121599197387697, 0.020154655456542967, 0.02034223937988281, 0.020234495162963866, 0.020389888763427736, 0.020197376251220703, 0.020176895141601564, 0.02020966339111328, 0.020105056762695313, 0.02042825508117676, 0.020083391189575195, 0.020084575653076173, 0.020170528411865233, 0.020125951766967774, 0.0202172794342041, 0.020340415954589845, 0.020183263778686525, 0.020396831512451172, 0.02017228889465332, 0.020125728607177734, 0.020318687438964842, 0.02007244873046875, 0.02008883285522461, 0.020254655838012694, 0.02017830467224121, 0.020161216735839843, 0.02027939224243164, 0.020725759506225586, 0.023225696563720703, 0.02113804817199707, 
0.020258848190307616, 0.020411680221557617, 0.020224735260009764, 0.020178943634033202, 0.020191232681274415, 0.020243648529052735, 0.02039686393737793, 0.020263967514038087, 0.020278240203857423, 0.02007244873046875, 0.020187135696411132, 0.020189184188842774, 0.020336448669433595, 0.020252864837646486, 0.02020262336730957, 0.02024937629699707, 0.020192768096923826, 0.020113279342651367, 0.020169055938720704, 0.02014028739929199, 0.020756223678588866, 0.020271455764770508, 0.02024220848083496, 0.02263212776184082, 0.02019081687927246, 0.020337472915649413, 0.020225343704223634, 0.020185888290405272, 0.02013804817199707, 0.020160512924194338, 0.020162303924560546, 0.02014031982421875, 0.02028745651245117, 0.020217472076416016, 0.02012099266052246, 0.02003971290588379, 0.020071359634399415, 0.02006537628173828, 0.020261791229248045, 0.020092927932739257, 0.020117504119873047, 0.02024831962585449, 0.020051200866699218, 0.02014668846130371, 0.020213855743408202, 0.020212127685546876, 0.020407583236694334, 0.020099712371826173, 0.020055231094360353, 0.019997600555419923, 0.02013974380493164, 0.020267295837402343, 0.020123647689819335, 0.020078079223632812, 0.020209375381469726, 0.020228511810302736, 0.020294015884399413, 0.020278879165649414, 0.02017321586608887, 0.0204202880859375, 0.020077152252197264, 0.020156415939331054, 0.020143360137939454, 0.02029372787475586, 0.02011408042907715, 0.020230144500732423, 0.0209039363861084, 0.02031167984008789, 0.020937088012695313, 0.020239936828613282, 0.020314464569091795, 0.02025071907043457, 0.020166656494140626, 0.02025267219543457, 0.02066022491455078, 0.020213760375976563, 0.020406272888183592, 0.020307968139648438, 0.020150272369384766, 0.02018694305419922, 0.020491647720336913, 0.02012851142883301, 0.02028960037231445, 0.020342048645019532, 0.020220640182495118, 0.021300384521484375, 0.021772287368774415, 0.0201378231048584, 0.020394304275512695, 0.02009561538696289, 0.02015648078918457, 0.020135839462280272, 0.020166751861572265, 0.020424224853515624, 0.020292064666748048, 0.020191232681274415, 0.020082687377929686, 0.02006220817565918, 0.020060159683227538, 0.02003286361694336, 0.020062240600585937, 0.019960447311401366, 0.02002124786376953, 0.02007040023803711, 0.020113407135009767, 0.020088768005371092, 0.020412416458129884, 0.019947328567504884, 0.02264847946166992, 0.021424671173095704, 0.0211878719329834, 0.020156320571899415, 0.02000912094116211, 0.020142816543579103, 0.02012291145324707, 0.02032918357849121, 0.020060159683227538, 0.020197376251220703, 0.02002739143371582, 0.020033504486083983, 0.02013507270812988, 0.020102016448974608, 0.020563520431518555, 0.021150239944458006, 0.02010316848754883, 0.020166271209716796, 0.020078559875488282, 0.01995120048522949, 0.02009347152709961, 0.022306495666503907, 0.02018569564819336, 0.020262912750244142, 0.020322303771972656, 0.020199424743652345, 0.020152320861816408, 0.01998847961425781, 0.020309984207153322, 0.01999875259399414, 0.02006220817565918, 0.020055200576782225, 0.020019487380981447, 0.020115680694580078, 0.02005971145629883, 0.019935295104980467, 0.02001692771911621, 0.020218751907348634, 0.02013190460205078, 0.020156415939331054, 0.020133888244628906, 0.020160512924194338, 0.020221824645996093, 0.0203633918762207, 0.020133888244628906, 0.02017679977416992, 0.020179040908813478, 0.02023628807067871, 0.020202751159667968, 0.020095104217529296, 0.020107904434204103, 0.020336383819580077, 0.02011884880065918, 0.020148799896240233, 0.019951135635375976, 0.020067167282104493, 
0.02022105598449707, 0.020151168823242187, 0.020430688858032228, 0.020215200424194335, 0.02023295974731445, 0.020227807998657227, 0.02011065673828125, 0.02076688003540039, 0.02026576042175293, 0.020325855255126955, 0.020150848388671875, 0.02012774467468262, 0.020168703079223634, 0.020166656494140626, 0.020164608001708984, 0.02011955261230469, 0.02025187110900879, 0.0200281925201416, 0.020172800064086914, 0.02022400093078613, 0.020074495315551756, 0.020296607971191406, 0.020131839752197265, 0.020101119995117187, 0.0201125431060791, 0.020421472549438477, 0.020114591598510742, 0.020320863723754884, 0.01994163131713867, 0.02016374397277832, 0.020024160385131835, 0.020170751571655272, 0.020131359100341795, 0.020119104385375976, 0.020106143951416015, 0.020319520950317384, 0.01995849609375, 0.02016169548034668, 0.020271327972412108, 0.020185728073120118, 0.02015190315246582, 0.020019296646118165, 0.020243871688842775, 0.020038560867309572, 0.020162559509277343, 0.02006630325317383, 0.019965951919555663, 0.02014723205566406, 0.020484991073608398, 0.02009209632873535, 0.0199335994720459, 0.019923456192016603, 0.020021215438842773, 0.01995369529724121, 0.02002124786376953, 0.020076543807983398, 0.02005401611328125, 0.019959808349609375, 0.020273151397705077, 0.020115360260009766, 0.02001103973388672, 0.020018976211547853, 0.02001679992675781, 0.020584415435791016, 0.020144800186157226, 0.01997209548950195, 0.02010316848754883, 0.020010976791381835, 0.02001513671875, 0.019922304153442382, 0.02007923126220703, 0.020068351745605468, 0.02006812858581543, 0.02003171157836914, 0.019957759857177734, 0.020502527236938475, 0.02004694366455078, 0.020361984252929687, 0.020199583053588866, 0.020120767593383788, 0.020047712326049804, 0.01998703956604004, 0.02016703987121582, 0.020053855895996092]",tokens/s,49.36412741077595,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,894.767104,3900.571648,0.0,3498.049536,3295.335424,s,1,7.838755859375,7.838755859375,0.0,7.838755859375,7.838755859375,7.838755859375,7.838755859375,[7.838755859375],,kWh,6.224694845847504e-06,6.710429606477696e-07,1.8466681439982535e-06,8.742405950493527e-06,,MB,1341.104128,4045.275136,0.0,3630.170112,3408.337408,s,10,0.3288679676055908,0.03288679676055908,0.001412033065997222,0.03255937576293945,0.03362424659729004,0.035211659431457515,0.036481589698791506,"[0.036799072265625, 0.033000160217285156, 0.03128121566772461, 0.03239904022216797, 0.03184854316711426, 0.0324106559753418, 0.03270809555053711, 0.03282444763183594, 0.03327148818969727, 0.03232524871826172]",tokens/s,7784.278957414884,kWh,1.1840258874325065e-06,1.3057546298347479e-07,7.871047682348112e-07,2.1017061186507925e-06,tokens/kWh,121805802.3090028,MB,1374.035968,4047.372288,0.0,3632.267264,3408.339968,s,10,12.86194372558594,1.286194372558594,0.005984142358966597,1.2867727661132813,1.29386669921875,1.2940996093750001,1.2942859375,"[1.29381494140625, 1.288985107421875, 1.29433251953125, 1.28923046875, 1.279242431640625, 1.282271240234375, 1.2768880615234375, 1.2806627197265625, 1.2845604248046876, 
1.291955810546875]",tokens/s,48.981710186366065,kWh,3.748788752965326e-05,4.133219952902962e-06,2.4667693373364976e-05,6.62888008559212e-05,tokens/kWh,950386.7800675802,,s,630,12.859231740951554,0.020411478953891334,0.000471211326385086,0.020317135810852052,0.020611968421936036,0.020888641738891597,0.02278375881195069,"[0.020925312042236327, 0.020360736846923827, 0.020628032684326173, 0.020264543533325196, 0.020610496520996092, 0.020392927169799804, 0.020381696701049806, 0.02026848030090332, 0.020140607833862303, 0.020501855850219727, 0.020204191207885743, 0.020566112518310548, 0.02531318473815918, 0.020360448837280273, 0.0201530876159668, 0.020205568313598633, 0.02025881576538086, 0.02023206329345703, 0.020448959350585938, 0.020225919723510743, 0.0204334716796875, 0.020206687927246093, 0.02020035171508789, 0.02027065658569336, 0.020244735717773438, 0.020088544845581056, 0.020095104217529296, 0.02024998474121094, 0.020591615676879883, 0.020213727951049806, 0.02034662437438965, 0.020278816223144532, 0.020244928359985353, 0.020735935211181642, 0.020304351806640624, 0.020199296951293945, 0.02034627151489258, 0.021243904113769533, 0.020582784652709962, 0.02035251235961914, 0.020584768295288085, 0.020359359741210937, 0.020699359893798827, 0.02022400093078613, 0.020447231292724608, 0.020465503692626952, 0.02026016044616699, 0.020428800582885744, 0.020431711196899415, 0.020254720687866212, 0.021056928634643556, 0.02202249526977539, 0.020704639434814452, 0.020452224731445312, 0.020481760025024415, 0.020361503601074218, 0.020230144500732423, 0.0201779842376709, 0.020411327362060548, 0.02020355224609375, 0.02026848030090332, 0.020534048080444334, 0.02281769561767578, 0.021128095626831055, 0.02048521614074707, 0.02053945541381836, 0.020462432861328126, 0.020346879959106445, 0.020418304443359375, 0.020340511322021484, 0.020365983963012695, 0.02024838447570801, 0.02011955261230469, 0.020313951492309572, 0.02018729591369629, 0.020307968139648438, 0.02023219108581543, 0.020213279724121094, 0.020197216033935546, 0.020055776596069337, 0.020063167572021486, 0.020219871520996094, 0.02045132827758789, 0.020250431060791017, 0.02027244758605957, 0.020347040176391603, 0.020337087631225586, 0.020050207138061524, 0.02023971176147461, 0.020202144622802735, 0.020276512145996094, 0.02039676856994629, 0.020137983322143553, 0.020121088027954103, 0.02025651168823242, 0.020105983734130858, 0.020146175384521483, 0.020101184844970702, 0.02036729621887207, 0.020219520568847658, 0.02038412857055664, 0.020245695114135744, 0.02032640075683594, 0.02009187126159668, 0.020280767440795898, 0.020540000915527344, 0.020303680419921876, 0.020446687698364257, 0.020260480880737303, 0.02038262367248535, 0.02018252754211426, 0.02044585609436035, 0.025284448623657228, 0.020244735717773438, 0.020196767807006837, 0.020131616592407225, 0.020102943420410156, 0.020413312911987305, 0.020396095275878906, 0.021872480392456053, 0.02252390480041504, 0.020358879089355467, 0.020422624588012694, 0.020285760879516602, 0.020388063430786134, 0.021646175384521484, 0.020912128448486327, 0.020340736389160157, 0.020516767501831054, 0.020399391174316408, 0.020558591842651366, 0.020136224746704103, 0.020231967926025392, 0.021212799072265625, 0.021784959793090822, 0.02164121627807617, 0.02033459281921387, 0.020529247283935546, 0.020360095977783203, 0.020132863998413086, 0.02020742416381836, 0.02018921661376953, 0.020226207733154297, 0.02033420753479004, 0.0203143367767334, 0.02081564712524414, 0.020503936767578126, 0.020333440780639648, 0.020293760299682616, 
0.02025657653808594, 0.02361369514465332, 0.02017068862915039, 0.020176895141601564, 0.020342592239379884, 0.02031545639038086, 0.0205295352935791, 0.020299648284912108, 0.020304576873779297, 0.020227264404296875, 0.020752992630004883, 0.022859296798706054, 0.022700672149658204, 0.020358976364135743, 0.020402368545532228, 0.020352127075195313, 0.020241056442260742, 0.020324575424194337, 0.02024038314819336, 0.02021958351135254, 0.020299903869628905, 0.020402015686035155, 0.020233760833740233, 0.020489023208618163, 0.020376800537109375, 0.020323040008544922, 0.020325664520263673, 0.020204479217529298, 0.02038153648376465, 0.020195167541503908, 0.020207231521606445, 0.020382240295410158, 0.020686847686767578, 0.020311712265014648, 0.02025712013244629, 0.020744192123413087, 0.020418432235717772, 0.020142112731933594, 0.020183135986328125, 0.02050003242492676, 0.020756799697875975, 0.02045078468322754, 0.021783071517944334, 0.020979711532592774, 0.020959104537963867, 0.020508800506591797, 0.020432031631469727, 0.020478912353515625, 0.020455072402954102, 0.02034048080444336, 0.02063148880004883, 0.021710912704467775, 0.020128255844116212, 0.02017500877380371, 0.020159360885620117, 0.020431840896606445, 0.020174848556518556, 0.020514144897460937, 0.020228767395019533, 0.02036911964416504, 0.020436927795410155, 0.02298476791381836, 0.020600639343261718, 0.02050899124145508, 0.020357248306274413, 0.020538816452026366, 0.020296319961547852, 0.02028335952758789, 0.020324352264404297, 0.020466815948486327, 0.020386112213134765, 0.020245248794555665, 0.020375040054321288, 0.020489599227905272, 0.02033695983886719, 0.020511167526245117, 0.02024287986755371, 0.020369152069091796, 0.020269119262695312, 0.020414112091064453, 0.020193567276000978, 0.020178848266601563, 0.020406368255615235, 0.020357311248779295, 0.02060652732849121, 0.020387231826782228, 0.020293792724609374, 0.02031452751159668, 0.0199967041015625, 0.020312320709228514, 0.020164159774780272, 0.02027267265319824, 0.02042572784423828, 0.020260927200317382, 0.02008662414550781, 0.020364831924438477, 0.020687328338623048, 0.02030905532836914, 0.02025155258178711, 0.020291616439819336, 0.02037081527709961, 0.020213855743408202, 0.02012214469909668, 0.020476512908935547, 0.02031590461730957, 0.020375263214111327, 0.02038137626647949, 0.02028566360473633, 0.02037743949890137, 0.020441951751708983, 0.020436960220336912, 0.020246559143066407, 0.020547040939331053, 0.020359712600708006, 0.02020569610595703, 0.02021334457397461, 0.020218143463134764, 0.020369279861450196, 0.02026473617553711, 0.020477888107299804, 0.02032451248168945, 0.020291839599609375, 0.020213056564331054, 0.02033030319213867, 0.020199583053588866, 0.020178815841674805, 0.02027107238769531, 0.020374399185180664, 0.020287776947021486, 0.020268768310546876, 0.020389888763427736, 0.020473535537719727, 0.02053875160217285, 0.02037651252746582, 0.020335615158081053, 0.020290559768676757, 0.020415712356567382, 0.020279520034790038, 0.020172639846801756, 0.020245023727416992, 0.02024870491027832, 0.020352479934692382, 0.020244543075561523, 0.020260576248168946, 0.020336511611938477, 0.02018604850769043, 0.02035740852355957, 0.020225343704223634, 0.020150463104248048, 0.020225759506225585, 0.020111871719360352, 0.020254560470581055, 0.02010537528991699, 0.020164608001708984, 0.02020351982116699, 0.020115423202514648, 0.020295551300048828, 0.020463712692260744, 0.020385984420776368, 0.02028028869628906, 0.02025372886657715, 0.020543359756469728, 0.020313663482666014, 
0.020181568145751953, 0.020275072097778322, 0.02018115234375, 0.020664480209350584, 0.020310144424438476, 0.0210982723236084, 0.020460063934326173, 0.02027519989013672, 0.02025814437866211, 0.020187679290771483, 0.0201744327545166, 0.020287424087524413, 0.02037001609802246, 0.020336639404296874, 0.020378944396972656, 0.020295616149902343, 0.020624128341674805, 0.0203450870513916, 0.020083839416503907, 0.02019596862792969, 0.02031203269958496, 0.020239904403686525, 0.02030847930908203, 0.020320255279541014, 0.020278879165649414, 0.0202872314453125, 0.02029808044433594, 0.02025254440307617, 0.020146623611450195, 0.020185087203979494, 0.02033459281921387, 0.020418560028076172, 0.02033897590637207, 0.020242143630981445, 0.02030739212036133, 0.02021583938598633, 0.02030441665649414, 0.02026927947998047, 0.020316095352172853, 0.020293344497680665, 0.020295583724975585, 0.020348255157470702, 0.020324640274047852, 0.02017340850830078, 0.020426752090454102, 0.020316160202026368, 0.02050048065185547, 0.020436992645263673, 0.02042265510559082, 0.02026028823852539, 0.020434591293334962, 0.020145055770874023, 0.020166336059570314, 0.020324480056762694, 0.020582111358642578, 0.020306655883789063, 0.020541183471679686, 0.020346879959106445, 0.020416736602783203, 0.02022172737121582, 0.020471040725708007, 0.020512704849243165, 0.02034566307067871, 0.02063324737548828, 0.020351327896118165, 0.020677696228027342, 0.020819839477539064, 0.020385919570922853, 0.0203155517578125, 0.0203123836517334, 0.020391551971435547, 0.02018979263305664, 0.020256767272949217, 0.020221567153930663, 0.020212383270263673, 0.02014569664001465, 0.02020377540588379, 0.020135936737060548, 0.020401376724243164, 0.02051919937133789, 0.020206079483032227, 0.020254720687866212, 0.02016239929199219, 0.020258975982666017, 0.020125696182250977, 0.020148223876953125, 0.020278335571289063, 0.020171104431152345, 0.020105823516845703, 0.020282976150512694, 0.02028175926208496, 0.020198816299438475, 0.020109920501708983, 0.020645408630371093, 0.020230560302734374, 0.020342592239379884, 0.020127296447753906, 0.020136640548706054, 0.020189184188842774, 0.020289535522460937, 0.02030080032348633, 0.020147199630737304, 0.02015545654296875, 0.02014031982421875, 0.02013792037963867, 0.020031776428222656, 0.02012175941467285, 0.020170080184936524, 0.020189952850341798, 0.02023878479003906, 0.020159584045410156, 0.020728416442871093, 0.020287712097167968, 0.02050396728515625, 0.020261312484741213, 0.02030182456970215, 0.020182815551757813, 0.020232255935668946, 0.0202325439453125, 0.020294912338256837, 0.020378496170043944, 0.02047475242614746, 0.0202346248626709, 0.020208032608032226, 0.020342815399169923, 0.02023334312438965, 0.020200319290161133, 0.020375455856323242, 0.020307680130004883, 0.02072438430786133, 0.020633184432983398, 0.02050908851623535, 0.02063155174255371, 0.020626432418823244, 0.021959552764892577, 0.02044940757751465, 0.020338687896728515, 0.020299776077270508, 0.02040617561340332, 0.020286592483520507, 0.020220895767211915, 0.02019468879699707, 0.020400768280029298, 0.020100128173828124, 0.020351871490478516, 0.020197439193725585, 0.020280767440795898, 0.020245088577270507, 0.02032339286804199, 0.02039904022216797, 0.020213760375976563, 0.020162559509277343, 0.020118751525878907, 0.020109407424926756, 0.02025337600708008, 0.020089887619018556, 0.020224992752075194, 0.02022422409057617, 0.020231967926025392, 0.020748064041137694, 0.020152544021606444, 0.02040150451660156, 0.020357791900634765, 0.020225183486938476, 
0.020210527420043947, 0.020142080307006836, 0.020316160202026368, 0.02023219108581543, 0.020254528045654297, 0.020463808059692383, 0.020233631134033203, 0.020106847763061524, 0.020362239837646484, 0.0201562557220459, 0.020199487686157227, 0.020078752517700197, 0.02056921577453613, 0.020417343139648436, 0.02027724838256836, 0.020211360931396485, 0.020260927200317382, 0.020334880828857423, 0.020295679092407228, 0.020415679931640625, 0.02014489555358887, 0.0202097282409668, 0.020346879959106445, 0.020211328506469728, 0.02032646369934082, 0.020115776062011717, 0.02021171188354492, 0.020219968795776366, 0.020824064254760744, 0.020479007720947264, 0.020373920440673828, 0.020369983673095702, 0.020513856887817383, 0.02042902374267578, 0.02033910369873047, 0.02024275207519531, 0.020195327758789062, 0.020305919647216796, 0.020303871154785155, 0.021914688110351563, 0.020286399841308592, 0.020231199264526368, 0.02024051284790039, 0.02080240058898926, 0.02063532829284668, 0.020389184951782227, 0.020365663528442383, 0.02044822311401367, 0.020539072036743163, 0.020411775588989257, 0.02043155288696289, 0.020377824783325196, 0.020370527267456053, 0.020836767196655274, 0.02019708824157715, 0.020348928451538087, 0.020429248809814452, 0.020303903579711916, 0.02041244888305664, 0.02032774353027344, 0.02043769645690918, 0.0203571834564209, 0.02035043144226074, 0.02019375991821289, 0.020176639556884767, 0.020330751419067383, 0.02024038314819336, 0.020216127395629883, 0.02034841537475586, 0.02025491142272949, 0.020197376251220703, 0.020338304519653322, 0.02008464050292969, 0.02031398391723633, 0.020269567489624024, 0.020232255935668946, 0.0201810245513916, 0.020389888763427736, 0.020204736709594728, 0.020196159362792968, 0.020140031814575195, 0.0203341121673584, 0.020627935409545897, 0.020426528930664063, 0.020334815979003905, 0.02037555122375488, 0.02055068778991699, 0.02044003105163574, 0.020330528259277343, 0.020403903961181642, 0.020307552337646483, 0.0211822395324707, 0.020637504577636717, 0.020447519302368163, 0.020514944076538085, 0.020422176361083986, 0.020447711944580078, 0.0204083194732666, 0.02033839988708496, 0.020267295837402343, 0.020213151931762697, 0.02036796760559082, 0.020458528518676758, 0.020452255249023436, 0.02034284782409668, 0.020314111709594726, 0.020403423309326173, 0.02030473518371582, 0.020283008575439455, 0.020326208114624024, 0.02032694435119629, 0.02027849578857422, 0.020316032409667967, 0.020228992462158202, 0.020394336700439452, 0.020331775665283203, 0.020343391418457032, 0.02032211112976074, 0.020364511489868165, 0.020345151901245116, 0.020226783752441406, 0.020254463195800782, 0.021106464385986328, 0.023621856689453127, 0.020318111419677733, 0.02034668731689453, 0.021108800888061524, 0.022079200744628907, 0.020416223526000976, 0.020327199935913087, 0.020313440322875978, 0.020465503692626952, 0.020499263763427734, 0.02025641632080078, 0.02037180709838867, 0.020172256469726563, 0.020366943359375, 0.020187423706054686, 0.0201181755065918, 0.020196800231933595, 0.020611648559570313, 0.02096691131591797, 0.02055628776550293, 0.02042684745788574, 0.02043280029296875, 0.02032640075683594, 0.02052841567993164, 0.020488927841186524, 0.020391231536865236, 0.020859935760498046, 0.02061484718322754, 0.02060678482055664, 0.020360607147216797, 0.02036751937866211]",tokens/s,48.9920403249053,, 
float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,887.689216,6266.159104,0.0,5863.636992,5744.700416,s,1,7.2903544921875,7.2903544921875,0.0,7.2903544921875,7.2903544921875,7.2903544921875,7.2903544921875,[7.2903544921875],,kWh,6.875910358364004e-06,7.508060936711192e-07,2.1691684019831126e-06,9.795884854018237e-06,,MB,1325.502464,6475.874304,0.0,6060.76928,6020.358144,s,10,0.6542509689331055,0.06542509689331055,0.0018893555108964017,0.06557673263549804,0.06743290939331055,0.06770469398498535,0.0679221216583252,"[0.06490662384033204, 0.0669549789428711, 0.0660208969116211, 0.06737251281738281, 0.06797647857666016, 0.06530671691894531, 0.06584674835205079, 0.06441632080078125, 0.06454668426513673, 0.06090300750732422]",tokens/s,3912.8715455700753,kWh,2.153647573651949e-06,2.3740273025106554e-07,1.4287511430001373e-06,3.819801446903151e-06,tokens/kWh,67019190.27952311,MB,1358.143488,6559.760384,0.0,6144.65536,6125.164032,s,10,17.98795227050781,1.7987952270507812,0.002731751442871645,1.7990762939453124,1.8015147216796874,1.801962744140625,1.802321162109375,"[1.79367822265625, 1.800449951171875, 1.79770263671875, 1.7970155029296875, 1.797313720703125, 1.8014151611328124, 1.8009652099609375, 1.8024107666015625, 1.8011771240234375, 1.795823974609375]",tokens/s,35.023441830725666,kWh,5.230534246925779e-05,5.769086307174277e-06,3.477441670840063e-05,9.28488454848327e-05,tokens/kWh,678522.168703663,,s,630,17.984807035446163,0.028547312754676455,0.000531445552537215,0.028486095428466796,0.028695769500732424,0.029051476955413817,0.03175775905609131,"[0.030490047454833986, 0.029510528564453124, 0.028654592514038086, 0.02839219284057617, 0.028053087234497072, 0.02799068832397461, 0.027882495880126954, 0.027803743362426758, 0.027812768936157226, 0.028289024353027343, 0.028370943069458008, 0.02815590476989746, 0.028256256103515624, 0.028360319137573243, 0.028106592178344728, 0.029056991577148437, 0.02836742401123047, 0.02829465675354004, 0.028286527633666993, 0.028511167526245117, 0.028284927368164063, 0.028424095153808594, 0.02837718391418457, 0.028329984664916992, 0.028349567413330078, 0.028222335815429687, 0.028443967819213867, 0.028390079498291015, 0.028387327194213868, 0.028336416244506835, 0.028386592864990234, 0.02856390380859375, 0.028544511795043945, 0.02840012741088867, 0.028651519775390624, 0.02856697654724121, 0.02851900863647461, 0.028630720138549805, 0.028539199829101563, 0.028483039855957033, 0.028463327407836914, 0.028324127197265625, 0.028493663787841798, 0.028417312622070312, 0.028479679107666016, 0.02839411163330078, 0.02859596824645996, 0.028494144439697267, 0.028399616241455077, 0.02853068733215332, 0.028440576553344726, 0.02904473686218262, 0.0284467830657959, 0.028485408782958986, 0.028556896209716798, 0.028566080093383787, 0.028430335998535155, 0.02850758361816406, 0.028555456161499022, 0.028583904266357422, 
0.028584352493286135, 0.02856755256652832, 0.0285347843170166, 0.033138465881347653, 0.030619136810302733, 0.02926665687561035, 0.028557184219360352, 0.028300607681274414, 0.02829539108276367, 0.028317472457885743, 0.028046144485473632, 0.02817228889465332, 0.028406911849975586, 0.028348480224609375, 0.028216127395629884, 0.028338176727294922, 0.02828598403930664, 0.02828300857543945, 0.028361568450927733, 0.02811395263671875, 0.02807904052734375, 0.02827471923828125, 0.0284117431640625, 0.02826255989074707, 0.028440000534057618, 0.02855174446105957, 0.028444383621215822, 0.028317983627319337, 0.02833612823486328, 0.028450336456298828, 0.028367328643798827, 0.02849171257019043, 0.028366912841796876, 0.02839347267150879, 0.028276063919067382, 0.028430431365966798, 0.028394208908081055, 0.02855673599243164, 0.028504480361938478, 0.028841535568237306, 0.028677600860595703, 0.02874671936035156, 0.02857369613647461, 0.028441823959350587, 0.028474016189575194, 0.02839360046386719, 0.028632736206054686, 0.02853718376159668, 0.028386976242065428, 0.028454591751098633, 0.028490400314331053, 0.028648479461669922, 0.02854924774169922, 0.028459871292114258, 0.028562911987304686, 0.028424736022949218, 0.028588031768798827, 0.028534847259521483, 0.028429344177246095, 0.028476320266723632, 0.02842732810974121, 0.029552799224853515, 0.028592416763305664, 0.028548959732055665, 0.028629663467407227, 0.028649471282958985, 0.03168316841125488, 0.029777887344360352, 0.02872319984436035, 0.02842972755432129, 0.028131935119628908, 0.028121088027954103, 0.0282825927734375, 0.02848387145996094, 0.028221151351928712, 0.028315872192382813, 0.028313440322875978, 0.028299488067626954, 0.028401792526245116, 0.02817411231994629, 0.028360799789428712, 0.028454912185668944, 0.02816409683227539, 0.02830745506286621, 0.028325632095336915, 0.02810700798034668, 0.02836185646057129, 0.02861964797973633, 0.028385183334350587, 0.028350143432617186, 0.02834217643737793, 0.028416479110717773, 0.028229631423950196, 0.028368928909301757, 0.028477439880371092, 0.029245439529418944, 0.028417343139648436, 0.028521055221557616, 0.028473440170288085, 0.028530431747436524, 0.028434688568115235, 0.02852249526977539, 0.028532672882080078, 0.028472896575927734, 0.028586496353149415, 0.02848912048339844, 0.02848214340209961, 0.028596223831176756, 0.028678144454956055, 0.02858188819885254, 0.02856959915161133, 0.028481536865234375, 0.028536575317382813, 0.028614912033081055, 0.02843615913391113, 0.02856582450866699, 0.02844380760192871, 0.028434751510620117, 0.028523040771484376, 0.028454336166381836, 0.02851091194152832, 0.028486656188964843, 0.028642175674438476, 0.02863849639892578, 0.028537567138671876, 0.0285614070892334, 0.028631040573120117, 0.028554975509643556, 0.028576032638549805, 0.0325522575378418, 0.030277856826782225, 0.028834144592285157, 0.02834239959716797, 0.028273887634277343, 0.028007200241088867, 0.02796281623840332, 0.027775552749633788, 0.02799001693725586, 0.028309503555297853, 0.02816739273071289, 0.028088096618652345, 0.028244991302490235, 0.02831113624572754, 0.02824233627319336, 0.028215295791625978, 0.028348703384399414, 0.028376800537109375, 0.028313312530517578, 0.028520191192626953, 0.028354175567626955, 0.028441503524780275, 0.028259391784667968, 0.02826950454711914, 0.028405311584472657, 0.028303808212280273, 0.02848883247375488, 0.028301504135131834, 0.02830975914001465, 0.02823734474182129, 0.02810358428955078, 0.028483583450317384, 0.02857164764404297, 0.028461055755615236, 0.028415199279785155, 
0.02857859230041504, 0.02933900833129883, 0.02864192008972168, 0.028867904663085937, 0.028590784072875977, 0.028467199325561524, 0.028475391387939454, 0.028476736068725587, 0.028385696411132814, 0.028479776382446288, 0.02836265563964844, 0.028465248107910155, 0.02843561553955078, 0.028547840118408205, 0.028498016357421874, 0.028467199325561524, 0.028499967575073244, 0.028622560501098633, 0.02873369598388672, 0.028612640380859374, 0.028495071411132812, 0.02863907241821289, 0.028588031768798827, 0.0284968318939209, 0.028552383422851563, 0.028650144577026367, 0.028600479125976564, 0.028596223831176756, 0.031914112091064456, 0.029924863815307616, 0.028748287200927734, 0.028440576553344726, 0.02806540870666504, 0.02818819236755371, 0.02944086456298828, 0.02838937568664551, 0.028518335342407226, 0.028264448165893553, 0.028266559600830077, 0.028112895965576173, 0.027842559814453126, 0.02780364799499512, 0.02805116844177246, 0.02842857551574707, 0.028092416763305664, 0.028379135131835938, 0.028643327713012694, 0.028444671630859376, 0.028286975860595705, 0.028421344757080077, 0.02842902374267578, 0.028330047607421874, 0.028386560440063477, 0.028215200424194335, 0.028461055755615236, 0.0282891845703125, 0.028435104370117186, 0.028528480529785155, 0.02838137626647949, 0.02841929626464844, 0.02841587257385254, 0.028561311721801756, 0.028593151092529297, 0.028553216934204102, 0.028632896423339844, 0.028557504653930664, 0.02850201606750488, 0.02856083106994629, 0.028500543594360352, 0.028645376205444335, 0.028173664093017577, 0.02819548797607422, 0.02879078483581543, 0.028790271759033204, 0.028555103302001953, 0.028502687454223633, 0.02854297637939453, 0.028448768615722656, 0.028481536865234375, 0.028391424179077147, 0.028464799880981446, 0.028393823623657225, 0.028544448852539064, 0.02852854347229004, 0.02861846351623535, 0.02863363265991211, 0.028612607955932616, 0.028555168151855468, 0.028562976837158204, 0.02862384033203125, 0.02854412841796875, 0.031985631942749025, 0.030134048461914063, 0.029069631576538087, 0.028569503784179686, 0.028404991149902345, 0.028295936584472655, 0.028219392776489258, 0.027946304321289063, 0.027735776901245117, 0.02779644775390625, 0.02826620864868164, 0.028526079177856444, 0.028311744689941406, 0.028324447631835937, 0.028108800888061523, 0.028180192947387696, 0.028364063262939453, 0.028513280868530274, 0.02834390449523926, 0.028453184127807618, 0.028467296600341797, 0.028368896484375, 0.02841606330871582, 0.028513471603393556, 0.028506847381591798, 0.029394336700439453, 0.02837977600097656, 0.02823744010925293, 0.028422527313232422, 0.028475231170654296, 0.028452224731445312, 0.028350976943969725, 0.02845929527282715, 0.02840323257446289, 0.028615135192871094, 0.02853273582458496, 0.028649471282958985, 0.02860428810119629, 0.028596351623535157, 0.029624319076538085, 0.02869430351257324, 0.028482847213745117, 0.02846611213684082, 0.02853068733215332, 0.028618751525878908, 0.028505535125732423, 0.02860211181640625, 0.028535167694091798, 0.028368896484375, 0.028516799926757812, 0.02854252815246582, 0.02858233642578125, 0.028560800552368162, 0.028553823471069335, 0.02861395263671875, 0.02888163185119629, 0.028845695495605467, 0.02873583984375, 0.028873920440673828, 0.02873632049560547, 0.02866899108886719, 0.02869548797607422, 0.028467071533203124, 0.03175811195373535, 0.02988751983642578, 0.028908063888549804, 0.02838479995727539, 0.028369823455810548, 0.028110847473144532, 0.028004352569580077, 0.028083520889282225, 0.028125888824462892, 0.02837708854675293, 
0.028436479568481447, 0.02826006317138672, 0.028334367752075196, 0.02826188850402832, 0.028357376098632814, 0.028458688735961916, 0.028182464599609373, 0.02833216094970703, 0.02857478332519531, 0.028506879806518556, 0.028368896484375, 0.028504095077514648, 0.028370431900024414, 0.028486303329467774, 0.028345855712890625, 0.028393087387084962, 0.02830134391784668, 0.02849468803405762, 0.028621984481811524, 0.028584447860717774, 0.028451168060302734, 0.02852403259277344, 0.028537120819091797, 0.028408031463623046, 0.02866169548034668, 0.02857548713684082, 0.028551488876342773, 0.028700672149658202, 0.028945823669433594, 0.02881391906738281, 0.02871500778198242, 0.02860982322692871, 0.02851215934753418, 0.028594175338745118, 0.02861257553100586, 0.028534624099731447, 0.028578720092773437, 0.028604448318481444, 0.02852992057800293, 0.028511039733886717, 0.02873334312438965, 0.028655040740966798, 0.02856345558166504, 0.028664480209350585, 0.028626943588256838, 0.028770303726196288, 0.028640512466430665, 0.028484352111816408, 0.02869830322265625, 0.028659711837768553, 0.028631359100341796, 0.028665855407714845, 0.028647424697875977, 0.03202867126464844, 0.03034316825866699, 0.029237247467041014, 0.028624383926391602, 0.02830723190307617, 0.028371679306030274, 0.028092416763305664, 0.02824176025390625, 0.02852675247192383, 0.028286975860595705, 0.02835251235961914, 0.02832761573791504, 0.028332351684570312, 0.02854025650024414, 0.028303104400634764, 0.028482112884521484, 0.02826684761047363, 0.02838118362426758, 0.02831155204772949, 0.028495199203491212, 0.028482208251953123, 0.028557247161865234, 0.028495935440063475, 0.02854015922546387, 0.028450944900512695, 0.028505760192871092, 0.02849260711669922, 0.02833987236022949, 0.028477439880371092, 0.028426752090454102, 0.028489376068115236, 0.028406112670898438, 0.028413919448852538, 0.0283156795501709, 0.028682239532470705, 0.028682239532470705, 0.028635135650634767, 0.028606464385986328, 0.028780096054077147, 0.028545312881469727, 0.0284366397857666, 0.02856959915161133, 0.028628896713256836, 0.02858198356628418, 0.02854092788696289, 0.028618240356445314, 0.02848409652709961, 0.028661760330200195, 0.02864454460144043, 0.02854355239868164, 0.028628351211547852, 0.028609407424926757, 0.028624895095825196, 0.02855731201171875, 0.028524223327636718, 0.028670175552368164, 0.028572927474975585, 0.02862323188781738, 0.028649887084960936, 0.02861587142944336, 0.02853891181945801, 0.028693344116210936, 0.02885737609863281, 0.031982112884521484, 0.030312000274658205, 0.029216768264770508, 0.028528287887573243, 0.028237855911254883, 0.02844361686706543, 0.028205055236816406, 0.028305023193359376, 0.02850649642944336, 0.028301311492919923, 0.028386911392211913, 0.028204639434814452, 0.028312063217163085, 0.02848588752746582, 0.028239519119262695, 0.028333887100219727, 0.028430944442749025, 0.028432384490966797, 0.028427839279174805, 0.028344512939453125, 0.02850432014465332, 0.028405727386474608, 0.028352256774902343, 0.028231231689453126, 0.028377824783325196, 0.02836444854736328, 0.028393791198730468, 0.028446752548217772, 0.028485631942749022, 0.028568767547607423, 0.028540864944458007, 0.02836956787109375, 0.028442527770996092, 0.028288639068603516, 0.028677888870239258, 0.028769216537475585, 0.02858598327636719, 0.0285447998046875, 0.028647647857666016, 0.028516191482543946, 0.028401824951171876, 0.028590080261230468, 0.028573535919189454, 0.028606624603271485, 0.02856719970703125, 0.028533023834228517, 0.028550304412841798, 0.02873641586303711, 
0.02857369613647461, 0.02858188819885254, 0.028702720642089844, 0.028620800018310546, 0.028565568923950194, 0.028530527114868164, 0.0285512638092041, 0.02856982421875, 0.02868342399597168, 0.028736127853393554, 0.02865545654296875, 0.028614175796508788, 0.028512800216674804, 0.02858723258972168, 0.028650367736816406, 0.031756895065307614, 0.030293088912963867, 0.029179904937744142, 0.028609216690063475, 0.028356832504272463, 0.028245248794555665, 0.028062463760375977, 0.02785590362548828, 0.027935712814331055, 0.02802400016784668, 0.02809119987487793, 0.02814352035522461, 0.0287740478515625, 0.028126720428466798, 0.027899103164672853, 0.027716608047485353, 0.02812348747253418, 0.02841971206665039, 0.028237823486328126, 0.028359424591064452, 0.028344640731811522, 0.028323520660400392, 0.028438528060913085, 0.028247039794921876, 0.02838425636291504, 0.028380640029907228, 0.028349983215332032, 0.028494495391845703, 0.028405216217041014, 0.028477567672729492, 0.02839219284057617, 0.028506111145019532, 0.02843775939941406, 0.028408575057983398, 0.028497919082641602, 0.028579839706420897, 0.028817407608032225, 0.028691648483276367, 0.028541183471679686, 0.028504640579223632, 0.02855686378479004, 0.028528127670288086, 0.028563648223876952, 0.028502975463867188, 0.0284234561920166, 0.02858755111694336, 0.028610815048217775, 0.028525312423706053, 0.028533952713012695, 0.028519071578979493, 0.02845916748046875, 0.028577024459838868, 0.028467967987060548, 0.028561344146728517, 0.028550752639770506, 0.028501504898071288, 0.02862179183959961, 0.028495359420776366, 0.028367551803588867, 0.02855695915222168, 0.028495168685913085, 0.02861516761779785, 0.028492128372192383]",tokens/s,35.02956683151152,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 170212 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load 
self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 270, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 84814 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 905.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 72786 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through 
torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
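The ImportError that follows is raised by transformers' check_imports pass on the remote Qwen modeling code, before any weights are downloaded or allocated. A minimal sketch of the fix the message prescribes (package name and model id taken from the row itself; trust_remote_code=True matches the trust_remote_code setting in the benchmark config):

# Prerequisite named by the error: pip install transformers_stream_generator
import torch
from transformers import AutoModelForCausalLM

# Qwen/Qwen-14B ships custom modeling code, so it must be loaded with
# trust_remote_code=True; that remote code imports
# transformers_stream_generator at module-load time, hence the failure.
model = AutoModelForCausalLM.from_pretrained(
    "Qwen/Qwen-14B",
    torch_dtype=torch.float16,
    trust_remote_code=True,
)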
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.330944,13880.918016,0.0,13478.395904,13476.849152,s,1,7.34776416015625,7.34776416015625,0.0,7.34776416015625,7.34776416015625,7.34776416015625,7.34776416015625,[7.34776416015625],,kWh,8.509368816661815e-06,9.31521312924467e-07,5.033059581993626e-06,1.4473949711579908e-05,,MB,1294.278656,14115.79904,0.0,13700.694016,13671.637504,s,10,2.0948021392822262,0.20948021392822264,0.0034513227977716453,0.20956494140624998,0.2127468246459961,0.21322307510375976,0.2136040754699707,"[0.20057171630859374, 0.21089900207519532, 0.20983500671386718, 0.20913638305664062, 0.21369932556152343, 0.20810928344726562, 0.2126409912109375, 0.20929487609863281, 0.20851571655273438, 0.21209983825683593]",tokens/s,1222.0724582977423,kWh,6.0883177444446955e-06,6.710956018419764e-07,4.044065735250065e-06,1.0803479081536737e-05,tokens/kWh,23696070.31845017,MB,1337.491456,14117.896192,0.0,13702.791168,13671.640064,s,10,37.208315673828125,3.7208315673828123,0.002876997698239784,3.7203675537109375,3.72428798828125,3.725852001953125,3.7271032128906247,"[3.7197529296875, 3.717982666015625, 3.71877392578125, 3.717134033203125, 3.720982177734375, 3.7195205078125, 3.721120849609375, 3.7239404296875, 3.721692138671875, 3.727416015625]",tokens/s,16.931698965431384,kWh,0.00010881553686888856,1.200291783780401e-05,7.22826897705504e-05,0.00019310114447724293,tokens/kWh,326253.8923347737,,s,630,37.20560744094847,0.05905651974753728,0.0005787943865051249,0.05891747093200683,0.05936752510070801,0.059584354782104484,0.06286623451232912,"[0.06257270431518555, 0.05956198501586914, 0.05895539093017578, 0.058775936126708984, 0.05881651306152344, 0.05881651306152344, 0.05898649597167969, 0.05904383850097656, 0.058836383819580076, 0.05888470458984375, 0.059157920837402345, 0.05929785537719726, 0.059095550537109375, 0.0588328971862793, 0.058824737548828124, 0.05901311874389648, 0.05934223937988281, 0.05935958480834961, 0.05925094223022461, 0.05928550338745117, 0.05925273513793945, 0.05893529510498047, 0.058787841796875, 0.0589285774230957, 0.05894319915771484, 0.05878460693359375, 0.05888336181640625, 0.059144161224365235, 0.059253505706787106, 0.059015167236328124, 0.05897216033935547, 0.05907865524291992, 0.059334495544433596, 0.05919702529907227, 0.05895840072631836, 0.058966014862060545, 0.059109375, 0.0588836784362793, 0.05884560012817383, 0.05888313674926758, 0.05866995239257813, 0.058972225189208985, 0.05876326370239258, 0.05890252685546875, 0.05897830581665039, 0.05887558364868164, 0.05881657409667969, 0.058784000396728514, 0.05895775985717774, 0.05903571319580078, 0.05905408096313477, 0.05906227111816406, 0.05904761505126953, 0.058986335754394534, 0.05887638473510742, 0.058855392456054687, 0.058981822967529296, 0.05875142288208008, 
0.05886140823364258, 0.05883321762084961, 0.05885542297363281, 0.05884108734130859, 0.05887945556640625, 0.06294451141357423, 0.05955456161499023, 0.058793983459472655, 0.058636287689208984, 0.05857689666748047, 0.05874425506591797, 0.058712638854980466, 0.05866495895385742, 0.058758495330810546, 0.058823326110839846, 0.058883617401123044, 0.05884118270874023, 0.058636672973632814, 0.058574207305908205, 0.058702465057373046, 0.0592151985168457, 0.05964252853393555, 0.059674175262451175, 0.05934928131103516, 0.05909110260009766, 0.05902336120605469, 0.058810367584228515, 0.058748126983642575, 0.05881683349609375, 0.058767040252685546, 0.05874972915649414, 0.05881158447265625, 0.058925792694091796, 0.05890057754516602, 0.058855358123779296, 0.05880633544921875, 0.05899468612670898, 0.059213214874267575, 0.05921238327026367, 0.05939779281616211, 0.05930972671508789, 0.059159233093261716, 0.05883628845214844, 0.058663616180419924, 0.0586497917175293, 0.059050079345703124, 0.05879628753662109, 0.05897999954223633, 0.058826881408691405, 0.058819263458251954, 0.05890867233276367, 0.058887680053710936, 0.05895542526245117, 0.0589730224609375, 0.05893856048583984, 0.05912863922119141, 0.059322017669677735, 0.05915052795410156, 0.05910460662841797, 0.05907129669189453, 0.059031326293945315, 0.05889865493774414, 0.05883055877685547, 0.058757408142089844, 0.0588730239868164, 0.058974720001220705, 0.05891670227050781, 0.05902588653564453, 0.06340611267089843, 0.060213214874267576, 0.059254783630371094, 0.05876521682739258, 0.058744926452636716, 0.058828800201416016, 0.058918270111083984, 0.0587639045715332, 0.05876671981811524, 0.059185791015625, 0.05873408126831055, 0.05871206283569336, 0.058638206481933595, 0.058918785095214844, 0.05876403045654297, 0.059006591796875, 0.05943471908569336, 0.05936576080322266, 0.05924652862548828, 0.059050334930419925, 0.05887129592895508, 0.05877542495727539, 0.058703872680664064, 0.0587342414855957, 0.05876617431640625, 0.058810497283935545, 0.05914134216308594, 0.05886851119995117, 0.059017215728759766, 0.058931198120117184, 0.058722305297851565, 0.05875616073608399, 0.059060863494873043, 0.05922409439086914, 0.05909494400024414, 0.05910771179199219, 0.05907980728149414, 0.059022209167480466, 0.058912769317626956, 0.05878374481201172, 0.058746849060058594, 0.058818592071533206, 0.05871615982055664, 0.058662784576416015, 0.05881254577636719, 0.058900478363037106, 0.058859519958496094, 0.05885337448120117, 0.05915382385253906, 0.05915235137939453, 0.059197601318359376, 0.059308448791503904, 0.05920732879638672, 0.059300193786621096, 0.05898649597167969, 0.05886716842651367, 0.05876985549926758, 0.058683551788330075, 0.058934783935546874, 0.058905086517333984, 0.05878963088989258, 0.05885977554321289, 0.058929153442382816, 0.06256396865844727, 0.059498462677001956, 0.058784160614013675, 0.05875913619995117, 0.0587894401550293, 0.058834720611572265, 0.058816192626953125, 0.059009727478027345, 0.05897248077392578, 0.058912769317626956, 0.058722305297851565, 0.05861536026000977, 0.05868588638305664, 0.05856211090087891, 0.058821056365966795, 0.059115520477294924, 0.059465087890625, 0.059398815155029296, 0.05921744155883789, 0.059052127838134766, 0.058912769317626956, 0.058781665802001955, 0.05864857482910156, 0.058635711669921875, 0.05879289627075195, 0.05906147384643555, 0.05885007858276367, 0.058787841796875, 0.05872751998901367, 0.05880105590820312, 0.058810367584228515, 0.059138046264648435, 0.05925273513793945, 0.059172863006591796, 0.059180831909179686, 
0.05910275268554688, 0.05896239852905273, 0.05883315277099609, 0.05886921691894531, 0.058726913452148435, 0.058755073547363285, 0.05883084869384766, 0.0590561294555664, 0.05877731323242188, 0.05873062515258789, 0.058832191467285154, 0.058761344909667966, 0.0588172492980957, 0.059009025573730466, 0.05913324737548828, 0.05923910522460937, 0.05932582473754883, 0.059376255035400394, 0.05918515014648437, 0.05923958587646484, 0.059035934448242185, 0.05900505447387695, 0.05888454437255859, 0.05888169479370117, 0.05883852767944336, 0.05887036895751953, 0.058791614532470705, 0.05880377578735352, 0.06267459106445313, 0.059603870391845705, 0.05887737655639649, 0.058757183074951175, 0.05883750534057617, 0.05883699035644531, 0.058998783111572264, 0.05881756973266602, 0.05874095916748047, 0.05867187118530273, 0.05917468643188477, 0.05883014297485351, 0.05894851303100586, 0.058875904083251954, 0.058894336700439455, 0.05919334411621094, 0.059717025756835934, 0.05960889434814453, 0.059407135009765626, 0.05901830291748047, 0.0588392333984375, 0.05871488189697266, 0.05883903884887695, 0.05883004760742187, 0.058780448913574215, 0.05882265472412109, 0.058844928741455076, 0.058792190551757814, 0.0587407341003418, 0.059065406799316406, 0.0591451530456543, 0.05900284957885742, 0.05929487991333008, 0.059310623168945316, 0.05923465728759766, 0.05932556915283203, 0.05907900619506836, 0.05897679901123047, 0.05900848007202148, 0.058906654357910156, 0.05884979248046875, 0.05893040084838867, 0.058888992309570315, 0.0588144645690918, 0.05881856155395508, 0.058787841796875, 0.058875904083251954, 0.05898163223266602, 0.059120384216308594, 0.05918105697631836, 0.059299713134765626, 0.05929587173461914, 0.059230209350585934, 0.05921791839599609, 0.05897203063964844, 0.05910335922241211, 0.05901929473876953, 0.059084766387939455, 0.05888204956054687, 0.05874892807006836, 0.05876736068725586, 0.05879808044433594, 0.058966014862060545, 0.06317385482788086, 0.05975849533081055, 0.058896575927734375, 0.05864313507080078, 0.05857276916503906, 0.05867932891845703, 0.058877601623535156, 0.05887420654296875, 0.05884928131103516, 0.05890457534790039, 0.05883059310913086, 0.05880435180664063, 0.05880947113037109, 0.05900185775756836, 0.05881552124023438, 0.05915110397338867, 0.05960265731811523, 0.05967638397216797, 0.05926172637939453, 0.05906022262573242, 0.058891422271728514, 0.05877231979370117, 0.05875302505493164, 0.05868931198120117, 0.05888227081298828, 0.0587407341003418, 0.05885542297363281, 0.05864396667480469, 0.05863065719604492, 0.05886771011352539, 0.05893699264526367, 0.05909302520751953, 0.05922982406616211, 0.05943888092041016, 0.059323295593261716, 0.05927091217041015, 0.059213214874267575, 0.05909958267211914, 0.05894720077514649, 0.05879478454589844, 0.05869744110107422, 0.05884511947631836, 0.05884963226318359, 0.058817855834960936, 0.05873859024047851, 0.05874319839477539, 0.05882486343383789, 0.05900515365600586, 0.059068416595458986, 0.059350879669189456, 0.05941263961791992, 0.05944319915771484, 0.059361278533935545, 0.05913103866577148, 0.058964160919189455, 0.05881206512451172, 0.05872115325927734, 0.058775264739990236, 0.05889043045043945, 0.05884073638916015, 0.0588232307434082, 0.058797569274902345, 0.05902592086791992, 0.0631673927307129, 0.05986304092407226, 0.05893939208984375, 0.05866851043701172, 0.05879430389404297, 0.05879420852661133, 0.0587960319519043, 0.058810367584228515, 0.05884640121459961, 0.059009471893310544, 0.05890496063232422, 0.05865814590454101, 0.058670974731445315, 
0.058608417510986326, 0.05875094223022461, 0.059179039001464845, 0.059672031402587894, 0.059523040771484376, 0.059434593200683596, 0.05908505630493164, 0.05895423889160156, 0.0588331184387207, 0.058777599334716796, 0.058850528717041016, 0.05874358367919922, 0.05862604904174805, 0.05863423919677734, 0.05875302505493164, 0.05879548645019531, 0.058730239868164065, 0.058884574890136716, 0.059297889709472654, 0.05930624008178711, 0.05942233657836914, 0.05931996917724609, 0.05928134536743164, 0.059462112426757814, 0.05909123229980469, 0.058858814239501955, 0.05876988983154297, 0.05876335906982422, 0.058731903076171876, 0.058847999572753905, 0.05881840133666992, 0.05891823959350586, 0.05885974502563476, 0.05907632064819336, 0.059008991241455075, 0.05921795272827148, 0.05947071838378906, 0.05928144073486328, 0.059246593475341794, 0.05922576141357422, 0.05920800018310547, 0.05902131271362305, 0.058910686492919924, 0.05884723281860352, 0.058890270233154296, 0.058865249633789064, 0.05901353454589844, 0.05895375823974609, 0.05902947235107422, 0.05908019256591797, 0.06355161666870117, 0.06004499053955078, 0.059121952056884766, 0.058881504058837894, 0.05874703979492187, 0.058826881408691405, 0.05880172729492188, 0.05918585586547852, 0.059070270538330076, 0.05894553756713867, 0.05878559875488281, 0.058675582885742185, 0.05866486358642578, 0.05885254287719727, 0.05882563018798828, 0.05965337753295898, 0.059646751403808596, 0.059647937774658204, 0.059367454528808594, 0.05922195053100586, 0.059007007598876955, 0.05894118499755859, 0.05893353652954102, 0.05879808044433594, 0.05880012893676758, 0.05874687957763672, 0.058732479095458985, 0.058642017364501954, 0.05868182373046875, 0.05876873779296875, 0.05885200119018555, 0.05912371063232422, 0.05941452789306641, 0.05945548629760742, 0.059334686279296875, 0.0592193603515625, 0.05906192016601562, 0.0589136962890625, 0.058899646759033204, 0.058942272186279294, 0.058931232452392575, 0.058942657470703125, 0.05900163269042969, 0.05885337448120117, 0.058789024353027346, 0.05880902481079102, 0.05886991882324219, 0.05909708786010742, 0.05927724838256836, 0.05953337478637695, 0.05927526473999024, 0.05918294525146484, 0.059168926239013674, 0.059121662139892575, 0.059121662139892575, 0.05900288009643555, 0.059057823181152345, 0.059047840118408204, 0.059019710540771486, 0.05897539138793945, 0.05902422332763672, 0.058929153442382816, 0.05888409423828125, 0.06335712051391601, 0.060080127716064455, 0.059198974609375, 0.05881657409667969, 0.058724769592285155, 0.05877884674072266, 0.05886854553222656, 0.058998783111572264, 0.05886566543579102, 0.05870796966552734, 0.058793983459472655, 0.05885279846191406, 0.05881232070922852, 0.05871478271484375, 0.0588042221069336, 0.059364768981933595, 0.05965449523925781, 0.05967225646972656, 0.05943558502197266, 0.059101184844970706, 0.058930622100830075, 0.0587534065246582, 0.058622142791748044, 0.058722305297851565, 0.05870182418823242, 0.05868956756591797, 0.05891622543334961, 0.05893939208984375, 0.05887036895751953, 0.05875817489624023, 0.05878054428100586, 0.059213951110839845, 0.059471839904785155, 0.059467201232910154, 0.05933929443359375, 0.05924252700805664, 0.05912985610961914, 0.05909708786010742, 0.058910720825195315, 0.0587611198425293, 0.05881660842895508, 0.05878374481201172, 0.058910240173339845, 0.05884332656860351, 0.05897248077392578, 0.05891846466064453, 0.05894569778442383, 0.058818817138671875, 0.05901055908203125, 0.05921152114868164, 0.05936816024780273, 0.05944118499755859, 0.059307998657226565, 
0.059179039001464845, 0.05901311874389648, 0.05894675064086914, 0.059034431457519534, 0.05880831909179687, 0.05883004760742187, 0.05887670516967773, 0.0588199348449707, 0.05876598358154297, 0.05888569641113281, 0.0636616325378418, 0.0604574089050293, 0.05939199829101562, 0.05885036849975586, 0.058769664764404296, 0.05879059219360352, 0.05876921463012695, 0.058867103576660154, 0.05881731033325195, 0.05882003021240234, 0.058796607971191406, 0.058893985748291015, 0.05876566314697266, 0.0586506233215332, 0.059137950897216796, 0.05935523223876953, 0.05987123107910156, 0.05983027267456055, 0.05972716903686524, 0.059372222900390625, 0.05913750457763672, 0.05876995086669922, 0.05866700744628906, 0.058873313903808594, 0.05879452896118164, 0.05875711822509765, 0.05882060623168945, 0.05876326370239258, 0.05891439819335938, 0.058749343872070314, 0.0588963851928711, 0.059099136352539064, 0.05914831924438477, 0.05925417709350586, 0.059275711059570316, 0.05955929565429687, 0.05939273452758789, 0.059311294555664064, 0.05916553497314453, 0.05889843368530273, 0.058845184326171876, 0.05889023971557617, 0.05905203247070313, 0.05911142349243164, 0.05904540634155273, 0.05896406555175781, 0.0591486701965332, 0.058992641448974606, 0.05927267074584961, 0.059241024017333985, 0.05931824111938477, 0.05945705413818359, 0.059388385772705075, 0.05922566223144531, 0.05920608139038086, 0.059159774780273434, 0.05905487823486328, 0.058864734649658204, 0.05901119995117188, 0.059036449432373045, 0.058992641448974606, 0.05910732650756836, 0.0589062385559082]",tokens/s,16.93293144050706,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 1248, in __init__ self.transformer = FalconModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in __init__ self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 721, in __init__ self.self_attention = FALCON_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 366, in __init__ self.query_key_value = FalconLinear(self.hidden_size, qkv_out_dim, bias=config.bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 450.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 424.12 MiB is free. Process 203741 has 14.32 GiB memory in use. Of the allocated memory 14.20 GiB is allocated by PyTorch, and 6.16 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
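This ValueError is transformers' standard message for architectures without an SDPA implementation; the workaround it documents is to request the eager attention path explicitly. A minimal sketch against the same checkpoint (keyword arguments as accepted by transformers 4.45):

import torch
from transformers import AutoModelForCausalLM

# GPTNeoForCausalLM has no scaled_dot_product_attention path in
# transformers 4.45.1, so request the eager implementation explicitly.
model = AutoModelForCausalLM.from_pretrained(
    "EleutherAI/gpt-neo-1.3B",
    torch_dtype=torch.float16,
    attn_implementation="eager",
)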
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.650432,15021.768704,0.0,14619.246592,14483.4816,s,1,7.8357177734375,7.8357177734375,0.0,7.8357177734375,7.8357177734375,7.8357177734375,7.8357177734375,[7.8357177734375],,kWh,8.748542512466884e-06,9.574288184970494e-07,3.363891579996614e-06,1.3069862910960548e-05,,MB,1324.015616,15145.500672,0.0,14730.395648,14577.604608,s,10,2.0134782409667973,0.20134782409667967,0.004303914748542827,0.20282481384277343,0.20514772033691406,0.20525174713134767,0.20533496856689454,"[0.19077159118652343, 0.20512460327148438, 0.20459158325195312, 0.20034445190429687, 0.19650428771972656, 0.20287590026855468, 0.20277372741699218, 0.20316697692871094, 0.20196934509277345, 0.20535577392578125]",tokens/s,1271.431668797565,kWh,5.813652701225412e-06,6.411288347780044e-07,3.84518826133368e-06,1.0299969797337095e-05,tokens/kWh,24854441.81265318,MB,1367.621632,15160.180736,0.0,14745.075712,14577.607168,s,10,40.26893212890625,4.026893212890625,0.004163876119883424,4.0278955078125005,4.032488378906249,4.03267119140625,4.03281744140625,"[4.019496337890625, 4.022361328125, 4.027443603515625, 4.028347412109375, 4.029434814453125, 4.028558349609375, 4.022712646484375, 4.02527587890625, 4.03285400390625, 4.03244775390625]",tokens/s,15.644815163791419,kWh,0.00011780355589127305,1.2994019435771773e-05,7.813648843506843e-05,0.00020893406376211327,tokens/kWh,301530.5348759698,,s,630,40.26517862319944,0.06391298194158644,0.00050928786236609,0.06383504104614257,0.06421259231567382,0.06434695129394531,0.06728740425109864,"[0.06685033416748047, 0.06390419387817382, 0.0633175048828125, 0.06316476821899414, 0.06330486297607422, 0.06340505599975586, 0.06343910217285156, 0.06357785415649414, 0.0636165771484375, 0.0634471664428711, 0.06348831939697265, 0.06377395248413086, 0.06361983871459961, 0.06350815963745117, 0.06378118515014648, 0.06434547424316406, 0.0641480941772461, 0.06368668746948242, 0.06375830459594727, 0.06379446411132812, 0.06353174209594727, 0.0634936637878418, 0.06352268981933594, 0.06355404663085938, 0.06341846466064453, 0.06384128189086914, 0.06402870178222657, 0.06362416076660156, 0.06359148788452149, 0.0641111068725586, 0.06368096160888671, 0.06391807937622071, 0.06407782745361328, 0.064, 0.06384435272216797, 0.06399795150756836, 0.06395699310302734, 0.06378291320800782, 0.06386848068237305, 0.06373446273803711, 0.06359423828125, 0.06354915237426757, 0.06367398452758789, 0.06377046585083007, 0.06403699493408203, 0.06409696197509765, 0.06409327697753907, 0.06413577270507813, 0.06399820709228515, 0.0636965103149414, 0.0637853126525879, 0.06364358520507812, 0.06354140853881836, 0.06377788925170899, 0.06368143844604492, 0.0638009910583496, 
0.06362556838989258, 0.06381340789794922, 0.06366460800170898, 0.06380313491821289, 0.06365151977539063, 0.06403721618652344, 0.06415174102783203, 0.06742607879638672, 0.06392652893066406, 0.06324428939819336, 0.06336444854736328, 0.06338560104370117, 0.0634488639831543, 0.06381452941894532, 0.06375423812866211, 0.06359843063354492, 0.06347792053222656, 0.06353305435180664, 0.06343027114868165, 0.0635467529296875, 0.06357913589477539, 0.0638520622253418, 0.06409024047851562, 0.06420925140380859, 0.06380543899536133, 0.06383135986328126, 0.0637751350402832, 0.0634370880126953, 0.0634101448059082, 0.06353513717651367, 0.06359827041625976, 0.06352313613891601, 0.06377193450927734, 0.06366860961914063, 0.06352025604248047, 0.06366704177856446, 0.06379520034790039, 0.06409625244140625, 0.06448332977294922, 0.06411650848388672, 0.06378636932373047, 0.06379593658447266, 0.06374208068847656, 0.06357523345947266, 0.06368719863891602, 0.06390748977661133, 0.0638421745300293, 0.06360371017456054, 0.06375094223022461, 0.06372230529785156, 0.06378307342529296, 0.06371257781982421, 0.06411872100830078, 0.06433869171142578, 0.06404300689697266, 0.0640815658569336, 0.06400812530517579, 0.06393606567382812, 0.06401725006103516, 0.06380444717407227, 0.06394700622558594, 0.0639290542602539, 0.06397068786621093, 0.06375414276123047, 0.063716064453125, 0.06385663986206054, 0.06416361236572266, 0.06379951858520508, 0.06385852813720704, 0.06395920181274414, 0.06758175659179687, 0.0644090576171875, 0.06356246566772461, 0.06339532852172852, 0.06342297744750977, 0.0636948471069336, 0.0638375358581543, 0.06378160095214844, 0.06375395202636719, 0.0636556167602539, 0.06354998397827148, 0.06360188674926757, 0.06354806518554687, 0.06361715316772461, 0.06382121658325195, 0.06428892517089843, 0.0642136001586914, 0.06420464324951172, 0.06419046020507813, 0.06399795150756836, 0.06358547210693359, 0.06364582443237304, 0.06351532745361328, 0.06358544158935547, 0.06361584091186523, 0.06358220672607422, 0.06373756790161132, 0.06370342254638672, 0.0637388801574707, 0.06381049728393555, 0.06437459564208985, 0.06438694763183593, 0.06414339447021485, 0.0641145248413086, 0.0638570556640625, 0.0637457275390625, 0.06384262466430664, 0.06402457427978515, 0.06398361587524413, 0.0639908790588379, 0.06382060623168945, 0.06364889526367187, 0.06374476623535157, 0.06371964645385743, 0.06368051147460937, 0.0640469741821289, 0.06411666870117187, 0.06412834930419922, 0.06409468841552735, 0.06411878204345703, 0.06399014282226563, 0.06398060989379883, 0.06359545516967774, 0.0637311668395996, 0.06380598449707031, 0.06399545669555665, 0.06389139175415039, 0.06395340728759766, 0.06391948699951172, 0.06388953781127929, 0.06366371154785157, 0.06420162963867188, 0.06421222686767578, 0.06719686126708985, 0.06413948822021484, 0.06359590530395508, 0.06357817459106445, 0.06371158218383789, 0.06376992034912109, 0.06377695846557617, 0.06380569458007812, 0.06373401641845704, 0.06369689559936523, 0.06366723251342774, 0.06373065567016602, 0.06368460845947266, 0.06363955307006836, 0.06376857757568359, 0.06405107116699219, 0.06438912200927735, 0.0644343032836914, 0.06379110336303712, 0.06374399948120117, 0.06360883331298828, 0.06363888168334961, 0.06377040100097656, 0.06378380966186524, 0.06386643218994141, 0.06374153518676758, 0.06359654235839844, 0.06372396850585937, 0.06362972640991212, 0.06397030258178711, 0.06403993225097657, 0.06426134490966796, 0.06415030670166015, 0.06394192123413087, 0.06373990249633789, 0.0638880958557129, 0.06402649688720703, 
0.06405644989013672, 0.06384537506103516, 0.06398771286010742, 0.06383987045288086, 0.06381811141967773, 0.06402611541748048, 0.06417040252685546, 0.0637809600830078, 0.06385868835449218, 0.06396723175048828, 0.06425724792480468, 0.06413801574707031, 0.06379724884033203, 0.06361670303344727, 0.06401670074462891, 0.06372761535644532, 0.0640531234741211, 0.064223388671875, 0.06415337371826171, 0.0638216323852539, 0.06375667190551758, 0.0637188491821289, 0.06386540985107422, 0.06384611129760742, 0.0640311050415039, 0.06432105255126953, 0.06732438659667969, 0.06432288360595703, 0.06358668899536132, 0.06344908905029296, 0.06364096069335938, 0.06370124816894532, 0.06388191986083984, 0.06379724884033203, 0.06375606536865235, 0.06362131118774414, 0.06367212677001953, 0.06360291290283203, 0.06365814590454101, 0.06371311950683593, 0.06381327819824219, 0.06421334075927734, 0.06432672119140626, 0.06427049255371094, 0.06410431671142579, 0.06399391937255859, 0.06371721649169922, 0.06370147323608398, 0.06373839950561523, 0.06372147369384766, 0.06370304107666015, 0.06362726211547852, 0.0636797103881836, 0.06364649581909179, 0.06358595275878906, 0.06379705429077148, 0.06416022491455078, 0.06411472320556641, 0.06421497344970703, 0.06408946990966796, 0.06399833679199218, 0.06396051025390626, 0.06397999954223633, 0.06435475158691406, 0.06378700637817383, 0.06375219345092774, 0.06376006317138672, 0.0639507827758789, 0.06397500610351563, 0.06405181121826171, 0.06398175811767579, 0.06411443328857422, 0.06410594940185547, 0.06429558563232422, 0.0641815643310547, 0.0640223388671875, 0.06376959991455078, 0.06379036712646484, 0.06382390213012695, 0.06383071899414063, 0.06386070251464844, 0.06383209609985352, 0.0638193588256836, 0.06375056076049805, 0.06380137634277344, 0.06415151977539063, 0.063925537109375, 0.06399459075927734, 0.06449359893798828, 0.06714780426025391, 0.06422041320800781, 0.06368332672119141, 0.06357401657104492, 0.06350438308715821, 0.06357356643676758, 0.06377721786499023, 0.06397542572021485, 0.0639918098449707, 0.06377676773071289, 0.06355072021484375, 0.06359654235839844, 0.06359939193725586, 0.06368252944946289, 0.06392326354980468, 0.0642938232421875, 0.06434815979003906, 0.06435391998291015, 0.0638897933959961, 0.06378236770629883, 0.06386320114135742, 0.06367654418945312, 0.06358220672607422, 0.0635781135559082, 0.0636619529724121, 0.0635855369567871, 0.0635821762084961, 0.06374851226806641, 0.06378707122802735, 0.06387689590454101, 0.06415631866455078, 0.06428467559814453, 0.06423551940917968, 0.06405897521972656, 0.06415414428710937, 0.06403263854980469, 0.06380953598022461, 0.06385027313232422, 0.06357219314575195, 0.0636313591003418, 0.06367846298217773, 0.06381772613525391, 0.06383561706542969, 0.06379743957519532, 0.06393280029296874, 0.06399590301513672, 0.06430512237548829, 0.06427772521972656, 0.06395574569702149, 0.06386195373535156, 0.0638144645690918, 0.06392422485351562, 0.06373980712890626, 0.0638098258972168, 0.06384419250488281, 0.0645447006225586, 0.06357968139648437, 0.06398726272583008, 0.06409645080566406, 0.06421164703369141, 0.0640471649169922, 0.06408191680908203, 0.06407068634033203, 0.06754319763183594, 0.06424156951904297, 0.0635741424560547, 0.06317222213745118, 0.06338156890869141, 0.06369520187377929, 0.06383599853515624, 0.06371257781982421, 0.06363552093505859, 0.06358915328979492, 0.06343679809570313, 0.06345318222045898, 0.06346716690063477, 0.06368291091918946, 0.06387916946411133, 0.06438428497314454, 0.06440767669677734, 0.0637364158630371, 
0.06378889465332031, 0.06384223937988281, 0.06384585571289063, 0.06373657608032227, 0.06363103866577148, 0.06366422271728515, 0.06369657516479492, 0.06379743957519532, 0.06363785552978515, 0.06358015823364258, 0.06364550399780274, 0.06336495971679687, 0.06372387313842773, 0.06428876495361328, 0.06400160217285156, 0.06383039855957032, 0.06382598495483398, 0.06354496002197266, 0.06376895904541016, 0.06393049621582031, 0.06403263854980469, 0.06371123123168945, 0.06348153686523438, 0.06368288040161132, 0.06375628662109376, 0.063727294921875, 0.06353443145751952, 0.06424470520019532, 0.06412633514404296, 0.064150146484375, 0.06390892791748047, 0.0637572479248047, 0.06382368087768554, 0.06410018920898437, 0.06371567916870118, 0.06387071990966797, 0.06399190521240235, 0.06395849609375, 0.06395955276489258, 0.06381094360351562, 0.06373459243774414, 0.06378905487060547, 0.06361222457885742, 0.06408243560791016, 0.06387843322753907, 0.06774374389648438, 0.06450150299072266, 0.06351852798461914, 0.06347206497192383, 0.0634142723083496, 0.06366207885742188, 0.06384844970703125, 0.0634142723083496, 0.06344815826416016, 0.063353759765625, 0.06329552078247071, 0.0632540168762207, 0.06371075057983398, 0.06377135848999023, 0.0639697608947754, 0.06413030242919922, 0.06411929321289063, 0.06392806243896484, 0.06395654296875, 0.0639536018371582, 0.0638361587524414, 0.0637132797241211, 0.06368870544433594, 0.06363750457763671, 0.06370521545410156, 0.06375206375122071, 0.06373990249633789, 0.06358220672607422, 0.06366412734985352, 0.06396928024291992, 0.06392617416381836, 0.06445475006103515, 0.06428163146972657, 0.064098876953125, 0.06383801651000977, 0.06377328109741211, 0.06379110336303712, 0.06391177749633789, 0.06391120147705077, 0.06397753524780274, 0.0636956787109375, 0.06360038375854492, 0.06369308853149414, 0.06394262313842773, 0.06385254287719727, 0.06371638488769531, 0.06372230529785156, 0.06394675064086915, 0.06403068542480468, 0.06399200057983398, 0.06389270401000977, 0.06395779037475587, 0.06361497497558594, 0.06387209701538087, 0.06384035110473633, 0.06386489486694336, 0.06361561584472657, 0.06401197052001953, 0.06411894226074219, 0.06407001495361328, 0.06389750289916993, 0.0638996467590332, 0.06435225677490235, 0.06758399963378907, 0.06433132934570313, 0.06354726409912109, 0.06343097686767578, 0.06356943893432618, 0.0637729606628418, 0.06392659378051758, 0.06385472106933594, 0.06383631896972657, 0.06380473709106445, 0.0635621452331543, 0.06366819381713867, 0.06378070449829101, 0.06369311904907227, 0.06392012786865234, 0.06435440063476562, 0.06414707183837891, 0.0641393280029297, 0.06420047760009766, 0.06400863647460937, 0.06401023864746094, 0.06388124847412109, 0.0638480339050293, 0.06390208053588867, 0.06399766540527344, 0.0637154884338379, 0.06361331176757813, 0.06364543914794922, 0.06367257690429687, 0.06371020889282226, 0.06408377838134766, 0.06431353759765625, 0.06421171569824219, 0.0640773468017578, 0.06408787536621094, 0.06388550567626954, 0.06413155364990235, 0.06398876953125, 0.06381657409667969, 0.064040771484375, 0.06376422500610351, 0.06360688018798828, 0.06387292861938476, 0.0639062385559082, 0.06408611297607422, 0.06408335876464843, 0.06389820861816406, 0.06410205078125, 0.06421539306640625, 0.06421250915527343, 0.06392428970336914, 0.06417244720458984, 0.06475981140136719, 0.06386412811279298, 0.06420140838623047, 0.06416178894042969, 0.06382201766967774, 0.0638216323852539, 0.06366617584228515, 0.06422457885742187, 0.06419315338134765, 0.06400326538085938, 0.0641360626220703, 
0.06770063781738281, 0.06456662750244141, 0.06383078384399414, 0.06362847900390625, 0.06346835327148438, 0.06377228927612305, 0.06387267303466797, 0.06395487976074218, 0.0640640640258789, 0.06384640121459961, 0.06378684616088867, 0.06354787063598633, 0.0635939826965332, 0.06363935852050781, 0.06383446502685547, 0.06434841918945312, 0.0643616943359375, 0.06401513671875, 0.06385030364990234, 0.06393670272827148, 0.06399379348754883, 0.06400323486328124, 0.06364384078979492, 0.06365052795410156, 0.0635736312866211, 0.06384883117675781, 0.06373360061645508, 0.06368672180175781, 0.06375638580322265, 0.06383206558227539, 0.06419660949707032, 0.06417817687988281, 0.06446905517578125, 0.06415721893310547, 0.06406531524658203, 0.06392691040039063, 0.06408914947509765, 0.06412179565429688, 0.063963134765625, 0.06388032150268555, 0.06363225555419921, 0.06378870391845703, 0.06373411178588867, 0.06376444625854492, 0.06387260818481445, 0.06415302276611329, 0.0642278060913086, 0.06431388854980469, 0.06385161590576172, 0.06392720031738282, 0.06412083435058594, 0.06395097732543946, 0.06405935668945313, 0.06386883163452148, 0.0640159683227539, 0.06403241729736328, 0.0639309425354004, 0.06399200057983398, 0.06410854339599609, 0.06406543731689453, 0.06387283325195313, 0.06401017761230468, 0.06433993530273438]",tokens/s,15.64627356792638,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 757, in __init__ self.block_sparse_moe = MixtralSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in __init__ self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 672, in __init__ self.w3 = nn.Linear(self.hidden_dim, self.ffn_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 100.12 MiB is free. Process 180049 has 14.64 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 49.54 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1262, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 797, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 402, in __init__ 
self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 81770 has 14.73 GiB memory in use. Of the allocated memory 12.27 GiB is allocated by PyTorch, and 2.34 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.886336,3354.329088,0.0,2959.081472,2942.567424,s,1,7.60200439453125,7.60200439453125,0.0,7.60200439453125,7.60200439453125,7.60200439453125,7.60200439453125,[7.60200439453125],,kWh,1.0000995695833125e-05,1.0958325234668118e-06,3.3088915359982818e-06,1.4405719755298219e-05,,MB,1145.524224,3549.364224,0.0,3141.533696,3105.830912,s,10,0.314550048828125,0.03145500488281249,0.0017512524114522284,0.03136390399932861,0.03279800224304199,0.03422858524322509,0.03537305164337158,"[0.0356591682434082, 0.029427936553955078, 0.02969139289855957, 0.03184819221496582, 0.03121228790283203, 0.030395008087158202, 0.03151552009582519, 0.029893503189086915, 0.03248009490966797, 0.03242694473266602]",tokens/s,8138.609450347993,kWh,1.1479109921405104e-06,1.265949505202019e-07,7.620790410353302e-07,2.0365849836960425e-06,tokens/kWh,125700622.3896462,MB,1173.696512,3591.307264,0.0,3183.476736,3163.048448,s,10,10.851031616210937,1.0851031616210938,0.013687952523551393,1.0892457885742188,1.09405625,1.0982018310546875,1.1015182958984375,"[1.093135009765625, 1.102347412109375, 1.08806591796875, 1.092810791015625, 1.0870216064453124, 1.0904256591796875, 1.0930145263671875, 1.0854609375, 1.062905029296875, 1.0558447265625]",tokens/s,58.05899588927648,kWh,3.152952058369203e-05,3.477122149222979e-06,2.0829549343364018e-05,5.5836192076279034e-05,tokens/kWh,1128300.4384313016,,s,630,10.847838054656979,0.017218790562947592,0.0003928363628905611,0.017219743728637694,0.017526509666442872,0.01780540027618408,0.018400972499847415,"[0.017661983489990235, 0.01719891166687012, 0.01731616020202637, 0.01738137626647949, 0.017250240325927733, 0.017360960006713867, 0.017319936752319336, 0.017358240127563478, 0.017492351531982423, 0.017370880126953123, 0.017455583572387696, 0.017567743301391603, 0.017489919662475584, 0.01736832046508789, 0.01737343978881836, 
0.017690879821777344, 0.017473663330078125, 0.01723520088195801, 0.01719718360900879, 0.01747635269165039, 0.017453216552734376, 0.01725859260559082, 0.017282976150512695, 0.017385183334350587, 0.01759062385559082, 0.017321760177612305, 0.017364992141723632, 0.01719424057006836, 0.01720806312561035, 0.0172677116394043, 0.017212127685546873, 0.017219871520996095, 0.017319936752319336, 0.017358848571777344, 0.017258655548095702, 0.01787273597717285, 0.017613983154296874, 0.017234432220458985, 0.017322175979614256, 0.017295520782470705, 0.017349983215332033, 0.017566368103027342, 0.017430015563964844, 0.017453567504882812, 0.017433759689331054, 0.017216224670410157, 0.017082239151000978, 0.017186975479125975, 0.017207391738891603, 0.017272544860839845, 0.01729155158996582, 0.017154048919677735, 0.01719500732421875, 0.017299455642700197, 0.017252351760864256, 0.017375232696533204, 0.017129472732543945, 0.017366527557373047, 0.01728060722351074, 0.01722051239013672, 0.017522687911987304, 0.017276927947998046, 0.01717628860473633, 0.017629728317260743, 0.017875135421752928, 0.01758236885070801, 0.017876863479614258, 0.01724345588684082, 0.017353471755981446, 0.01727084732055664, 0.01731283187866211, 0.01735148811340332, 0.01778086471557617, 0.017348703384399415, 0.017341567993164064, 0.017395999908447264, 0.01737779235839844, 0.01728441619873047, 0.017212095260620116, 0.017114784240722655, 0.017530784606933594, 0.017159776687622072, 0.017465919494628907, 0.01730739212036133, 0.02096796798706055, 0.019359743118286133, 0.017477632522583008, 0.017555456161499023, 0.01739094352722168, 0.017261215209960937, 0.017180671691894533, 0.017123327255249024, 0.01722163200378418, 0.0171378231048584, 0.01725628852844238, 0.01745305633544922, 0.017444576263427734, 0.01729475212097168, 0.01787379264831543, 0.018478912353515627, 0.01818841552734375, 0.0173702392578125, 0.0174268798828125, 0.017379423141479493, 0.017515968322753907, 0.017425119400024416, 0.01721164894104004, 0.01716223907470703, 0.017526496887207033, 0.017228063583374024, 0.017324031829833983, 0.017319839477539064, 0.017334112167358397, 0.017202688217163087, 0.0174517765045166, 0.017245376586914062, 0.01732691192626953, 0.017172479629516603, 0.01727462387084961, 0.017139328002929687, 0.01726527976989746, 0.017432191848754882, 0.017496448516845703, 0.017551359176635743, 0.017454816818237306, 0.01722960090637207, 0.017766975402832032, 0.017862432479858397, 0.017281408309936522, 0.01722064018249512, 0.01701798439025879, 0.01729315185546875, 0.017489471435546876, 0.01741868782043457, 0.017154239654541017, 0.017452640533447264, 0.017173824310302736, 0.01719772720336914, 0.017328384399414063, 0.01714995193481445, 0.017190271377563477, 0.01720547294616699, 0.016984447479248047, 0.016940832138061523, 0.017105152130126953, 0.017470975875854493, 0.01750271987915039, 0.01749318313598633, 0.017271680831909178, 0.017242368698120118, 0.017342144012451172, 0.01710393524169922, 0.01717955207824707, 0.017209375381469726, 0.01728102493286133, 0.017137664794921875, 0.017102848052978514, 0.017150976181030272, 0.017130399703979494, 0.017101951599121094, 0.01725129508972168, 0.017502208709716797, 0.017708864212036133, 0.01727097511291504, 0.017155744552612304, 0.017328479766845702, 0.0172359676361084, 0.017059488296508787, 0.017073919296264648, 0.017554143905639648, 0.01753251266479492, 0.01736323165893555, 0.017238016128540038, 0.017247871398925783, 0.01713190460205078, 0.01699430465698242, 0.01710895919799805, 0.01736297607421875, 0.017175615310668944, 
0.017187776565551757, 0.017133472442626953, 0.01727827262878418, 0.017144128799438475, 0.01714838409423828, 0.017421600341796874, 0.017785600662231445, 0.017217504501342774, 0.017053440093994142, 0.017107200622558594, 0.018931711196899414, 0.018153472900390624, 0.01752662467956543, 0.017121440887451173, 0.017047456741333008, 0.01725040054321289, 0.017096704483032226, 0.017202207565307617, 0.017156320571899412, 0.017269407272338867, 0.01724015998840332, 0.017336320877075196, 0.017082176208496093, 0.017096895217895508, 0.017501760482788085, 0.01723436737060547, 0.017094655990600584, 0.01704876708984375, 0.01726972770690918, 0.017571680068969725, 0.017348608016967772, 0.01734003257751465, 0.017388927459716798, 0.017300064086914063, 0.01712995147705078, 0.017116479873657227, 0.01730374336242676, 0.017296863555908204, 0.017148895263671873, 0.017372352600097656, 0.01728623962402344, 0.017074111938476563, 0.017344127655029296, 0.01773923110961914, 0.017048255920410156, 0.0171560001373291, 0.017358272552490235, 0.017134111404418947, 0.01718070411682129, 0.017088640213012696, 0.017117151260375975, 0.017231775283813477, 0.01747727966308594, 0.017424800872802734, 0.017330207824707032, 0.017170400619506837, 0.017260671615600586, 0.017125280380249023, 0.017258432388305663, 0.01725791931152344, 0.017232511520385744, 0.017264095306396485, 0.017343008041381835, 0.017187839508056642, 0.018181119918823242, 0.018701728820800782, 0.017225696563720704, 0.017332735061645507, 0.017364479064941405, 0.017451648712158204, 0.017514495849609374, 0.017366783142089844, 0.01730531120300293, 0.017662336349487304, 0.018061023712158203, 0.017432863235473633, 0.017247711181640625, 0.017117183685302736, 0.017056640625, 0.017123584747314454, 0.017176319122314453, 0.017278976440429687, 0.01718681526184082, 0.017097856521606444, 0.01706662368774414, 0.016936511993408204, 0.017006399154663086, 0.017507200241088868, 0.017426591873168945, 0.017383167266845703, 0.017143903732299806, 0.017100799560546876, 0.017174367904663087, 0.017277088165283203, 0.017190656661987304, 0.017315391540527345, 0.0171711368560791, 0.017133535385131834, 0.017200544357299806, 0.017369728088378906, 0.017498111724853514, 0.017119232177734374, 0.017113088607788086, 0.017390880584716797, 0.017175264358520508, 0.01717219161987305, 0.017317792892456055, 0.017367424011230467, 0.017301504135131835, 0.01727280044555664, 0.01714384078979492, 0.01712646484375, 0.01710553550720215, 0.017258880615234375, 0.017008575439453125, 0.016977920532226562, 0.01708361625671387, 0.01729977607727051, 0.017821599960327148, 0.017297983169555664, 0.017096704483032226, 0.017254463195800783, 0.01722707176208496, 0.017248640060424803, 0.017228031158447267, 0.01708185577392578, 0.017121440887451173, 0.017926496505737306, 0.017059295654296876, 0.017089056015014648, 0.01719059181213379, 0.017162559509277343, 0.017362943649291994, 0.017364992141723632, 0.017295360565185547, 0.017305599212646485, 0.018191455841064453, 0.018140064239501954, 0.01768409538269043, 0.01727097511291504, 0.01714512062072754, 0.017243040084838866, 0.01716633605957031, 0.017149856567382812, 0.017160287857055666, 0.01732371139526367, 0.017130975723266603, 0.017015199661254882, 0.01718726348876953, 0.01720319938659668, 0.017111040115356444, 0.017133567810058595, 0.017524736404418945, 0.017352256774902344, 0.017406015396118163, 0.017166303634643554, 0.017964704513549805, 0.017412864685058593, 0.01722368049621582, 0.01709791946411133, 0.01722662353515625, 0.017141248703002928, 0.01723641586303711, 0.017249471664428712, 
0.017202016830444335, 0.017266559600830077, 0.01718681526184082, 0.017160287857055666, 0.017183839797973634, 0.017410528182983397, 0.017405439376831054, 0.01714044761657715, 0.01713907241821289, 0.01834623908996582, 0.01699286460876465, 0.017209184646606444, 0.017416479110717774, 0.017381248474121095, 0.017326080322265625, 0.017309471130371092, 0.017051456451416015, 0.017182655334472656, 0.017231775283813477, 0.017340351104736328, 0.017461343765258788, 0.017385951995849608, 0.01729324722290039, 0.017143936157226564, 0.01715932846069336, 0.017159008026123048, 0.017242111206054688, 0.01745305633544922, 0.01741168022155762, 0.017154464721679686, 0.0172542724609375, 0.01712335968017578, 0.017243648529052736, 0.01700105667114258, 0.01743052864074707, 0.017946239471435546, 0.017891712188720703, 0.017180639266967772, 0.01728451156616211, 0.01720911979675293, 0.01728783988952637, 0.017229888916015627, 0.017203296661376953, 0.017262624740600585, 0.017187999725341796, 0.01705865669250488, 0.017024480819702148, 0.01705958366394043, 0.017154783248901368, 0.017739839553833008, 0.01721897506713867, 0.017174240112304687, 0.017174623489379884, 0.01701148796081543, 0.017469728469848633, 0.0200185604095459, 0.01784662437438965, 0.017336191177368163, 0.017391136169433594, 0.01734511947631836, 0.01730988883972168, 0.017354560852050782, 0.017393503189086914, 0.01720307159423828, 0.01730748748779297, 0.017565343856811525, 0.017498912811279296, 0.01739673614501953, 0.01726908874511719, 0.01729996871948242, 0.017209503173828126, 0.017332096099853516, 0.01737923240661621, 0.0169597110748291, 0.01720319938659668, 0.01770832061767578, 0.017418176651000976, 0.017353504180908204, 0.01731180763244629, 0.017055456161499023, 0.017067359924316405, 0.017228063583374024, 0.017287776947021483, 0.01733171272277832, 0.01729996871948242, 0.017358848571777344, 0.01721343994140625, 0.017193183898925782, 0.017395263671875, 0.017120639801025392, 0.017386335372924805, 0.017445920944213867, 0.017144800186157227, 0.017258495330810548, 0.017071136474609373, 0.01683964729309082, 0.017309696197509765, 0.01745510482788086, 0.018185184478759765, 0.0184233283996582, 0.01777302360534668, 0.017219743728637694, 0.017204511642456056, 0.017238592147827147, 0.017094655990600584, 0.01767628860473633, 0.01748601531982422, 0.017368896484375, 0.01727622413635254, 0.01739411163330078, 0.017676544189453126, 0.017295360565185547, 0.01723187255859375, 0.01724937629699707, 0.01705855941772461, 0.017082527160644533, 0.017532415390014648, 0.017400032043457032, 0.01744236755371094, 0.01716044807434082, 0.017158655166625975, 0.01741561508178711, 0.017285663604736327, 0.017379520416259765, 0.017024831771850588, 0.017147903442382813, 0.017303552627563477, 0.01722764778137207, 0.017163967132568358, 0.01718726348876953, 0.01713577651977539, 0.01734025573730469, 0.01750774383544922, 0.01733078384399414, 0.017219743728637694, 0.017029056549072264, 0.016885759353637696, 0.017177663803100585, 0.017268640518188477, 0.01710176086425781, 0.017119039535522462, 0.016994495391845704, 0.016885759353637696, 0.01693801689147949, 0.017103839874267578, 0.017154239654541017, 0.017260351181030274, 0.017120479583740234, 0.017021631240844725, 0.01708233642578125, 0.017037439346313476, 0.017151647567749024, 0.01716873550415039, 0.016891616821289063, 0.01667100715637207, 0.01700864028930664, 0.0168690242767334, 0.016955583572387696, 0.017043264389038085, 0.01701513671875, 0.016912384033203123, 0.01792585563659668, 0.018128448486328125, 0.017611488342285157, 0.01794223976135254, 
0.01809030342102051, 0.01693641662597656, 0.016947711944580078, 0.016934911727905275, 0.01699772834777832, 0.016929407119750977, 0.017037343978881837, 0.01732329559326172, 0.017328351974487306, 0.01679587173461914, 0.016648479461669922, 0.01660313606262207, 0.01666265678405762, 0.01718796730041504, 0.016797439575195312, 0.01693712043762207, 0.016814943313598632, 0.016777215957641603, 0.01679769515991211, 0.016719871520996094, 0.016760223388671874, 0.01694166374206543, 0.01702911949157715, 0.01683046340942383, 0.01683046340942383, 0.016639999389648438, 0.01661337661743164, 0.01681814384460449, 0.01673423957824707, 0.016665727615356445, 0.01668716812133789, 0.016542528152465822, 0.016574464797973632, 0.016948896408081053, 0.016596479415893553, 0.016604000091552735, 0.016916479110717773, 0.016674400329589844, 0.016762592315673827, 0.016663232803344728, 0.01665023994445801, 0.016658143997192384, 0.0166976318359375, 0.016695295333862305, 0.016655935287475585, 0.016662975311279297, 0.01663088035583496, 0.0165897274017334, 0.016669952392578125, 0.016587520599365236, 0.01659699249267578, 0.016586336135864257, 0.016576927185058595, 0.016801376342773438, 0.016729536056518553, 0.016626655578613283, 0.01665433692932129, 0.01701478385925293, 0.016859136581420898, 0.018188127517700194, 0.018187456130981446, 0.01733510398864746, 0.016828607559204102, 0.016777023315429688, 0.017040767669677735, 0.016684831619262694, 0.01666543960571289, 0.016656383514404297, 0.0166297607421875, 0.016695295333862305, 0.0167476806640625, 0.01662191963195801, 0.016588832855224608, 0.01655855941772461, 0.016640256881713868, 0.01670844841003418, 0.016733087539672852, 0.016604223251342774, 0.016570720672607422, 0.016560575485229493, 0.01667647933959961, 0.016673311233520508, 0.01660723114013672, 0.016594944000244142, 0.01660326385498047, 0.016563583374023437, 0.01654630470275879, 0.016635904312133788, 0.016569984436035155, 0.01657913589477539, 0.016621023178100585, 0.017758560180664063, 0.016602880477905275, 0.016687360763549805, 0.01674019241333008, 0.016631967544555665, 0.016688671112060547, 0.016681503295898438, 0.016655391693115234, 0.016697439193725586, 0.01677395248413086, 0.01661756706237793, 0.01664227294921875, 0.016621248245239258, 0.01653116798400879, 0.016619583129882813, 0.01665046310424805, 0.01672969627380371, 0.016945568084716797, 0.016893184661865235, 0.016708351135253905, 0.016776735305786133, 0.016593536376953124, 0.016559968948364256, 0.01660927963256836, 0.016723648071289062, 0.017016223907470703, 0.016839584350585936, 0.016662527084350585, 0.016648191452026367, 0.016746496200561522, 0.01684867286682129]",tokens/s,58.07608823304111,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.255552,3354.329088,0.0,2959.081472,2942.567424,s,1,7.56006298828125,7.56006298828125,0.0,7.56006298828125,7.56006298828125,7.56006298828125,7.56006298828125,[7.56006298828125],,kWh,1.030244897499036e-05,1.129170060222218e-06,4.934170613993261e-06,1.636578964920584e-05,,MB,1145.683968,3549.364224,0.0,3141.533696,3105.830912,s,10,0.3191483516693115,0.03191483516693115,0.001310640518279943,0.03153615951538086,0.0330703311920166,0.03414887790679931,0.03501171527862549,"[0.03522742462158203, 0.03179126358032226, 0.03283065414428711, 0.032280128479003904, 0.030215551376342773, 0.032129951477050785, 0.03128105545043945, 0.03119024085998535, 0.030974496841430665, 0.031227584838867187]",tokens/s,8021.348023920134,kWh,1.1245226313461203e-06,1.239511474870338e-07,7.47360640623088e-07,1.995834419456242e-06,tokens/kWh,128267153.57967736,MB,1174.114304,3591.307264,0.0,3183.476736,3163.048448,s,10,10.659780151367189,1.0659780151367189,0.013331280476035226,1.0704387817382812,1.0778973876953126,1.08138466796875,1.0841744921875,"[1.0745426025390625, 1.0615531005859375, 1.0421826171875, 1.0848719482421876, 1.068078125, 1.0727994384765625, 1.0771224365234375, 1.0770347900390624, 1.049612060546875, 1.0519830322265624]",tokens/s,59.1006560223663,kWh,3.100471784532199e-05,3.419352761774082e-06,2.0457878545777293e-05,5.488194915287337e-05,tokens/kWh,1147918.4134753277,,s,630,10.656761390686036,0.016915494270930215,0.00042222898674190197,0.016895071983337402,0.01727531833648682,0.01764526271820068,0.01838836977005005,"[0.017031999588012697, 0.01689187240600586, 0.016939008712768554, 0.01679155158996582, 0.016696416854858398, 0.01682115173339844, 0.01739129638671875, 0.017162559509277343, 0.017147296905517577, 0.016937503814697264, 0.017070144653320313, 0.017983488082885742, 0.02095235252380371, 0.01775689506530762, 0.016965631484985352, 0.016955392837524414, 0.017227264404296876, 0.01702348709106445, 0.0169881591796875, 0.016957439422607423, 0.016889184951782227, 0.016878240585327147, 0.016842752456665038, 0.0166046085357666, 0.01683888053894043, 0.017377376556396484, 0.017266944885253908, 0.017156095504760743, 0.01695052719116211, 0.017171199798583985, 0.01697532844543457, 0.016945695877075194, 0.0176843204498291, 0.01694326400756836, 0.016904096603393554, 0.01711503982543945, 0.017010816574096678, 0.016957504272460937, 0.016916479110717773, 0.016955007553100587, 0.016845184326171873, 0.017035263061523438, 0.016721920013427736, 0.016787296295166017, 0.016965791702270507, 0.017297407150268555, 0.017165472030639648, 0.01708937644958496, 0.016875360488891603, 0.016824480056762695, 0.016746271133422853, 0.01687126350402832, 0.016902528762817382, 0.016987808227539064, 0.016943456649780274, 0.017315839767456053, 0.01680384063720703, 0.016701440811157226, 0.016682783126831056, 0.016670495986938476, 0.01677510452270508, 0.01669990348815918, 0.01651273536682129, 0.017988576889038085, 0.017946624755859376, 0.01725644874572754, 0.0167956485748291, 0.01685215950012207, 0.016728384017944336, 0.016623775482177736, 0.01680214309692383, 0.016930112838745116, 0.016910655975341797, 0.016795616149902343, 0.01679747200012207, 0.01675436782836914, 0.016701887130737305, 0.016646656036376953, 0.016478208541870116, 0.016604991912841798, 0.017174720764160156, 0.01886207962036133, 0.017231679916381835, 0.01680512046813965, 0.01665119934082031, 0.017086463928222655, 0.017059839248657227, 0.016961536407470702, 0.01713283157348633, 0.017989952087402342, 
0.016932512283325197, 0.01689574432373047, 0.01678985595703125, 0.016951904296875, 0.0170098876953125, 0.016871648788452147, 0.017111648559570314, 0.016954816818237305, 0.01687126350402832, 0.016638368606567384, 0.016671072006225585, 0.01657423973083496, 0.01688598442077637, 0.016920576095581053, 0.01678950309753418, 0.016699392318725585, 0.016596511840820314, 0.016435007095336913, 0.01685068893432617, 0.017111967086791992, 0.01699865531921387, 0.016762624740600585, 0.016764928817749023, 0.016381952285766603, 0.016467967987060548, 0.016343040466308592, 0.01689116859436035, 0.01639049530029297, 0.016391551971435547, 0.016299007415771484, 0.016354719161987306, 0.01639894485473633, 0.01655388832092285, 0.016384096145629884, 0.016355327606201172, 0.01636147117614746, 0.0179303035736084, 0.018018367767333985, 0.017269216537475585, 0.016719871520996094, 0.01657379150390625, 0.016730783462524414, 0.0165086727142334, 0.01643129539489746, 0.016281663894653322, 0.016407808303833007, 0.016366304397583006, 0.016463584899902343, 0.0162860164642334, 0.01698585510253906, 0.01793811225891113, 0.017041984558105468, 0.016537471771240233, 0.01660531234741211, 0.01647955131530762, 0.016534208297729492, 0.01647760009765625, 0.016763359069824218, 0.016468095779418945, 0.016357248306274413, 0.016410751342773436, 0.016403743743896484, 0.016253120422363283, 0.016382495880126954, 0.016541696548461913, 0.0164270076751709, 0.01643267250061035, 0.016306528091430662, 0.016342239379882814, 0.016322687149047853, 0.016378463745117186, 0.01645136070251465, 0.016404895782470702, 0.016448511123657226, 0.016583423614501953, 0.0164453125, 0.01642464065551758, 0.01643769645690918, 0.01637196731567383, 0.01638809585571289, 0.0165086727142334, 0.01634124755859375, 0.01638185691833496, 0.016351295471191407, 0.0163656005859375, 0.016363519668579102, 0.01641267204284668, 0.01651257514953613, 0.01653376007080078, 0.016476255416870117, 0.016428672790527343, 0.01634761619567871, 0.016324607849121094, 0.016361215591430663, 0.016326911926269533, 0.01633273506164551, 0.016360992431640624, 0.016343584060668947, 0.016496639251708984, 0.018335296630859376, 0.018438688278198244, 0.01752467155456543, 0.01724617576599121, 0.01708255958557129, 0.01722480010986328, 0.01723075294494629, 0.01743052864074707, 0.01719705581665039, 0.017061567306518553, 0.017178304672241212, 0.01711497688293457, 0.01721833610534668, 0.01713148880004883, 0.017141792297363283, 0.017137887954711915, 0.01722956848144531, 0.01712950325012207, 0.017130783081054687, 0.017191455841064452, 0.017178815841674806, 0.017141759872436522, 0.01715171241760254, 0.017316064834594726, 0.017086528778076173, 0.017024223327636718, 0.017257247924804688, 0.01726851272583008, 0.017135839462280273, 0.01721548843383789, 0.017154016494750977, 0.017057823181152343, 0.01739366340637207, 0.01702707290649414, 0.016873472213745116, 0.01707811164855957, 0.01718492889404297, 0.017286304473876954, 0.017215583801269533, 0.017222400665283202, 0.01722310447692871, 0.017140031814575196, 0.017149824142456055, 0.01716876792907715, 0.017093664169311525, 0.017052640914916994, 0.01729097557067871, 0.0172956485748291, 0.017164287567138673, 0.017303552627563477, 0.0172106876373291, 0.017265344619750978, 0.017307647705078123, 0.01737049674987793, 0.017054336547851563, 0.01708624076843262, 0.01720921516418457, 0.017102624893188475, 0.01696211242675781, 0.017127424240112304, 0.016998367309570314, 0.017274911880493165, 0.017039360046386717, 0.017170751571655273, 0.01673200035095215, 0.016605279922485353, 
0.01669126319885254, 0.0170250244140625, 0.016852479934692383, 0.01690880012512207, 0.016876544952392578, 0.016458751678466797, 0.016684896469116212, 0.01751030349731445, 0.01722598457336426, 0.017125375747680666, 0.016920160293579102, 0.016773536682128908, 0.01683830451965332, 0.016691551208496094, 0.016715328216552736, 0.01683705520629883, 0.017154048919677735, 0.017069984436035156, 0.016922719955444337, 0.01678335952758789, 0.01700864028930664, 0.016981407165527342, 0.016906848907470705, 0.01681612777709961, 0.017027040481567383, 0.01686300849914551, 0.016851200103759765, 0.01681612777709961, 0.016738304138183592, 0.016705535888671876, 0.01738956832885742, 0.017147903442382813, 0.017077823638916016, 0.01698975944519043, 0.01712169647216797, 0.01698396873474121, 0.01690825653076172, 0.0169531192779541, 0.016826591491699218, 0.016830528259277344, 0.017127744674682616, 0.016948991775512696, 0.016841184616088866, 0.01680588722229004, 0.01679974365234375, 0.016893951416015626, 0.01682636833190918, 0.017073152542114257, 0.016902656555175782, 0.017076736450195314, 0.017651296615600585, 0.016974016189575194, 0.016832576751708985, 0.016795711517333986, 0.016959583282470703, 0.017210752487182616, 0.01776473617553711, 0.01699660873413086, 0.016945152282714843, 0.0168407039642334, 0.018430559158325196, 0.0183110408782959, 0.017455232620239257, 0.016942848205566408, 0.017125631332397463, 0.017031328201293945, 0.017835872650146484, 0.016857088088989256, 0.01737932777404785, 0.01706598472595215, 0.016832000732421876, 0.01668070411682129, 0.01708665657043457, 0.017023712158203124, 0.017157983779907227, 0.016951295852661134, 0.01669059181213379, 0.016921087265014647, 0.01686332893371582, 0.016715776443481444, 0.016725536346435546, 0.016894432067871095, 0.016875328063964842, 0.017042816162109373, 0.01701968002319336, 0.01682579231262207, 0.017637887954711915, 0.016781408309936522, 0.016815872192382814, 0.016896352767944336, 0.017034719467163086, 0.016662847518920897, 0.016908416748046873, 0.017126432418823244, 0.01703183937072754, 0.017060096740722657, 0.016948511123657226, 0.016827167510986327, 0.016855039596557618, 0.0166495361328125, 0.01683737564086914, 0.0169769287109375, 0.016907167434692384, 0.016699392318725585, 0.016893951416015626, 0.016860736846923827, 0.016871871948242186, 0.01701273536682129, 0.016928768157958983, 0.016997888565063478, 0.016957759857177734, 0.01680179214477539, 0.017274816513061522, 0.017395967483520507, 0.017278976440429687, 0.0169881591796875, 0.017075584411621093, 0.016921215057373047, 0.016873472213745116, 0.016914432525634765, 0.01700022315979004, 0.017090272903442384, 0.016957792282104492, 0.017672191619873046, 0.018103296279907227, 0.017461759567260742, 0.017084928512573243, 0.016990207672119142, 0.016701440811157226, 0.016728000640869142, 0.01665439987182617, 0.01721507263183594, 0.017082784652709963, 0.01701251220703125, 0.016918752670288088, 0.016899999618530274, 0.016806175231933593, 0.016821855545043944, 0.016856672286987305, 0.016799840927124023, 0.016779808044433592, 0.017005760192871092, 0.01690707206726074, 0.01688150405883789, 0.01705999946594238, 0.016885759353637696, 0.016846847534179688, 0.01705504035949707, 0.01702364730834961, 0.017399839401245117, 0.01705369567871094, 0.017121280670166016, 0.017110591888427736, 0.017119680404663086, 0.01689571189880371, 0.01692086410522461, 0.019200000762939453, 0.01750822448730469, 0.017000127792358398, 0.01704185676574707, 0.01693657684326172, 0.01710323143005371, 0.016979232788085937, 0.01716092872619629, 
0.017143808364868163, 0.016947200775146484, 0.016907327651977538, 0.016870336532592775, 0.016869375228881836, 0.01680780792236328, 0.017438848495483397, 0.017147903442382813, 0.017138816833496093, 0.017052543640136718, 0.016905567169189454, 0.01685161590576172, 0.016787296295166017, 0.01693302345275879, 0.016936063766479492, 0.017022911071777343, 0.01776313591003418, 0.017092607498168946, 0.017194303512573242, 0.017003328323364257, 0.017170143127441407, 0.01795305633544922, 0.017828832626342772, 0.01779097557067871, 0.017250303268432618, 0.017133567810058595, 0.017377279281616212, 0.01794047927856445, 0.017143327713012694, 0.016941535949707032, 0.017069503784179686, 0.016873023986816407, 0.01691267204284668, 0.016980703353881837, 0.017176576614379883, 0.016855039596557618, 0.016910335540771485, 0.01702911949157715, 0.016957439422607423, 0.016883295059204103, 0.016850719451904295, 0.016949888229370116, 0.01703856086730957, 0.016846879959106446, 0.017071968078613282, 0.017210271835327147, 0.017307647705078123, 0.017122880935668945, 0.017102848052978514, 0.01718726348876953, 0.01687318420410156, 0.016906496047973632, 0.016981504440307618, 0.017201663970947266, 0.01708598327636719, 0.01691257667541504, 0.017131839752197266, 0.017104000091552735, 0.0170578556060791, 0.016902080535888674, 0.016995199203491212, 0.017303552627563477, 0.01696767997741699, 0.016979936599731446, 0.01708812713623047, 0.017222047805786133, 0.017166080474853514, 0.017182592391967774, 0.01702911949157715, 0.0169781436920166, 0.016879167556762695, 0.016869951248168945, 0.016805728912353515, 0.017049472808837892, 0.0174902400970459, 0.017811264038085937, 0.01711123275756836, 0.017096607208251954, 0.016934112548828126, 0.01709129524230957, 0.01696169662475586, 0.017067743301391602, 0.01700182342529297, 0.0169051513671875, 0.016866687774658204, 0.019163232803344726, 0.017799072265625, 0.017244319915771484, 0.01669728088378906, 0.016480255126953124, 0.016453632354736326, 0.016504703521728517, 0.016608896255493163, 0.01659942436218262, 0.016492671966552734, 0.01639219284057617, 0.01640652847290039, 0.016430368423461916, 0.01635606384277344, 0.01654374313354492, 0.01642239952087402, 0.016810207366943358, 0.016607168197631837, 0.01649660873413086, 0.01666249656677246, 0.01671824073791504, 0.016508352279663085, 0.016453407287597657, 0.016501535415649415, 0.016571744918823243, 0.016437631607055664, 0.01643721580505371, 0.016569664001464843, 0.016703968048095704, 0.016475744247436523, 0.016669631958007813, 0.01789952087402344, 0.017153247833251953, 0.01656015968322754, 0.016595712661743166, 0.01685443115234375, 0.01656278419494629, 0.016500736236572267, 0.01645676803588867, 0.01632352066040039, 0.016465919494628906, 0.016359199523925783, 0.016533727645874022, 0.01643929672241211, 0.01642073631286621, 0.01641484832763672, 0.016627712249755858, 0.01636966323852539, 0.016529407501220703, 0.016582656860351562, 0.016480255126953124, 0.016440959930419923, 0.016474496841430663, 0.016459775924682618, 0.017380447387695314, 0.01656515121459961, 0.016506879806518555, 0.01656399917602539, 0.01673561668395996, 0.016615776062011717, 0.016707071304321287, 0.016671743392944336, 0.01683612823486328, 0.01841004753112793, 0.01769808006286621, 0.01730838394165039, 0.0168222713470459, 0.016555200576782225, 0.016509759902954103, 0.016529375076293946, 0.01652720069885254, 0.01664224052429199, 0.01664156723022461, 0.016677343368530273, 0.0165928955078125, 0.016664575576782227, 0.016664575576782227, 0.016669824600219728, 0.01663680076599121, 
0.016576351165771483, 0.01648361587524414, 0.016503488540649414, 0.01645136070251465, 0.016394655227661134, 0.01641164779663086, 0.01640732765197754, 0.016422719955444337, 0.016480672836303712, 0.016465919494628906, 0.01643110466003418, 0.016521215438842773, 0.01659859275817871, 0.016984031677246093, 0.016822399139404295, 0.01678505516052246, 0.01688137626647949, 0.01657881546020508, 0.016454368591308593, 0.016434431076049805, 0.016530176162719727, 0.0167587833404541, 0.016738304138183592, 0.01683456039428711, 0.01681612777709961, 0.01696870422363281, 0.01664102363586426, 0.016781312942504883, 0.016711679458618164, 0.016639999389648438, 0.016711679458618164, 0.016736255645751954, 0.016739936828613283, 0.01667487907409668, 0.01659529685974121, 0.016616800308227538, 0.016704160690307616, 0.016687103271484375, 0.016756479263305663, 0.01671603202819824, 0.01658576011657715, 0.016720863342285158, 0.016689151763916017, 0.016685056686401366, 0.016694911956787108, 0.0166808967590332, 0.01665273666381836]",tokens/s,59.11739757546015,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 
1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 22199 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 718, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 176.12 MiB is free. Process 46618 has 14.57 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 270, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 69795 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 905.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 234.12 MiB is free. Process 167175 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", 
line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 362.12 MiB is free. Process 173299 has 14.38 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 1.78 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,887.07072,14333.902848,0.0,13931.380736,13915.964416,s,1,7.47439208984375,7.47439208984375,0.0,7.47439208984375,7.47439208984375,7.47439208984375,7.47439208984375,[7.47439208984375],,kWh,9.213675799977258e-06,9.982020764318627e-07,4.800281617978186e-06,1.5012159494387307e-05,,MB,1306.001408,14751.236096,0.0,14336.131072,14287.960064,s,10,1.7811404113769531,0.1781140411376953,0.004392679254174259,0.17825081634521484,0.18225464630126953,0.18226930618286133,0.18228103408813476,"[0.16668876647949218, 0.1769303741455078, 0.17579714965820312, 0.17809609985351563, 0.1779764404296875, 0.17840553283691407, 0.1822513885498047, 0.18147386169433594, 0.18123683166503907, 0.18228396606445313]",tokens/s,1437.281408949074,kWh,5.1865786588441335e-06,5.719818876152198e-07,3.4650856180703925e-06,9.223646164529745e-06,tokens/kWh,27754750.71718038,MB,1325.973504,14919.008256,0.0,14503.903232,14465.567232,s,10,37.88452563476563,3.7884525634765622,0.003749050164809423,3.78772216796875,3.793669409179688,3.794905261230469,3.795893942871094,"[3.7848916015625, 3.786021240234375, 3.790031494140625, 3.793394775390625, 3.79614111328125, 3.784463134765625, 3.786521240234375, 3.788923095703125, 3.789542236328125, 3.784595703125]",tokens/s,16.629481020130967,kWh,0.00011046599971115425,1.2184027577641802e-05,7.335236472472724e-05,0.00019600239201352326,tokens/kWh,321424.6487137427,,s,630,37.8802965888977,0.06012745490301223,0.0005053489444007644,0.06004763221740723,0.060469866943359374,0.06063509502410889,0.06319956707000733,"[0.06288902282714844, 0.06079641723632812, 0.059953601837158206, 0.05990361785888672, 0.05964633560180664, 0.0596049919128418, 0.059568126678466796, 0.05960294342041016, 0.05975859069824219, 0.05984441757202148, 0.0596104621887207, 0.05974483108520508, 0.05978857421875, 0.0597391357421875, 0.059719680786132816, 0.060046527862548826, 0.06038198471069336, 0.06023171234130859, 0.06013897705078125, 0.05977552032470703, 0.05974595260620117, 0.059694591522216796, 0.05969801712036133, 0.05967769622802734, 0.059753471374511716, 0.05982534408569336, 0.05988150405883789, 0.05979324722290039, 0.05992534255981445, 0.05978736114501953, 0.0598466567993164, 0.059908096313476565, 0.060319744110107425, 0.060360576629638674, 0.06029702377319336, 0.06024544143676758, 0.060074817657470705, 0.06012729644775391, 0.060203006744384766, 0.06001641464233398, 0.06002617645263672, 0.060044158935546876, 0.0600863037109375, 0.06014915084838867, 0.06008278274536133, 0.060055423736572265, 0.060057697296142576, 0.060184608459472655, 0.06064892959594727, 0.060304065704345704, 0.06072713470458985, 0.06034841537475586, 
0.06018252944946289, 0.06009619140625, 0.06011116790771484, 0.06013337707519531, 0.06010060882568359, 0.060061214447021484, 0.06014729690551758, 0.060131649017333984, 0.06021526336669922, 0.06034438323974609, 0.060317344665527343, 0.06321401596069336, 0.06097273635864258, 0.060058048248291016, 0.06001369476318359, 0.05977177429199219, 0.05979036712646484, 0.0596448974609375, 0.05967462539672851, 0.059895008087158204, 0.05967747116088867, 0.05959167861938477, 0.05974937438964844, 0.05977088165283203, 0.059684864044189455, 0.05969715118408203, 0.06007401657104492, 0.06050608062744141, 0.06054915237426758, 0.0604521598815918, 0.06022383880615234, 0.05987964630126953, 0.05974639892578125, 0.059862239837646485, 0.05973276901245117, 0.05981788635253906, 0.05978326416015625, 0.05991187286376953, 0.0598961296081543, 0.05991219329833984, 0.05985686492919922, 0.06001804733276367, 0.06016057586669922, 0.06039971160888672, 0.06044435119628906, 0.060444992065429685, 0.060467201232910155, 0.06032902526855469, 0.06027155303955078, 0.06003507232666016, 0.06005759811401367, 0.059856319427490236, 0.0598144645690918, 0.05988761520385742, 0.059799552917480465, 0.0598098258972168, 0.05991011047363281, 0.06004940795898438, 0.06011897659301758, 0.0602174072265625, 0.060218910217285156, 0.06037142562866211, 0.06026652908325195, 0.060270111083984376, 0.06024975967407226, 0.06007068634033203, 0.06007561492919922, 0.06003139114379883, 0.06017011260986328, 0.06007411193847656, 0.06007398223876953, 0.06005145645141602, 0.060026687622070314, 0.060127006530761716, 0.06373990249633789, 0.061265918731689455, 0.060350334167480466, 0.060080127716064455, 0.05998982238769531, 0.05972124862670898, 0.059658367156982424, 0.059869857788085935, 0.05997884750366211, 0.05980182266235352, 0.05983712005615234, 0.05989587020874024, 0.05989779281616211, 0.0599101448059082, 0.05995884704589844, 0.06015020751953125, 0.06038937759399414, 0.06055936050415039, 0.060335391998291014, 0.060111328125, 0.0599854736328125, 0.05972447967529297, 0.059701248168945314, 0.05981184005737305, 0.05987039947509765, 0.059941310882568356, 0.059799678802490236, 0.059955455780029296, 0.059834369659423826, 0.0598397102355957, 0.06000035095214844, 0.060058303833007816, 0.060305023193359376, 0.0602218246459961, 0.06030473709106445, 0.060263072967529294, 0.06022348785400391, 0.06013859176635742, 0.060091297149658204, 0.059977535247802735, 0.06002249526977539, 0.05999052810668945, 0.059934688568115235, 0.059940608978271484, 0.060041534423828126, 0.0599879035949707, 0.060096126556396484, 0.060160385131835935, 0.06019686508178711, 0.06043190383911133, 0.060256542205810545, 0.060297409057617185, 0.06032137680053711, 0.060345951080322265, 0.060318527221679685, 0.06023360061645508, 0.060237567901611326, 0.060338497161865234, 0.06037846374511719, 0.060056385040283204, 0.06012508773803711, 0.06009036636352539, 0.060157470703125, 0.0629376335144043, 0.06091158294677734, 0.06019046401977539, 0.06005785751342774, 0.05976473617553711, 0.05977088165283203, 0.05982003021240234, 0.05976992034912109, 0.05985756683349609, 0.05988364791870117, 0.06009667205810547, 0.05982617568969727, 0.059858943939208986, 0.060028865814208986, 0.05991635131835937, 0.06028656005859375, 0.06063145446777344, 0.060655521392822265, 0.06044681549072266, 0.060257408142089845, 0.06008102416992187, 0.060022785186767576, 0.05994908905029297, 0.059828193664550784, 0.0599818229675293, 0.06002188873291016, 0.06014041519165039, 0.060014591217041016, 0.05992819213867188, 0.06000268936157226, 0.0600167350769043, 
0.06018652725219727, 0.060429790496826175, 0.060570144653320314, 0.060587615966796876, 0.06046966552734375, 0.06034636688232422, 0.06016793441772461, 0.0601616325378418, 0.060042049407958986, 0.06008208084106445, 0.05998089599609375, 0.05997795104980469, 0.0598771858215332, 0.06011577606201172, 0.06015536117553711, 0.060221183776855466, 0.060388126373291016, 0.060300960540771484, 0.06054742431640625, 0.06062815856933594, 0.060642112731933595, 0.06042803192138672, 0.06028908920288086, 0.06026422500610352, 0.060117408752441405, 0.06023372650146484, 0.06027468872070312, 0.060131423950195315, 0.06012102508544922, 0.06017020797729492, 0.06005145645141602, 0.06004435348510742, 0.06334896087646484, 0.06135452651977539, 0.060217151641845705, 0.06007129669189453, 0.05987212753295899, 0.059840255737304685, 0.059805694580078124, 0.05974153518676758, 0.05985756683349609, 0.059924095153808594, 0.05989807891845703, 0.05984067153930664, 0.05990534210205078, 0.05996310424804688, 0.059990718841552736, 0.06011523056030273, 0.060679454803466794, 0.06068912124633789, 0.06052403259277344, 0.06033369445800781, 0.060164192199707034, 0.059912479400634766, 0.059877216339111326, 0.05989648056030274, 0.05991561508178711, 0.05990265655517578, 0.06002479934692383, 0.05990329742431641, 0.06002975845336914, 0.0600013427734375, 0.06020943832397461, 0.0600970573425293, 0.060524543762207034, 0.060633087158203126, 0.060590080261230465, 0.060581535339355466, 0.060408321380615235, 0.06039468765258789, 0.06031126403808594, 0.06025484848022461, 0.06014086532592773, 0.05996441650390625, 0.060045310974121094, 0.06007807922363281, 0.06020819091796875, 0.060119998931884765, 0.06029312133789062, 0.06036883163452148, 0.060362815856933594, 0.06057305526733398, 0.06054771041870117, 0.060672000885009764, 0.06058393478393555, 0.060440513610839845, 0.06033638381958008, 0.06018393707275391, 0.06009900665283203, 0.060047359466552735, 0.06003308868408203, 0.06006982421875, 0.06031919860839844, 0.060276382446289065, 0.060279582977294924, 0.06326403045654297, 0.06108176040649414, 0.0602446403503418, 0.059945152282714846, 0.05969712066650391, 0.05972787094116211, 0.059619327545166016, 0.059560192108154296, 0.0596577262878418, 0.05969535827636719, 0.05972572708129883, 0.05973961639404297, 0.05987100982666015, 0.05988854217529297, 0.06002473449707031, 0.06028700637817383, 0.060715007781982425, 0.060872703552246096, 0.060289215087890625, 0.059854656219482424, 0.059828159332275394, 0.05987129592895508, 0.05979974365234375, 0.05972671890258789, 0.05993084716796875, 0.059947742462158206, 0.05996134567260742, 0.05985449600219726, 0.059838497161865234, 0.05980716705322266, 0.059907135009765626, 0.060119903564453125, 0.060365791320800784, 0.060469249725341796, 0.06045084762573242, 0.06030124664306641, 0.060034561157226565, 0.05977763366699219, 0.059864574432373044, 0.059721534729003906, 0.05982672119140625, 0.059754592895507816, 0.059752193450927735, 0.05974828720092774, 0.05989328002929688, 0.05986383819580078, 0.05993878555297852, 0.060006401062011716, 0.060208927154541014, 0.060286624908447266, 0.0602957763671875, 0.06065331268310547, 0.060286624908447266, 0.060152416229248044, 0.06005759811401367, 0.059968990325927736, 0.06016400146484375, 0.06001318359375, 0.05997568130493164, 0.06000624084472656, 0.059977664947509765, 0.05997795104980469, 0.05996953582763672, 0.06339379119873047, 0.06111135864257813, 0.060154815673828126, 0.05997158432006836, 0.05990195083618164, 0.059641857147216794, 0.059650047302246094, 0.05968809509277344, 
0.059790176391601564, 0.059650047302246094, 0.05960713577270508, 0.059734977722167966, 0.059864032745361326, 0.05972342300415039, 0.05975462341308594, 0.06015558242797851, 0.060430881500244144, 0.060453887939453124, 0.06035779190063477, 0.06001651382446289, 0.05999763107299805, 0.059845024108886716, 0.059758720397949217, 0.05971926498413086, 0.059705760955810545, 0.05980976104736328, 0.05978953552246094, 0.059789119720458986, 0.059885150909423826, 0.05984912109375, 0.06002435302734375, 0.060174880981445314, 0.060418014526367185, 0.06047292709350586, 0.0604716796875, 0.060440574645996094, 0.060313697814941405, 0.060061119079589845, 0.05993695831298828, 0.05988560104370117, 0.0599288330078125, 0.059864734649658205, 0.059867488861083985, 0.05997273635864258, 0.0600052490234375, 0.06005964660644531, 0.060083232879638675, 0.060194816589355465, 0.060310497283935546, 0.060307392120361326, 0.06039878463745117, 0.060531585693359376, 0.06043033599853516, 0.060286369323730465, 0.06007049560546875, 0.0599469108581543, 0.06004336166381836, 0.05995727920532227, 0.06003244781494141, 0.06012988662719727, 0.060100543975830076, 0.060045310974121094, 0.060170238494873046, 0.06316419219970704, 0.060982177734375, 0.0602184944152832, 0.059963905334472656, 0.059801025390625, 0.05981836700439453, 0.05986908721923828, 0.0597154541015625, 0.05972351837158203, 0.05981203079223633, 0.059844894409179686, 0.059867359161376955, 0.0598961296081543, 0.059846111297607425, 0.05986361694335938, 0.06016988754272461, 0.06038150405883789, 0.06063673782348633, 0.06024035263061524, 0.06009996795654297, 0.06000191879272461, 0.05970608139038086, 0.05976294326782226, 0.0597088623046875, 0.05978521728515625, 0.059817951202392576, 0.05976943969726563, 0.05979340744018555, 0.0599101448059082, 0.0598364143371582, 0.05988761520385742, 0.0602501106262207, 0.060378494262695315, 0.060447486877441406, 0.06061580657958984, 0.06034438323974609, 0.060244670867919924, 0.06015939331054688, 0.06006435012817383, 0.059948448181152345, 0.05991279983520508, 0.05995436859130859, 0.05993494415283203, 0.05997628784179688, 0.06001049423217773, 0.06022553634643555, 0.06011084747314453, 0.060442623138427735, 0.06031324768066406, 0.06033615875244141, 0.06035836791992188, 0.06048214340209961, 0.06036860656738281, 0.06027468872070312, 0.060182815551757814, 0.060252159118652344, 0.06006095886230469, 0.06004790496826172, 0.060106975555419925, 0.06014358520507813, 0.06017020797729492, 0.060061729431152344, 0.060352512359619144, 0.06375628662109376, 0.06122883224487305, 0.060248126983642576, 0.05996764755249023, 0.05972937774658203, 0.05971200180053711, 0.05960297775268555, 0.05962137603759766, 0.059701248168945314, 0.05964908981323242, 0.0596278076171875, 0.05965891265869141, 0.05970739364624023, 0.06019276809692383, 0.059617279052734375, 0.06013951873779297, 0.060524543762207034, 0.06042009735107422, 0.06023273468017578, 0.06020105743408203, 0.05983116912841797, 0.05968281555175781, 0.059734016418457034, 0.05972515106201172, 0.05987311935424805, 0.059841377258300785, 0.05987939071655273, 0.05996057510375977, 0.05987004852294922, 0.05996656036376953, 0.06011318588256836, 0.06030089569091797, 0.06046780776977539, 0.060625247955322266, 0.060526592254638675, 0.060477439880371096, 0.060260353088378904, 0.060251937866210935, 0.06003324890136719, 0.06011904144287109, 0.06006790542602539, 0.0600302734375, 0.060023231506347655, 0.05996476745605469, 0.05997654342651367, 0.06007603073120117, 0.06017228698730469, 0.060194816589355465, 0.060432384490966794, 
0.06048694229125977, 0.06073417663574219, 0.06060435104370117, 0.06046112060546875, 0.06021459197998047, 0.060182369232177735, 0.06007279968261719, 0.06005702209472656, 0.06004550552368164, 0.06007846450805664, 0.060032863616943356, 0.06010476684570312, 0.06001980972290039, 0.060044288635253906, 0.06384896087646484, 0.06162166213989258, 0.060230369567871096, 0.059891712188720705, 0.05964595031738281, 0.05955583953857422, 0.05950259017944336, 0.05963161468505859, 0.05960294342041016, 0.05948825454711914, 0.059520065307617186, 0.059653057098388675, 0.0596861457824707, 0.059714305877685545, 0.05963337707519531, 0.060081790924072266, 0.06044460678100586, 0.06057033538818359, 0.060348094940185545, 0.06006950378417969, 0.059854881286621094, 0.059740192413330076, 0.059722145080566405, 0.059660511016845705, 0.059719680786132816, 0.05975043106079102, 0.05976470565795899, 0.05971148681640625, 0.05980271911621094, 0.059818912506103515, 0.05989388656616211, 0.06011068725585938, 0.06025331115722656, 0.06049270248413086, 0.06058303833007812, 0.060537376403808595, 0.06040524673461914, 0.060087135314941406, 0.0599736328125, 0.059928607940673825, 0.05987529754638672, 0.05991763305664063, 0.059953601837158206, 0.059887905120849606, 0.06008979034423828, 0.059970081329345705, 0.05992038345336914, 0.059924480438232425, 0.06015702438354492, 0.060305343627929685, 0.06030435180664063, 0.060286495208740236, 0.060260223388671874, 0.06030556869506836, 0.06034377670288086, 0.06000703811645508, 0.05993619155883789, 0.05998479843139649, 0.05995695877075195, 0.05997903823852539, 0.0601610221862793, 0.06001811218261719, 0.06017267227172852]",tokens/s,16.631337574707533,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model 
self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 75907 has 14.69 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 1.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has <class '…'> and you passed <class '…'>. Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 272, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, 
in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 406.12 MiB is free. Process 78866 has 14.34 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 1.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: 
CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,790.188032,2266.89024,0.0,1864.368128,1714.486272,s,1,7.74055126953125,7.74055126953125,0.0,7.74055126953125,7.74055126953125,7.74055126953125,7.74055126953125,[7.74055126953125],,kWh,4.678508799967555e-06,5.086050564963081e-07,9.6333410398719e-07,6.1504479604510534e-06,,MB,1230.790656,2283.667456,0.0,1868.562432,1692.384256,s,10,0.21691036796569824,0.021691036796569824,0.0006454393369824417,0.02154745578765869,0.022083401489257814,0.022766436958312986,0.02331286533355713,"[0.023449472427368164, 0.021693920135498045, 0.021931615829467774, 0.021215776443481445, 0.0211297607421875, 0.02175699234008789, 0.021276159286499022, 0.02181171226501465, 0.021243967056274415, 0.021400991439819335]",tokens/s,11802.109894557152,kWh,7.562938229599305e-07,8.340566908037451e-08,4.998442398290078e-07,1.339543731869313e-06,tokens/kWh,191109848.7563044,MB,1258.094592,2432.565248,0.0,2017.460224,1714.451968,s,10,12.328065429687502,1.23280654296875,0.010431314273765363,1.2300000610351562,1.2461649169921876,1.25026171875,1.25353916015625,"[1.2452545166015625, 1.2413148193359376, 1.2543585205078125, 1.227635498046875, 1.2298369140625, 1.2194002685546874, 1.23022021484375, 1.229609130859375, 1.2202723388671874, 1.2301632080078124]",tokens/s,51.10290852958019,kWh,3.563326932871138e-05,3.929892122966499e-06,1.9867616498570115e-05,5.943077795024799e-05,tokens/kWh,1060056.7950286628,,s,630,12.320317493438715,0.019556059513394796,0.0004690303623632822,0.019499072074890135,0.019871692276000975,0.020071441078186033,0.022063179225921638,"[0.019394752502441406, 0.019859615325927733, 0.01990345573425293, 0.020123615264892578, 0.01977244758605957, 0.019652063369750977, 0.019624351501464844, 0.019646240234375, 0.019601472854614256, 0.019763071060180663, 0.020265279769897462, 0.020653247833251953, 0.01988483238220215, 0.019611648559570313, 0.019826591491699217, 0.023178335189819335, 0.01985024070739746, 0.01958502388000488, 0.019918848037719726, 0.019783679962158202, 0.019619840621948242, 0.01984921646118164, 0.019428640365600585, 0.019462560653686522, 0.01970832061767578, 0.0200263671875, 0.01972707176208496, 0.01969366455078125, 0.01969366455078125, 0.019615743637084963, 0.019833887100219726, 0.020080671310424805, 0.019662784576416015, 0.020001951217651366, 
0.019595104217529295, 0.019759103775024413, 0.019744096755981447, 0.019575456619262695, 0.01971609687805176, 0.01948886489868164, 0.01979587173461914, 0.019649919509887696, 0.019246912002563475, 0.019526464462280273, 0.01957683181762695, 0.019535743713378906, 0.01941926383972168, 0.01949193572998047, 0.019252063751220704, 0.01939462471008301, 0.019459327697753905, 0.020005311965942383, 0.019630399703979493, 0.019595264434814453, 0.019697664260864257, 0.019514591217041015, 0.019660608291625976, 0.01967407989501953, 0.019501056671142578, 0.0194334716796875, 0.019427135467529298, 0.0199048957824707, 0.019875551223754884, 0.01930076789855957, 0.01958502388000488, 0.01965875244140625, 0.019808063507080077, 0.020422847747802734, 0.019777536392211914, 0.019566112518310547, 0.019390655517578126, 0.019644063949584963, 0.01966748809814453, 0.019644159317016602, 0.01961404800415039, 0.020060159683227538, 0.019800064086914062, 0.019611583709716798, 0.019697599411010742, 0.019685504913330078, 0.019550207138061524, 0.019494047164916994, 0.019472671508789063, 0.019540159225463868, 0.019947616577148438, 0.019586591720581054, 0.019575551986694337, 0.01982054328918457, 0.024190752029418946, 0.020976959228515626, 0.01942211151123047, 0.019213823318481444, 0.019353439331054687, 0.019604127883911134, 0.01944576072692871, 0.019391679763793947, 0.019467071533203126, 0.0192675838470459, 0.019628032684326172, 0.019469440460205076, 0.019420032501220704, 0.01964851188659668, 0.019646303176879883, 0.019241119384765627, 0.019320831298828126, 0.019224576950073242, 0.019540031433105468, 0.019591104507446288, 0.01983875274658203, 0.019603679656982422, 0.019703424453735352, 0.020130176544189454, 0.019587295532226563, 0.019506975173950194, 0.01949875259399414, 0.01954636764526367, 0.019509248733520508, 0.019886079788208007, 0.01971971130371094, 0.019841503143310547, 0.019562496185302734, 0.019486719131469727, 0.019564287185668945, 0.019323135375976564, 0.019462303161621095, 0.019745792388916016, 0.01930227279663086, 0.01965126419067383, 0.019591232299804688, 0.019783679962158202, 0.019650911331176756, 0.01962972831726074, 0.019761152267456054, 0.020237663269042968, 0.01972515106201172, 0.019822399139404298, 0.019595104217529295, 0.01961382484436035, 0.019703840255737303, 0.019619840621948242, 0.019761152267456054, 0.02020147132873535, 0.019906591415405274, 0.01967305564880371, 0.019728479385375978, 0.01959516716003418, 0.019746719360351564, 0.019583263397216798, 0.01964627265930176, 0.020051551818847657, 0.019755424499511717, 0.019949087142944334, 0.01966716766357422, 0.019955968856811522, 0.01964998435974121, 0.019853631973266603, 0.01983647918701172, 0.020345535278320313, 0.01999785614013672, 0.0219553279876709, 0.02064188766479492, 0.02045270347595215, 0.02027372741699219, 0.020012800216674804, 0.0198123836517334, 0.020318464279174806, 0.019630144119262696, 0.020426752090454102, 0.019934560775756834, 0.019692192077636717, 0.01996419143676758, 0.019904224395751954, 0.019740575790405272, 0.020117599487304686, 0.019709951400756837, 0.01965603256225586, 0.019364288330078125, 0.019499391555786134, 0.02219375991821289, 0.020630847930908202, 0.01972867202758789, 0.019826431274414063, 0.019453887939453126, 0.01990550422668457, 0.01962396812438965, 0.019676351547241212, 0.01950172805786133, 0.01947238349914551, 0.01987126350402832, 0.01921232032775879, 0.019438207626342772, 0.019439456939697265, 0.019554304122924804, 0.019501344680786133, 0.019773151397705076, 0.01968332862854004, 0.019369983673095705, 0.019357568740844728, 
0.01938960075378418, 0.019643360137939454, 0.01985647964477539, 0.019598592758178712, 0.021550752639770507, 0.022891807556152343, 0.02058025550842285, 0.019335296630859374, 0.019200416564941408, 0.019167520523071288, 0.0192675838470459, 0.019705215454101564, 0.019825279235839845, 0.01943552017211914, 0.019574783325195313, 0.01949827194213867, 0.01937014389038086, 0.01942790412902832, 0.019513216018676758, 0.019052448272705077, 0.019322368621826173, 0.019238784790039064, 0.01918243217468262, 0.019298303604125978, 0.019420703887939452, 0.01962851142883301, 0.019326784133911132, 0.01928006362915039, 0.019277952194213868, 0.019287200927734376, 0.019345632553100588, 0.019513248443603515, 0.019523359298706053, 0.019323776245117188, 0.01938425636291504, 0.01964166450500488, 0.019288000106811524, 0.019358463287353515, 0.019002527236938477, 0.019123039245605468, 0.01908470344543457, 0.019429983139038084, 0.01927350425720215, 0.019218399047851564, 0.01879270362854004, 0.01891043281555176, 0.018866207122802733, 0.019213056564331053, 0.019427616119384764, 0.019238624572753906, 0.019325056076049806, 0.019223615646362303, 0.01957766342163086, 0.01934316825866699, 0.01885798454284668, 0.019503711700439453, 0.019591360092163085, 0.019570688247680663, 0.019554304122924804, 0.01965884780883789, 0.019437503814697266, 0.019218399047851564, 0.019687679290771483, 0.019884960174560547, 0.019252063751220704, 0.01923481559753418, 0.019492191314697267, 0.020271648406982423, 0.019760704040527342, 0.01999110412597656, 0.019628000259399415, 0.01959119987487793, 0.01951862335205078, 0.019846080780029297, 0.019382015228271484, 0.019484832763671876, 0.019725568771362306, 0.019622112274169923, 0.019579328536987305, 0.01996099281311035, 0.019677183151245118, 0.019563199996948243, 0.01943987274169922, 0.019658720016479492, 0.019438783645629884, 0.020261472702026367, 0.01945363235473633, 0.01951206398010254, 0.01945795249938965, 0.01937295913696289, 0.019206432342529296, 0.01972684860229492, 0.019103904724121094, 0.01910585594177246, 0.01920204734802246, 0.019165184020996092, 0.01946118354797363, 0.0193504638671875, 0.019449951171875, 0.019271711349487304, 0.019293344497680665, 0.01934409523010254, 0.019768608093261718, 0.01950931167602539, 0.019567264556884765, 0.019411039352416993, 0.01944156837463379, 0.01965056037902832, 0.01939263916015625, 0.0194881591796875, 0.01956092834472656, 0.01945599937438965, 0.019539968490600586, 0.01961177635192871, 0.019392383575439452, 0.01919152069091797, 0.019286304473876952, 0.018965887069702148, 0.019431360244750978, 0.01936249542236328, 0.01927987289428711, 0.01922470474243164, 0.01933299255371094, 0.01936387252807617, 0.019465599060058594, 0.0196409912109375, 0.019451839447021484, 0.019300352096557616, 0.019474431991577147, 0.01931817626953125, 0.019253311157226564, 0.019317279815673827, 0.019166336059570313, 0.019411840438842774, 0.01990870475769043, 0.01952457618713379, 0.01961689567565918, 0.019551200866699217, 0.01933817672729492, 0.01923843193054199, 0.01960767936706543, 0.019161344528198242, 0.019132415771484376, 0.01961577606201172, 0.019304000854492187, 0.01925980758666992, 0.01923072052001953, 0.0191016960144043, 0.019087360382080077, 0.01903366470336914, 0.018993120193481445, 0.019681760787963867, 0.0191810245513916, 0.019018272399902343, 0.01907711982727051, 0.018907136917114258, 0.01906287956237793, 0.019418495178222656, 0.019257856369018556, 0.019288095474243164, 0.019077184677124024, 0.01910163116455078, 0.019224447250366213, 0.01919808006286621, 0.019047807693481446, 
0.0190732479095459, 0.018837919235229494, 0.019322656631469728, 0.01894790458679199, 0.019169696807861326, 0.019972288131713867, 0.019519039154052734, 0.01922915267944336, 0.019224096298217773, 0.01930179214477539, 0.01936636734008789, 0.02230847930908203, 0.019333568572998047, 0.019407167434692382, 0.01963007926940918, 0.01892953681945801, 0.019128448486328126, 0.01904844856262207, 0.019267295837402342, 0.01936630439758301, 0.01955401611328125, 0.019494335174560548, 0.020488927841186524, 0.019579904556274414, 0.01942835235595703, 0.01940656089782715, 0.01954025650024414, 0.019523647308349608, 0.019478431701660158, 0.019822751998901367, 0.019789695739746094, 0.019661888122558594, 0.019446720123291017, 0.019488895416259765, 0.01918979263305664, 0.01930633544921875, 0.019167327880859376, 0.01950262451171875, 0.019189184188842773, 0.01925654411315918, 0.019180479049682616, 0.019351808547973633, 0.019450016021728515, 0.019500511169433594, 0.0197860164642334, 0.01951603126525879, 0.019376127243041993, 0.019462112426757813, 0.019447551727294923, 0.01975529670715332, 0.019408895492553712, 0.019731552124023437, 0.01957539176940918, 0.01941689682006836, 0.019433984756469725, 0.019332639694213866, 0.019641088485717773, 0.019805919647216796, 0.019951616287231445, 0.019757055282592775, 0.01962598419189453, 0.019693216323852538, 0.019369600296020507, 0.019608095169067384, 0.019374271392822266, 0.01981439971923828, 0.01950284767150879, 0.0194192008972168, 0.019493024826049806, 0.01945523262023926, 0.019454463958740235, 0.019816864013671876, 0.019910816192626954, 0.019724063873291016, 0.019578880310058593, 0.019492799758911133, 0.0196177921295166, 0.019513151168823243, 0.019163135528564454, 0.01956617546081543, 0.019423967361450196, 0.019414880752563476, 0.019418464660644532, 0.019268096923828124, 0.019718143463134767, 0.01957683181762695, 0.01947216033935547, 0.019527488708496094, 0.01945008087158203, 0.019462015151977537, 0.01952115249633789, 0.019692352294921875, 0.019582847595214843, 0.01941094398498535, 0.01927587127685547, 0.01934489631652832, 0.019294624328613282, 0.019765024185180665, 0.01925142478942871, 0.019191808700561523, 0.019105791091918945, 0.01919548797607422, 0.019218847274780272, 0.019717472076416016, 0.019552640914916992, 0.019456287384033204, 0.019735904693603517, 0.019446432113647463, 0.019357183456420898, 0.019607967376708984, 0.019396480560302735, 0.01927190399169922, 0.019449535369873046, 0.019597631454467773, 0.019619104385375976, 0.01977622413635254, 0.01945919990539551, 0.019421152114868164, 0.019476896286010743, 0.019638784408569337, 0.01949286460876465, 0.01971609687805176, 0.019383712768554686, 0.019898975372314453, 0.01933625602722168, 0.019413951873779298, 0.019486751556396485, 0.02210723114013672, 0.01950979232788086, 0.019302047729492188, 0.01960006332397461, 0.019503231048583983, 0.019410816192626953, 0.01970809555053711, 0.019328832626342773, 0.019283967971801756, 0.01940399932861328, 0.019180320739746095, 0.019533567428588867, 0.019458303451538084, 0.019449855804443358, 0.018933759689331055, 0.01940275192260742, 0.019519680023193358, 0.01935545539855957, 0.019568288803100586, 0.019783519744873048, 0.019204063415527342, 0.019208351135253907, 0.01926355171203613, 0.01946860885620117, 0.019780927658081055, 0.019319488525390626, 0.019304128646850587, 0.019370304107666016, 0.019535903930664063, 0.019759296417236328, 0.01944704055786133, 0.019621952056884766, 0.019361536026000978, 0.01916387176513672, 0.019290111541748048, 0.019186847686767577, 0.018932575225830077, 
0.018763776779174804, 0.018920671463012694, 0.01958086395263672, 0.019198047637939454, 0.019056735992431642, 0.019251392364501952, 0.01935408020019531, 0.01963827133178711, 0.019853311538696287, 0.019568639755249022, 0.019494943618774414, 0.019268672943115236, 0.01962396812438965, 0.019282272338867187, 0.018948640823364258, 0.01902079963684082, 0.01888768005371094, 0.01923686408996582, 0.019306495666503908, 0.019121952056884765, 0.019113983154296875, 0.019099359512329103, 0.019673599243164062, 0.01930006408691406, 0.019498527526855467, 0.01950182342529297, 0.01939455986022949, 0.01981001663208008, 0.019506912231445312, 0.01947500801086426, 0.019503103256225587, 0.01947238349914551, 0.019793920516967774, 0.019333120346069335, 0.019336223602294922, 0.019401695251464842, 0.019392511367797852, 0.0192325439453125, 0.01936787223815918, 0.019170656204223632, 0.01887887954711914, 0.019328927993774413, 0.019368255615234375, 0.019496639251708983, 0.019552352905273438, 0.019395904541015627, 0.019456703186035155, 0.019750911712646483, 0.019527679443359376, 0.01942937660217285, 0.019478527069091797, 0.01950480079650879, 0.019562847137451173, 0.019721248626708984, 0.01947750473022461, 0.019936288833618164, 0.019284927368164062, 0.019248479843139647, 0.019622560501098632, 0.01956038475036621, 0.019807424545288086, 0.019578752517700197, 0.01956092834472656, 0.020107616424560548, 0.02213104057312012, 0.02078704071044922, 0.01927484893798828, 0.019346656799316405, 0.0192708797454834, 0.01951318359375, 0.019528575897216797, 0.020041471481323243, 0.019482816696166992, 0.0195479679107666, 0.019539167404174804, 0.019325727462768554, 0.019464000701904297, 0.019622079849243163, 0.019795167922973634, 0.019656511306762697, 0.01954915237426758, 0.01945599937438965, 0.019461952209472656, 0.0194071044921875, 0.019285951614379883, 0.0193055362701416, 0.019615840911865235, 0.019507232666015624, 0.019364671707153322, 0.01898624038696289, 0.01861417579650879, 0.018652000427246095, 0.01906483268737793, 0.01928105545043945, 0.019360160827636717, 0.01945427131652832, 0.01931865692138672, 0.019461568832397462, 0.019297344207763672, 0.01933695983886719, 0.019498207092285155, 0.01961859130859375, 0.019525632858276368]",tokens/s,51.13504585701718,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = 
worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 159512 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 1.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 354, in __init__ self.v_proj = 
nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 156706 has 14.74 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 90.39 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 353, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 164320 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,899.354624,15848.046592,0.0,15445.52448,15444.76416,s,1,7.68470556640625,7.68470556640625,0.0,7.68470556640625,7.68470556640625,7.68470556640625,7.68470556640625,[7.68470556640625],,kWh,9.175855208331995e-06,1.0044520224132192e-06,4.476948025997768e-06,1.4657255256742982e-05,,MB,1343.049728,16053.567488,0.0,15638.462464,15612.9408,s,10,1.9430299682617187,0.19430299682617186,0.0041804764787503656,0.19587385559082032,0.1972969512939453,0.1975438690185547,0.19774140319824218,"[0.18255075073242188, 0.19396981811523437, 0.1932409210205078, 0.19361410522460937, 0.19779078674316405, 0.19724208068847657, 0.19608262634277343, 0.19640054321289063, 0.19647325134277344, 
0.1956650848388672]",tokens/s,1317.5298589399717,kWh,5.623931640384682e-06,6.202113478733541e-07,3.7612797184230656e-06,1.0005422706681103e-05,tokens/kWh,25586125.39468787,MB,1367.425024,16055.66464,0.0,15640.559616,15627.385344,s,10,39.65585546875,3.965585546875,0.003637779020966907,3.967002197265625,3.9684305419921873,3.969508850097656,3.970371496582031,"[3.959125732421875, 3.9593994140625, 3.9645400390625, 3.963926025390625, 3.966466064453125, 3.967900146484375, 3.96819091796875, 3.967538330078125, 3.968181640625, 3.970587158203125]",tokens/s,15.886682875785112,kWh,0.00011598700759086503,1.2793632800457899e-05,7.69337848375768e-05,0.00020571442522889974,tokens/kWh,306249.7923026035,,s,630,39.652738430023206,0.06294085465083046,0.0004902808169269491,0.06283251190185547,0.06331190643310547,0.06351374092102051,0.06597822845458985,"[0.06549504089355469, 0.06306972885131835, 0.06291865539550781, 0.06265494537353515, 0.06260227203369141, 0.06282134246826172, 0.06258483123779297, 0.06265449523925781, 0.06262355041503906, 0.06251939010620117, 0.06259721755981446, 0.06316787338256837, 0.06277180862426758, 0.06263417434692382, 0.06276220703125, 0.06317526245117187, 0.06299244689941406, 0.06283631896972657, 0.06272211074829101, 0.06254751968383788, 0.06298457717895507, 0.06290864181518555, 0.06285094451904297, 0.06259699249267578, 0.06256268692016602, 0.06249267196655273, 0.06268518447875976, 0.06269852828979493, 0.0626800308227539, 0.06292240142822265, 0.06308646392822266, 0.06308095932006835, 0.0629125747680664, 0.0629062728881836, 0.06267244720458984, 0.06268767929077149, 0.06271180725097657, 0.06269468688964844, 0.06275878524780273, 0.06268195343017578, 0.06269500732421875, 0.0626794548034668, 0.06280131149291993, 0.06274083328247071, 0.06273427200317383, 0.06290070343017579, 0.06282633590698242, 0.06307190322875976, 0.06313999938964844, 0.06296361541748047, 0.06289231872558594, 0.0628016014099121, 0.06260153579711913, 0.06267712020874024, 0.06294655990600585, 0.06259088134765625, 0.06290505599975586, 0.06272409439086914, 0.06305971145629882, 0.06278179168701171, 0.06297385787963868, 0.06279334259033204, 0.06281196975708007, 0.06600326538085938, 0.06307980728149414, 0.06252928161621094, 0.06267299270629882, 0.06258480072021484, 0.06253766250610351, 0.06254476928710938, 0.06255206298828125, 0.06249881744384766, 0.0627632942199707, 0.06258358383178711, 0.0625591697692871, 0.06249062347412109, 0.06265241622924805, 0.06320742416381836, 0.06355558395385742, 0.06362937545776368, 0.06315615844726563, 0.06271177673339844, 0.06244559860229492, 0.062481792449951175, 0.062472545623779296, 0.06245171356201172, 0.06263123321533202, 0.06261859130859375, 0.06259487915039062, 0.06253587341308593, 0.06257452774047852, 0.06251529693603515, 0.06277241516113281, 0.06314064025878906, 0.06328067016601563, 0.06346390533447266, 0.06311737442016602, 0.06298412704467773, 0.06272000122070312, 0.06252339172363282, 0.06247423934936523, 0.06254710388183594, 0.06256726455688477, 0.062488510131835935, 0.06303545761108398, 0.06276220703125, 0.06279862213134765, 0.06283257675170899, 0.06303955078125, 0.0629227523803711, 0.06314713668823242, 0.06311382293701172, 0.06339408111572266, 0.06304729461669922, 0.06291289520263672, 0.06268928146362304, 0.0626954231262207, 0.06253472137451171, 0.0625304946899414, 0.06253724670410156, 0.06254435348510742, 0.06288588714599609, 0.0629552001953125, 0.06285043334960938, 0.0629277458190918, 0.06322540664672852, 0.06605615997314453, 0.06342659378051758, 0.06296899032592773, 
0.06257123184204101, 0.062646240234375, 0.06266483306884765, 0.06270732879638671, 0.06279206466674804, 0.0626536636352539, 0.06270441436767578, 0.0626688003540039, 0.06266265487670898, 0.06266191864013672, 0.06283756637573243, 0.0629389762878418, 0.06338156890869141, 0.06352896118164063, 0.06338956832885742, 0.06308467102050781, 0.06303513717651367, 0.06264239883422852, 0.06250211334228516, 0.06241974258422851, 0.06242870330810547, 0.06269347381591797, 0.06261708831787109, 0.06252022552490234, 0.06260025787353515, 0.06296467208862305, 0.06284038543701172, 0.06313113784790039, 0.06321206283569336, 0.06371577453613281, 0.06323356628417968, 0.06309318542480469, 0.06286966323852539, 0.06283244705200196, 0.0626319694519043, 0.06259507369995117, 0.06250700759887695, 0.062481792449951175, 0.06267967987060546, 0.06266854476928711, 0.06270102310180664, 0.06278224182128907, 0.06299964904785156, 0.06298044967651367, 0.06335721588134766, 0.06301929473876954, 0.06309068679809571, 0.0631022071838379, 0.06306687927246094, 0.0628070411682129, 0.06274764633178712, 0.06270771026611328, 0.06263606262207032, 0.06282236862182618, 0.06254569625854492, 0.0627339859008789, 0.06309945678710938, 0.06318899154663087, 0.06334265518188477, 0.06322310256958008, 0.06664918518066407, 0.06346809768676757, 0.06276105499267579, 0.06256006240844726, 0.06245212936401367, 0.06247423934936523, 0.06270566558837891, 0.06274457550048829, 0.06259097671508788, 0.06261072158813477, 0.06267721557617187, 0.06260179138183594, 0.06261958312988282, 0.06269337463378906, 0.06299772644042968, 0.06357788848876954, 0.0637122573852539, 0.06350207901000976, 0.06297151947021484, 0.06279641723632813, 0.0626723518371582, 0.06269302368164062, 0.06266876983642578, 0.06260521697998046, 0.06261254501342774, 0.06271177673339844, 0.06275888061523438, 0.06262982559204101, 0.06272332763671876, 0.06302396774291992, 0.06307164764404297, 0.06308678436279297, 0.06311523056030273, 0.06326675033569336, 0.06300105667114257, 0.0628059196472168, 0.06272419357299805, 0.06264217758178711, 0.06253337478637695, 0.06254959869384766, 0.06253535842895508, 0.06258787155151367, 0.06281216049194335, 0.06274867248535156, 0.06286054229736328, 0.06300310516357421, 0.06309286499023438, 0.06293929672241211, 0.06304959869384766, 0.06298432159423828, 0.06314169692993164, 0.06309273529052735, 0.06278982543945312, 0.06270156860351563, 0.06265241622924805, 0.06261350250244141, 0.06261964797973633, 0.06290838241577149, 0.0627999038696289, 0.06309225463867188, 0.06305635070800782, 0.06330572891235352, 0.06314787292480468, 0.0662548828125, 0.06353510284423829, 0.0628900146484375, 0.06255958557128906, 0.06273292922973633, 0.06267286300659179, 0.06276099014282227, 0.06270758438110352, 0.06286713409423828, 0.06289862442016601, 0.06264985656738281, 0.06263808059692383, 0.06276966476440429, 0.06283062362670898, 0.06324016189575195, 0.064, 0.0635863037109375, 0.06318460845947266, 0.06285481643676757, 0.06270425415039063, 0.06253353500366211, 0.06251238250732422, 0.0626328010559082, 0.06268108749389649, 0.06270556640625, 0.06264179229736327, 0.0627798728942871, 0.06263398361206055, 0.06270544052124023, 0.06301865768432617, 0.06305196762084961, 0.06315804672241211, 0.06351113510131835, 0.06319116973876954, 0.06291830444335937, 0.0627070083618164, 0.06265465545654297, 0.06273916625976562, 0.06273638534545899, 0.06277734375, 0.06259056091308594, 0.06277081680297851, 0.06281814575195313, 0.06262825775146484, 0.06301750564575195, 0.06301081466674804, 0.06309817504882813, 0.0630258560180664, 
0.06318489456176758, 0.06328700637817383, 0.06321340942382812, 0.06291283035278321, 0.06277337646484375, 0.06271295928955078, 0.0628433609008789, 0.06340208053588867, 0.06280019378662109, 0.06267289733886719, 0.06294473648071289, 0.06294992065429687, 0.06288703918457031, 0.06292294311523437, 0.06308524703979493, 0.06655907440185548, 0.06354396820068359, 0.06269945526123047, 0.06263190460205079, 0.06279404830932617, 0.06275276947021484, 0.06277030563354492, 0.06272655868530273, 0.06297232055664062, 0.06259100723266602, 0.06262377548217773, 0.06259302520751953, 0.06250691223144532, 0.06259312057495117, 0.06342451095581055, 0.06353308868408203, 0.0633732795715332, 0.0631091194152832, 0.06281212615966797, 0.0627663345336914, 0.06255628967285157, 0.06251971054077149, 0.06284723281860352, 0.06290022277832032, 0.0627507209777832, 0.06255535888671875, 0.06269990539550781, 0.06259910583496094, 0.0626014060974121, 0.06277148818969727, 0.06301286315917969, 0.0636497917175293, 0.06353715133666993, 0.0631308479309082, 0.06300547027587891, 0.0629893455505371, 0.06270153427124023, 0.06271488189697266, 0.06269478225708008, 0.06286809539794921, 0.06286544036865234, 0.06290444946289063, 0.06284092712402344, 0.0628856315612793, 0.06291206359863281, 0.06297190475463867, 0.06314633560180664, 0.06341641616821289, 0.06351587295532227, 0.06335308837890626, 0.06306185531616211, 0.06305977630615234, 0.06286016082763672, 0.06271699142456055, 0.06279033660888672, 0.06270083236694336, 0.0629708480834961, 0.06297103881835937, 0.06298297500610352, 0.06292633438110351, 0.06317110443115234, 0.06300467300415039, 0.06306777572631836, 0.0658222427368164, 0.06312787246704102, 0.06280969619750977, 0.06288777542114257, 0.06286000061035156, 0.06274662399291993, 0.06279593658447266, 0.06280176162719726, 0.06273638534545899, 0.06275686264038086, 0.0626396484375, 0.06276348876953125, 0.06276454544067382, 0.06310758590698243, 0.06337484741210937, 0.06342700958251953, 0.06328643035888672, 0.06317558288574218, 0.06285673522949219, 0.06274035263061524, 0.06302576065063477, 0.0629678077697754, 0.06278758239746093, 0.06261094284057617, 0.0625873908996582, 0.0629227523803711, 0.0627201271057129, 0.06257475280761719, 0.06284441757202149, 0.06327036666870117, 0.06338147354125977, 0.06328803253173829, 0.06349625778198242, 0.06318304061889649, 0.06286441421508789, 0.06280233764648438, 0.06273881530761719, 0.06300384140014649, 0.06284550476074219, 0.06272192001342773, 0.06269276809692383, 0.06286419296264649, 0.0626607666015625, 0.06274601745605468, 0.06275132751464843, 0.06311731338500977, 0.06325247955322266, 0.06323926544189454, 0.06317561721801758, 0.06337123107910156, 0.0631091194152832, 0.06286896133422852, 0.0627553596496582, 0.06296160125732422, 0.06312972640991212, 0.06298108673095704, 0.06290486526489258, 0.06276051330566407, 0.06272063827514648, 0.0627196159362793, 0.06284684753417968, 0.06291532897949219, 0.06319884872436524, 0.06591693115234375, 0.06332758331298828, 0.06286790466308594, 0.06269721603393555, 0.06257660675048828, 0.06268566513061523, 0.06272966384887696, 0.06272623825073242, 0.06277907180786133, 0.06275468826293945, 0.0628581428527832, 0.06267820739746094, 0.06270428848266602, 0.06276454544067382, 0.06318112182617187, 0.0636317138671875, 0.06329958343505859, 0.06320742416381836, 0.06326220703125, 0.06324684906005859, 0.06267004776000977, 0.06257545471191406, 0.06275206375122071, 0.06253222274780273, 0.06262169647216796, 0.06261452865600586, 0.06293104171752929, 0.06279852676391602, 0.06275455856323242, 
0.06308323287963867, 0.06331161499023437, 0.06327865600585937, 0.06311980819702148, 0.06298214340209961, 0.06344249725341797, 0.06318889617919922, 0.06288409423828124, 0.06304169464111328, 0.06262556838989258, 0.06268460845947266, 0.06267587280273437, 0.0626209602355957, 0.06280998229980468, 0.06283964920043945, 0.06286716842651367, 0.06313148880004883, 0.06333193588256836, 0.06314889526367187, 0.06312960052490234, 0.06305791854858399, 0.06297359848022462, 0.06318729782104492, 0.06301692962646484, 0.06282857513427734, 0.06283059310913086, 0.06284288024902343, 0.06294630432128906, 0.06268214416503906, 0.06257660675048828, 0.0628039665222168, 0.06296537780761718, 0.06300038528442382, 0.06317715072631835, 0.06641123199462891, 0.06342819213867187, 0.0629268798828125, 0.06257702255249023, 0.06263398361206055, 0.06263203048706055, 0.06294927978515626, 0.06282825469970703, 0.06273052978515625, 0.06277228927612305, 0.06270252990722656, 0.06261145782470703, 0.06278758239746093, 0.06283468627929688, 0.06317839813232422, 0.06391843032836914, 0.06371091079711914, 0.06330153656005859, 0.06303376007080078, 0.06257238388061523, 0.0626525764465332, 0.06250905609130859, 0.06253567886352539, 0.0627300796508789, 0.06270540618896485, 0.06265283203125, 0.06278656005859375, 0.06269440078735351, 0.06263555145263672, 0.06302972793579102, 0.06315008163452149, 0.0635596809387207, 0.06360883331298828, 0.0635260467529297, 0.06319913482666016, 0.06295161437988281, 0.06262233734130859, 0.06272627258300781, 0.06259487915039062, 0.06272556686401368, 0.06281676864624024, 0.06281654357910156, 0.06288790512084962, 0.0627319679260254, 0.06291263961791992, 0.06305814361572265, 0.06308451080322265, 0.06312960052490234, 0.06377401733398437, 0.06347436904907226, 0.06309478378295899, 0.06286131286621094, 0.0628408317565918, 0.06268422317504883, 0.06266361618041992, 0.06281398391723633, 0.06282006454467773, 0.06279951858520508, 0.06277606582641601, 0.06283683013916015, 0.06303529739379883, 0.06285043334960938, 0.06295542526245117, 0.06611289978027343, 0.06331452941894532, 0.06274457550048829, 0.06271702575683594, 0.06271683120727539, 0.0629043197631836, 0.06281795120239257, 0.06285696029663086, 0.06284963226318359, 0.06290227127075196, 0.06259302520751953, 0.06281558227539062, 0.06284531021118164, 0.0627182388305664, 0.06332188796997071, 0.06371964645385743, 0.06333171081542968, 0.06317094421386718, 0.06295491027832031, 0.06298710250854492, 0.06308044815063477, 0.06297769546508789, 0.062880126953125, 0.06278960037231446, 0.06270300674438477, 0.06277180862426758, 0.06278303909301758, 0.06297209548950196, 0.06293529510498047, 0.06314179229736328, 0.06336249542236329, 0.06339344024658203, 0.06311628723144531, 0.0629227523803711, 0.0628235206604004, 0.06325065612792968, 0.06307267379760742, 0.06291279983520508, 0.06281379318237304, 0.06275113677978515, 0.06269952011108398, 0.06274259185791016, 0.06320326232910156, 0.06307833480834961, 0.06298015975952148, 0.0633733139038086, 0.0631066551208496, 0.0630665283203125, 0.06307420730590821, 0.06285740661621093, 0.06295132827758788, 0.06313119888305664, 0.06309859085083008, 0.06294988632202149, 0.06278371047973633, 0.06271590423583985, 0.06278691101074219, 0.06262860870361328, 0.0626789436340332, 0.06282364654541016, 0.06314064025878906, 0.06329964828491211, 0.0632503662109375]",tokens/s,15.887931702668826,, 
float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,888.553472,7981.62944,0.0,7579.107328,7559.873536,s,1,7.6767763671875,7.6767763671875,0.0,7.6767763671875,7.6767763671875,7.6767763671875,7.6767763671875,[7.6767763671875],,kWh,6.3549673250463455e-06,6.932313320328843e-07,2.49611310798592e-06,9.54431176506515e-06,,MB,1308.667904,8235.384832,0.0,7820.279808,7764.228096,s,10,0.8190033226013184,0.08190033226013185,0.007355595993207662,0.08396497344970702,0.08681038665771484,0.0871225357055664,0.08737225494384765,"[0.08203327941894531, 0.08493315124511719, 0.08223334503173828, 0.08512694549560547, 0.08490636444091797, 0.0820465316772461, 0.08674102020263671, 0.08302358245849609, 0.08743468475341797, 0.06052441787719726]",tokens/s,3125.7504449053117,kWh,2.533359933728408e-06,2.7926479971154645e-07,1.6856526895172087e-06,4.498277422957163e-06,tokens/kWh,56910674.00456281,MB,1328.553984,8298.299392,0.0,7883.194368,7840.648704,s,10,19.827079223632815,1.9827079223632815,0.0021785477280367406,1.983622741699219,1.9850278198242186,1.9850837951660156,1.985128575439453,"[1.9794190673828125, 1.979141845703125, 1.981212158203125, 1.9837572021484375, 1.98348828125, 1.9838486328125, 1.985015380859375, 1.984930419921875, 1.98112646484375, 1.9851397705078124]",tokens/s,31.774725510203933,kWh,5.783829075585461e-05,6.379419641127124e-06,3.836951824348383e-05,0.00010258722864046557,tokens/kWh,614111.5305960183,,s,630,19.823370273590072,0.031465667100936644,0.00046897895051185954,0.03134144020080566,0.03170574684143067,0.03207600231170654,0.0340822193145752,"[0.0335992317199707, 0.03278054428100586, 0.03202217483520508, 0.03160960006713867, 0.03134611129760742, 0.03125263977050781, 0.031209888458251952, 0.031281152725219724, 0.03126067161560059, 0.031229951858520507, 0.03118284797668457, 0.03121766471862793, 0.03129875183105469, 0.031226688385009766, 0.03120332717895508, 0.03119024085998535, 0.031231935501098634, 0.031177568435668945, 0.031287296295166016, 0.03117670440673828, 0.031258655548095704, 0.031201248168945313, 0.031207231521606444, 0.031224000930786134, 0.03124140739440918, 0.031251264572143556, 0.0312295036315918, 0.03136966323852539, 0.031438848495483396, 0.03148764801025391, 0.03156355285644531, 0.03161555290222168, 0.03172473526000977, 0.03167315292358398, 0.03163340759277344, 0.03149516868591309, 0.03142348861694336, 0.03137289619445801, 0.03146384048461914, 0.03141836738586426, 0.03134592056274414, 0.031324928283691406, 0.03122380828857422, 0.03128495979309082, 0.03132406425476074, 0.03129587173461914, 0.031270912170410156, 0.03122790336608887, 0.0313093433380127, 0.031254911422729494, 0.03134883117675781, 0.031306880950927735, 0.031337343215942386, 0.031270912170410156, 0.031285247802734374, 0.03130118370056152, 0.031312320709228514, 0.03141398429870605, 0.03142399978637695, 0.031417119979858396, 0.03151667213439941, 0.03160479927062988, 
0.031464864730834964, 0.03387494277954101, 0.03307724761962891, 0.032302112579345704, 0.0317982406616211, 0.0314879035949707, 0.031342687606811526, 0.03128217506408691, 0.031290367126464845, 0.03121151924133301, 0.031202751159667967, 0.03119513511657715, 0.03115884780883789, 0.031192415237426756, 0.031242912292480468, 0.03119923210144043, 0.03116851234436035, 0.031089759826660155, 0.03113680076599121, 0.031157312393188478, 0.031217567443847655, 0.03127519989013672, 0.03107254409790039, 0.031193536758422853, 0.03119308853149414, 0.031322111129760744, 0.03122790336608887, 0.031297536849975584, 0.03130278396606445, 0.03137139129638672, 0.03130035209655762, 0.031512575149536134, 0.03159859275817871, 0.03164159965515137, 0.031714784622192384, 0.03164153671264648, 0.031570528030395506, 0.031459327697753905, 0.031417375564575196, 0.0313920955657959, 0.03133875274658203, 0.031221248626708983, 0.031183040618896485, 0.03109724807739258, 0.03111350440979004, 0.031318016052246093, 0.031268863677978515, 0.03126000022888183, 0.03127769660949707, 0.031309856414794925, 0.031268863677978515, 0.031389696121215824, 0.03131391906738281, 0.03132547187805176, 0.03131785583496094, 0.031347583770751954, 0.03129743957519531, 0.031357023239135744, 0.03126681518554687, 0.03136310386657715, 0.03140595245361328, 0.031490144729614256, 0.03165184020996094, 0.03148185539245606, 0.034236415863037106, 0.0334620475769043, 0.03251011276245117, 0.031983680725097656, 0.03156377601623535, 0.03134464073181152, 0.031239391326904297, 0.03117750358581543, 0.031121408462524414, 0.031162368774414063, 0.03122380828857422, 0.03120742416381836, 0.03120742416381836, 0.031233760833740236, 0.03112579154968262, 0.031155744552612306, 0.03108425521850586, 0.031174848556518555, 0.031176416397094727, 0.03114201545715332, 0.031193824768066408, 0.03119923210144043, 0.03136102485656738, 0.031287296295166016, 0.03123404884338379, 0.031272703170776364, 0.03127228736877442, 0.031247264862060548, 0.03137740707397461, 0.03129913520812988, 0.031451583862304684, 0.03152627182006836, 0.03161708831787109, 0.031705663681030276, 0.03170038414001465, 0.03158998489379883, 0.03141529655456543, 0.03146342468261719, 0.031426559448242186, 0.03137238311767578, 0.0313721923828125, 0.03136511993408203, 0.03132825660705566, 0.03129724884033203, 0.0313606071472168, 0.03130233573913574, 0.031315008163452146, 0.03133945655822754, 0.03136895942687988, 0.031359167098999025, 0.031434431076049804, 0.03128563117980957, 0.031244287490844725, 0.03130294418334961, 0.03132694435119629, 0.0313753604888916, 0.03141942405700684, 0.031310880661010745, 0.031416255950927736, 0.03139993667602539, 0.03145692825317383, 0.03145699119567871, 0.03147430419921875, 0.03447798538208008, 0.03337260818481445, 0.0325775032043457, 0.03193366432189942, 0.03151337623596191, 0.03131961631774902, 0.03125702476501465, 0.031100927352905275, 0.03123200035095215, 0.031227840423583984, 0.031143999099731444, 0.031180799484252928, 0.031151840209960938, 0.031225439071655273, 0.031187551498413086, 0.031174751281738283, 0.03127705574035645, 0.03117840003967285, 0.03130198478698731, 0.03123366355895996, 0.03121401596069336, 0.03126470375061035, 0.031406080245971676, 0.031297536849975584, 0.031233312606811524, 0.031285888671875, 0.031338592529296876, 0.03136620712280273, 0.03139449691772461, 0.0313919677734375, 0.03166985511779785, 0.0315928955078125, 0.03174991989135742, 0.03164591979980469, 0.031647552490234376, 0.03165407943725586, 0.031582399368286135, 0.03157148742675781, 0.03143503952026367, 
0.03143270492553711, 0.031432384490966796, 0.031637727737426756, 0.031488096237182614, 0.031348127365112305, 0.03137292861938477, 0.03134499168395996, 0.031352895736694336, 0.03135545539855957, 0.031373311996459964, 0.03132620811462403, 0.03132825660705566, 0.03133235168457031, 0.031297536849975584, 0.03131932830810547, 0.03132268714904785, 0.03126198387145996, 0.03131007957458496, 0.03134089660644531, 0.03135663986206055, 0.03145369529724121, 0.03156179237365723, 0.031687999725341795, 0.031569664001464846, 0.03417475128173828, 0.03323769760131836, 0.03250614547729492, 0.031868896484375, 0.03153830337524414, 0.031283327102661136, 0.03144985580444336, 0.031261760711669924, 0.031238143920898437, 0.031241151809692384, 0.031254528045654296, 0.031190271377563476, 0.03129820823669434, 0.031166559219360353, 0.03118284797668457, 0.03121561622619629, 0.031204416275024415, 0.03125276756286621, 0.03126748847961426, 0.031221759796142577, 0.03124019241333008, 0.0313384952545166, 0.03121561622619629, 0.03121670341491699, 0.03125139236450195, 0.031266399383544925, 0.0313657283782959, 0.03140537643432617, 0.0314967041015625, 0.031320064544677735, 0.031471616744995115, 0.031526912689208986, 0.032064544677734376, 0.03175712013244629, 0.03175235176086426, 0.03168870353698731, 0.031660032272338864, 0.031514623641967776, 0.031481983184814454, 0.03142598342895508, 0.031340480804443356, 0.031350496292114255, 0.03128585624694824, 0.031289535522460936, 0.03136307144165039, 0.03137126350402832, 0.03134838485717773, 0.03129971122741699, 0.031365343093872074, 0.03133030319213867, 0.03135078430175781, 0.03139993667602539, 0.031322111129760744, 0.03134464073181152, 0.031320064544677735, 0.031303680419921875, 0.031322015762329104, 0.03138515281677246, 0.031347232818603514, 0.031340543746948245, 0.03156991958618164, 0.031647743225097655, 0.03160054397583008, 0.03435129547119141, 0.03331660842895508, 0.03267631912231445, 0.03208537673950195, 0.031577728271484376, 0.03133744049072266, 0.03127705574035645, 0.03119308853149414, 0.031164415359497072, 0.031245952606201173, 0.031230335235595704, 0.03126470375061035, 0.03120748710632324, 0.03130572891235352, 0.031227392196655275, 0.03124684715270996, 0.03125862312316895, 0.031221120834350587, 0.03125312042236328, 0.03122601509094238, 0.031346527099609375, 0.03123404884338379, 0.031268863677978515, 0.03121894454956055, 0.03119539260864258, 0.03128166389465332, 0.03127078437805176, 0.031434879302978516, 0.03143251228332519, 0.03132844734191895, 0.03151872062683105, 0.03166399955749512, 0.03175164794921875, 0.03179100799560547, 0.031741920471191405, 0.03177091217041016, 0.03165753555297852, 0.031581119537353514, 0.03147091293334961, 0.03135763168334961, 0.0313732795715332, 0.03130371284484863, 0.03129251289367676, 0.03134761619567871, 0.03138764762878418, 0.03137936019897461, 0.0313467845916748, 0.03134464073181152, 0.031352832794189454, 0.03133235168457031, 0.031332128524780276, 0.03142064094543457, 0.031375104904174805, 0.03133465576171875, 0.031407167434692386, 0.03141670417785645, 0.03133404731750488, 0.03131075286865234, 0.03136307144165039, 0.03128720092773438, 0.03149628829956055, 0.031528959274291994, 0.03144499206542969, 0.034328575134277346, 0.03347836685180664, 0.03263107299804688, 0.03189724731445313, 0.03144457626342773, 0.031273727416992185, 0.03129343986511231, 0.031246047973632812, 0.031254816055297854, 0.03125420761108398, 0.03122003173828125, 0.03125862312316895, 0.03126588821411133, 0.03123907279968262, 0.031263904571533205, 0.03127987289428711, 
0.031250528335571286, 0.031227392196655275, 0.031259136199951174, 0.031241792678833008, 0.031263168334960935, 0.03126457595825195, 0.03122604751586914, 0.031336544036865234, 0.03135683250427246, 0.03130982398986817, 0.03127840042114258, 0.0313187198638916, 0.03137945556640625, 0.03136006355285645, 0.03170188713073731, 0.03170012855529785, 0.03170348739624024, 0.03177273559570312, 0.031750560760498044, 0.03176022338867188, 0.031643648147583005, 0.03149225616455078, 0.03145113563537598, 0.031366943359375, 0.03138582420349121, 0.03127017593383789, 0.03129167938232422, 0.031346464157104494, 0.03138835144042969, 0.03136099243164062, 0.031441055297851565, 0.031362911224365235, 0.031376575469970705, 0.031359647750854494, 0.03174211120605469, 0.03139788818359375, 0.03134259223937988, 0.031419744491577146, 0.03134339141845703, 0.031399423599243165, 0.03145337677001953, 0.031333887100219726, 0.03139619255065918, 0.03144467163085937, 0.031596927642822265, 0.03151289558410644, 0.03164566421508789, 0.03452191925048828, 0.03360265731811524, 0.03277507019042969, 0.03213926315307617, 0.03162931251525879, 0.031393823623657224, 0.031296672821044924, 0.031269407272338866, 0.031248672485351563, 0.031343744277954104, 0.031294015884399416, 0.03128310394287109, 0.031250816345214844, 0.03131190490722656, 0.031238143920898437, 0.03124019241333008, 0.03123302459716797, 0.031251455307006834, 0.03127900886535644, 0.03125814437866211, 0.031242143630981444, 0.031205087661743163, 0.031253440856933594, 0.03121971130371094, 0.031268863677978515, 0.03132198333740235, 0.03140620803833008, 0.03128934478759766, 0.031281152725219724, 0.03133017539978027, 0.03144307136535644, 0.03162310409545899, 0.03165190315246582, 0.03165593528747559, 0.03172352027893066, 0.031655424118041994, 0.03163411140441894, 0.03150992012023926, 0.03150275230407715, 0.031715328216552735, 0.0313753604888916, 0.03149004745483398, 0.0313118724822998, 0.03129270362854004, 0.03134489631652832, 0.03151919937133789, 0.03138291168212891, 0.0313514232635498, 0.031307775497436525, 0.03130745506286621, 0.0313666877746582, 0.03132083129882812, 0.03144092750549316, 0.031399616241455076, 0.03135017585754395, 0.031312768936157226, 0.03138944053649902, 0.031313695907592774, 0.031275007247924806, 0.03141888046264649, 0.031448543548583986, 0.031523359298706054, 0.03153715133666992, 0.0341668815612793, 0.0332410888671875, 0.03250284957885742, 0.031830272674560546, 0.0314006404876709, 0.031301631927490234, 0.031231456756591797, 0.031246879577636718, 0.03118284797668457, 0.03121353530883789, 0.031208608627319338, 0.03130252838134766, 0.03126198387145996, 0.031212255477905272, 0.03118396759033203, 0.031217952728271485, 0.03116694450378418, 0.031166624069213868, 0.031214591979980468, 0.031304704666137696, 0.03126028823852539, 0.031234432220458984, 0.03131756782531738, 0.03124790382385254, 0.03132918357849121, 0.031252479553222655, 0.031190528869628906, 0.031232511520385742, 0.03136716842651367, 0.03128479957580566, 0.03155974388122559, 0.031744352340698244, 0.031721504211425784, 0.03171327972412109, 0.031598047256469725, 0.031524639129638675, 0.03148780822753906, 0.031554143905639646, 0.03148793601989746, 0.03133686447143555, 0.03133238410949707, 0.0312893123626709, 0.031178272247314454, 0.031242719650268556, 0.031294496536254886, 0.03127977561950684, 0.0316296329498291, 0.031178752899169923, 0.03122790336608887, 0.031322111129760744, 0.03129494476318359, 0.0314168643951416, 0.03128303909301758, 0.03129360008239746, 0.031221696853637695, 0.031348800659179686, 
0.031160320281982422, 0.031321439743041996, 0.03134041595458984, 0.031344831466674807, 0.03149398422241211, 0.031518943786621095, 0.03173622322082519, 0.03164275169372559, 0.03305561447143555, 0.03322470474243164, 0.03295584106445312, 0.03248803329467773, 0.03200611114501953, 0.0317071361541748, 0.0315160961151123, 0.03136569595336914, 0.03134668731689453, 0.03128319931030273, 0.03128895950317383, 0.03126617622375488, 0.03126169586181641, 0.03121887969970703, 0.03128960037231445, 0.03116089630126953, 0.031301631927490234, 0.03121561622619629, 0.03123404884338379, 0.031231071472167967, 0.03114896011352539, 0.03143475151062012, 0.0312741756439209, 0.03135980796813965, 0.031340639114379884, 0.031391647338867186, 0.03134198379516601, 0.031330911636352536, 0.031336448669433595, 0.03131974411010742, 0.031308095932006834, 0.03136851119995117, 0.03155446434020996, 0.031498016357421874, 0.031748096466064454, 0.03170649528503418, 0.0316210880279541, 0.03161734390258789, 0.03160819244384765, 0.03148220825195312, 0.031404672622680666, 0.03137126350402832, 0.031410112380981445, 0.03144636726379395, 0.031336992263793946, 0.031303871154785154, 0.03135814476013184, 0.031419200897216795, 0.03138319969177246, 0.03138390350341797, 0.03141782379150391, 0.031386144638061525, 0.03145833587646484, 0.03143984031677246, 0.03137286376953125, 0.03144905662536621, 0.0314680004119873, 0.03141427230834961, 0.03142419242858887, 0.03140640068054199, 0.03135897636413574, 0.031488000869750975]",tokens/s,31.780670557282825,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in 
load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in 
from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 717, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 188.12 MiB is free. Process 38516 has 14.55 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.45 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 755, in __init__ self.self_attn = MIXTRAL_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 349, in __init__ 
self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 72.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 38.12 MiB is free. Process 183048 has 14.70 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 41.65 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,811.880448,12523.077632,0.0,12127.830016,12122.08896,s,1,7.14912158203125,7.14912158203125,0.0,7.14912158203125,7.14912158203125,7.14912158203125,7.14912158203125,[7.14912158203125],,kWh,1.16733577624989e-05,1.2804078687359217e-06,6.025560375999724e-06,1.8979326007234543e-05,,MB,1144.537088,12697.141248,0.0,12289.31072,12248.586752,s,10,1.830730926513672,0.1830730926513672,0.004234504773003929,0.1828845748901367,0.18805665130615234,0.18874602279663086,0.18929751998901367,"[0.17489613342285157, 0.1831339569091797, 0.18228807067871095, 0.1866171875, 0.1813321533203125, 0.18943539428710937, 0.18263519287109375, 0.18790345764160157, 0.1776706848144531, 0.18481869506835938]",tokens/s,1398.3485846689125,kWh,5.5184299355345865e-06,6.085838316859708e-07,3.665511318150949e-06,9.792525085371505e-06,tokens/kWh,26142388.99243912,MB,1185.230848,12705.529856,0.0,12297.699328,12248.589312,s,10,33.465481201171876,3.3465481201171867,0.0032630793536641643,3.3474332275390624,3.3502978515625,3.3506911621093747,3.3510058105468747,"[3.341596923828125, 3.343175048828125, 3.341919189453125, 3.34682958984375, 3.345089111328125, 3.34810986328125, 3.348036865234375, 3.35021044921875, 3.3494296875, 3.35108447265625]",tokens/s,18.8253680325965,kWh,9.774854880654882e-05,1.0781801465979355e-05,6.493179355964916e-05,0.00017346214383217733,tokens/kWh,363191.6371387165,,s,630,33.46292974090575,0.053115761493501217,0.00048685466314468635,0.053035871505737305,0.053312567901611325,0.05350089111328125,0.05635084976196289,"[0.056864990234375, 0.05423715209960937, 0.05326361465454101, 0.05293033599853516, 0.05284262466430664, 0.05283107376098633, 0.05295718383789062, 0.05289363098144531, 0.05282003021240234, 0.05274323272705078, 0.05282502365112305, 0.052751937866210935, 0.05267910385131836, 0.05283225631713867, 0.052779006958007815, 0.05272371292114258, 0.05263679885864258, 
0.05282438278198242, 0.0535107192993164, 0.05336886215209961, 0.05309369659423828, 0.05307254409790039, 0.0529705924987793, 0.05297449493408203, 0.05297532653808594, 0.05300252914428711, 0.05307392120361328, 0.05286297607421875, 0.05279334259033203, 0.05274166488647461, 0.05284457778930664, 0.052873119354248044, 0.05281644821166992, 0.052743839263916015, 0.05269945526123047, 0.05284403228759765, 0.05301708984375, 0.05314963150024414, 0.0530203857421875, 0.05301283264160156, 0.052944896697998046, 0.052891681671142575, 0.053026782989501954, 0.05299609756469727, 0.052916160583496095, 0.05293231964111328, 0.05296572875976562, 0.05313238525390625, 0.05309123229980469, 0.053052703857421876, 0.05297020721435547, 0.052985855102539066, 0.05288959884643555, 0.05286902236938477, 0.05313455963134766, 0.053162879943847656, 0.05311283111572265, 0.05309772872924805, 0.05302899169921875, 0.05316636657714844, 0.05294457626342773, 0.05305001449584961, 0.05308415985107422, 0.056467041015625, 0.054403072357177736, 0.0535280647277832, 0.05318713760375977, 0.05289388656616211, 0.05288505554199219, 0.05284713745117187, 0.052832321166992186, 0.05285014343261719, 0.05286540985107422, 0.05286707305908203, 0.05284220886230469, 0.0527400016784668, 0.05282870483398437, 0.052762622833251956, 0.052715137481689454, 0.052660606384277345, 0.05270281600952149, 0.0529453125, 0.05316201782226562, 0.05312905502319336, 0.05314524841308594, 0.05304774475097656, 0.0531388168334961, 0.05305001449584961, 0.053055423736572266, 0.05290195083618164, 0.05292851257324219, 0.05284211349487305, 0.05306729507446289, 0.05284540939331055, 0.05289507293701172, 0.05293942260742188, 0.05271862411499023, 0.05274012756347656, 0.05263622283935547, 0.05269334411621094, 0.05305347061157226, 0.053233665466308595, 0.05320899200439453, 0.05298799896240235, 0.05314889526367188, 0.053203102111816405, 0.05304383850097656, 0.052822017669677736, 0.05291212844848633, 0.052975582122802733, 0.053225440979003905, 0.05330281448364258, 0.0531495361328125, 0.05318931198120117, 0.052929920196533205, 0.052974208831787106, 0.05295308685302735, 0.05294675064086914, 0.053211326599121096, 0.0535470085144043, 0.05303868865966797, 0.05334675216674805, 0.05318473434448242, 0.05291795349121094, 0.052813888549804684, 0.052851776123046874, 0.055715904235839844, 0.05353148651123047, 0.05297308731079101, 0.05286345672607422, 0.052832286834716795, 0.052912094116210937, 0.0530247688293457, 0.05293033599853516, 0.05299836730957031, 0.05322668838500977, 0.053012287139892575, 0.052853759765625, 0.0528524169921875, 0.052832576751708986, 0.052752384185791014, 0.05267865753173828, 0.05270249557495117, 0.05302345657348633, 0.05322956848144531, 0.05341603088378906, 0.053122974395751955, 0.05295820617675781, 0.052867488861083986, 0.05286137771606445, 0.05292252731323242, 0.053059585571289064, 0.052918270111083986, 0.053036064147949216, 0.05284534454345703, 0.05294918441772461, 0.053020160675048826, 0.05310310363769531, 0.05292031860351563, 0.05293056106567383, 0.05274012756347656, 0.052714656829833985, 0.05262828826904297, 0.053100543975830077, 0.05335836791992187, 0.05317244720458984, 0.05294899368286133, 0.052993759155273434, 0.05287964630126953, 0.05287321472167969, 0.052891647338867184, 0.05290115356445312, 0.05309513473510742, 0.053143520355224606, 0.05323574447631836, 0.053161087036132815, 0.05330745697021484, 0.05301536178588867, 0.05293625640869141, 0.05288924789428711, 0.053297119140625, 0.05317715072631836, 0.05311078262329102, 0.05330739212036133, 0.05312220764160156, 
0.05306777572631836, 0.0529252815246582, 0.052866943359375, 0.05293587112426758, 0.056655902862548825, 0.05414064025878906, 0.05344076919555664, 0.052944896697998046, 0.05295513534545898, 0.05284659194946289, 0.05285820770263672, 0.05287923049926758, 0.053007102966308596, 0.05298179244995117, 0.052980960845947264, 0.05284124755859375, 0.05298518371582031, 0.052865089416503905, 0.052811809539794925, 0.05284272003173828, 0.05272143936157227, 0.05312160110473633, 0.05324380874633789, 0.05327881622314453, 0.05345280075073242, 0.05321318435668945, 0.05305475234985352, 0.053080223083496095, 0.053084735870361326, 0.05314713668823242, 0.05302937698364258, 0.052989761352539064, 0.05285014343261719, 0.05298454284667969, 0.05296131134033203, 0.053069793701171875, 0.05297484970092774, 0.052996864318847654, 0.05294879913330078, 0.05306745529174805, 0.05318502426147461, 0.05300428771972656, 0.05295487976074219, 0.05323769760131836, 0.05321964645385742, 0.05329919815063477, 0.053231616973876954, 0.05295446395874023, 0.052955806732177736, 0.052803585052490234, 0.052951038360595705, 0.05320294570922852, 0.053082111358642575, 0.05303039932250977, 0.053142017364501956, 0.052942718505859375, 0.052975265502929685, 0.05321980667114258, 0.05318860626220703, 0.05314329528808594, 0.05297097778320312, 0.05296412658691406, 0.05295878219604492, 0.053193153381347655, 0.053180065155029294, 0.05315209579467774, 0.05312307357788086, 0.056371200561523435, 0.0541736946105957, 0.053305343627929686, 0.052893695831298826, 0.05288259124755859, 0.052873119354248044, 0.05304991912841797, 0.05293494415283203, 0.053004383087158206, 0.053008384704589843, 0.053135551452636716, 0.05288473510742187, 0.052789825439453125, 0.05284572982788086, 0.0528306884765625, 0.052687232971191406, 0.05269475173950195, 0.0530366096496582, 0.05316886520385742, 0.05358182525634766, 0.053571456909179686, 0.05307609558105469, 0.05293868637084961, 0.052813919067382815, 0.05291823959350586, 0.05297971343994141, 0.05294870376586914, 0.05304361724853516, 0.053026687622070315, 0.053016574859619144, 0.05291382217407226, 0.05283260726928711, 0.05283379364013672, 0.052942943572998044, 0.0528633918762207, 0.05272576141357422, 0.053030815124511715, 0.05316182327270508, 0.053106945037841795, 0.053305343627929686, 0.05326623916625976, 0.05307411193847656, 0.052852607727050784, 0.05303091049194336, 0.05287097549438476, 0.0529279670715332, 0.05338556671142578, 0.05338982391357422, 0.05334988784790039, 0.05313897705078125, 0.052910465240478516, 0.05277308654785156, 0.05284668731689453, 0.05298614501953125, 0.053300926208496094, 0.05319712066650391, 0.05299776077270508, 0.05318899154663086, 0.05328236770629883, 0.053182910919189454, 0.05289779281616211, 0.05291417694091797, 0.05286502456665039, 0.05668511962890625, 0.05410153579711914, 0.05316806411743164, 0.053093921661376955, 0.05300060653686523, 0.05288175964355469, 0.05303116989135742, 0.05304115295410156, 0.053026782989501954, 0.053120094299316405, 0.05307865524291992, 0.05292678451538086, 0.052665950775146485, 0.05266435241699219, 0.05273977661132812, 0.052798145294189455, 0.05281302261352539, 0.05330115127563476, 0.053334911346435546, 0.05331148910522461, 0.05330944061279297, 0.05312307357788086, 0.05293260955810547, 0.052910079956054686, 0.05291334533691406, 0.05299020767211914, 0.05304787063598633, 0.05302067184448242, 0.05297356796264648, 0.05306140899658203, 0.053037120819091794, 0.052908191680908205, 0.05283996963500977, 0.05287369537353516, 0.05329103851318359, 0.053086177825927734, 0.052995326995849606, 
0.05334092712402344, 0.0532880973815918, 0.05337990570068359, 0.052999488830566405, 0.05312176132202148, 0.053174304962158206, 0.05305750274658203, 0.05294899368286133, 0.05293414306640625, 0.05320755386352539, 0.05317196655273437, 0.05307606506347656, 0.05309455871582031, 0.053136608123779294, 0.05298278427124024, 0.05337644958496094, 0.053131168365478515, 0.05307027053833008, 0.05336678314208984, 0.05322137451171875, 0.05305865478515625, 0.05299609756469727, 0.05323196792602539, 0.053406272888183594, 0.053016609191894534, 0.0529653434753418, 0.05629951858520508, 0.05410380935668945, 0.05306803131103516, 0.05288550567626953, 0.05289539337158203, 0.052807838439941406, 0.052887649536132814, 0.052948577880859375, 0.053067905426025394, 0.0530456314086914, 0.05298755264282227, 0.05303932952880859, 0.05304537582397461, 0.05297151947021484, 0.0528353271484375, 0.0527534065246582, 0.052726879119873046, 0.05296419143676758, 0.053340160369873046, 0.05351007843017578, 0.05327990341186523, 0.053128158569335934, 0.0532644157409668, 0.053323486328125, 0.053114433288574216, 0.05308803176879883, 0.053025760650634766, 0.05293868637084961, 0.052983070373535154, 0.05329987335205078, 0.052865089416503905, 0.05293027114868164, 0.05305168151855469, 0.0530794563293457, 0.05292092895507813, 0.05306547164916992, 0.053096702575683594, 0.053065727233886716, 0.05304076766967773, 0.05316032028198242, 0.05304528045654297, 0.053357601165771484, 0.05313017654418945, 0.05288140869140625, 0.052883678436279294, 0.05304467010498047, 0.053182815551757814, 0.05312239837646485, 0.05307459259033203, 0.05325619125366211, 0.053147647857666014, 0.05316534423828125, 0.05300457763671875, 0.052964832305908205, 0.05299708938598633, 0.0532715835571289, 0.05310870361328125, 0.05336576080322265, 0.053192577362060546, 0.05320512008666992, 0.05330905532836914, 0.053117313385009766, 0.053028865814208986, 0.056301025390625, 0.053897727966308595, 0.053232929229736325, 0.052969566345214845, 0.052908382415771484, 0.052920639038085936, 0.05289267349243164, 0.05292035293579102, 0.05302479934692383, 0.05304617691040039, 0.0529958381652832, 0.05307398223876953, 0.05302076721191406, 0.05292246246337891, 0.05287936019897461, 0.05308415985107422, 0.0528056640625, 0.05342819213867187, 0.05339136123657227, 0.05354694366455078, 0.05331155014038086, 0.05326230239868164, 0.053053184509277346, 0.053269985198974606, 0.05316444778442383, 0.05303443145751953, 0.05299708938598633, 0.053098400115966796, 0.05294646453857422, 0.05297932815551758, 0.05306816101074219, 0.053008159637451174, 0.05287401580810547, 0.05287526321411133, 0.05312102508544922, 0.0531082878112793, 0.05306780624389648, 0.053090721130371096, 0.053106334686279295, 0.05354025650024414, 0.05325423812866211, 0.05323980712890625, 0.05317510223388672, 0.05299929428100586, 0.05294172668457031, 0.05297356796264648, 0.05306163024902344, 0.05306367874145508, 0.053267616271972656, 0.0533287353515625, 0.053245281219482424, 0.05309036636352539, 0.05287587356567383, 0.05310579299926758, 0.05298995208740234, 0.0532383041381836, 0.05329955291748047, 0.05344460678100586, 0.05337059020996094, 0.05336297607421875, 0.053139041900634766, 0.05319071960449219, 0.05306531143188477, 0.05669683074951172, 0.054657024383544923, 0.05362649536132812, 0.05325372695922852, 0.052972320556640626, 0.05289363098144531, 0.05284159851074219, 0.052951999664306644, 0.05321830368041992, 0.053064449310302735, 0.052977279663085936, 0.052783744812011715, 0.052891326904296876, 0.05301279830932617, 0.053036449432373046, 
0.052902496337890625, 0.05290595245361328, 0.05300537490844726, 0.053197792053222656, 0.053321727752685545, 0.053294654846191405, 0.05330579376220703, 0.05320054244995117, 0.05300259017944336, 0.05296332931518555, 0.05297257614135742, 0.052902240753173825, 0.05290252685546875, 0.05300390243530274, 0.053026206970214845, 0.053023712158203125, 0.05299817657470703, 0.052985824584960935, 0.05289295959472656, 0.05297020721435547, 0.0530513916015625, 0.053130592346191406, 0.05303567886352539, 0.053016574859619144, 0.053065727233886716, 0.05317622375488281, 0.05336687850952149, 0.05322956848144531, 0.05308006286621094, 0.053034526824951175, 0.05285279846191406, 0.05295759963989258, 0.053130817413330075, 0.053262367248535156, 0.053604766845703124, 0.05313689422607422, 0.05301094436645508, 0.053144607543945316, 0.05307468795776367, 0.05312124633789062, 0.053298686981201174, 0.053175807952880856, 0.05325904083251953, 0.05315804672241211, 0.05306540679931641, 0.05309686279296875, 0.05291999816894531, 0.05304899215698242, 0.05680332946777344, 0.05424284744262695, 0.05320751953125, 0.05307526397705078, 0.0529488639831543, 0.05304198455810547, 0.053000190734863284, 0.05297971343994141, 0.053133312225341796, 0.05317987060546875, 0.05297788619995117, 0.05281209564208984, 0.052838401794433595, 0.053040321350097654, 0.05312172698974609, 0.052932769775390624, 0.052889568328857425, 0.05323980712890625, 0.05367193603515625, 0.05363302230834961, 0.053123104095458985, 0.05322963333129883, 0.053041057586669924, 0.052870559692382815, 0.0529923210144043, 0.0530926399230957, 0.05303894424438477, 0.053097919464111326, 0.05287395095825195, 0.05286707305908203, 0.05289779281616211, 0.05301769638061524, 0.053095329284667966, 0.05312102508544922, 0.05290393447875977, 0.052981311798095704, 0.053106590270996096, 0.05324857711791992, 0.05337651062011719, 0.053281246185302736, 0.05300223922729492, 0.05304729461669922, 0.05309215927124023, 0.05313532638549805, 0.05302908706665039, 0.05312921524047851, 0.05320867156982422, 0.0531583023071289, 0.053110305786132815, 0.05327283096313477, 0.053357822418212894, 0.053049343109130856, 0.05308063888549805, 0.05295759963989258, 0.053171199798583986, 0.05327974319458008, 0.05348966217041016, 0.053471233367919924, 0.05323980712890625, 0.05322684860229492, 0.05314345550537109, 0.053108959197998046, 0.05303350448608399]",tokens/s,18.826803417331245,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) 
ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in <listcomp> [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 510, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 201, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 95648 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.727168,806.289408,0.0,411.041792,391.374848,s,1,7.34471484375,7.34471484375,0.0,7.34471484375,7.34471484375,7.34471484375,7.34471484375,[7.34471484375],,kWh,4.979785358329991e-06,5.420543576206471e-07,1.008334140004119e-06,6.530173855954757e-06,,MB,1157.623808,881.78688,0.0,473.956352,454.832128,s,18,0.17947494411468504,0.009970830228593614,0.00041730724994131976,0.009976736068725586,0.010160755348205566,0.010392897748947142,0.011255180959701535,"[0.010007264137268067, 0.009718848228454589, 0.01008521556854248, 0.010202688217163086, 0.009753952026367188, 0.009642751693725585, 0.009979840278625488, 0.009712639808654786, 0.009546496391296387, 0.009600768089294434, 0.009583616256713867, 0.010026559829711915, 0.009973631858825684, 0.010042240142822265, 0.009878175735473633, 0.010106719970703126, 0.011470751762390137, 0.010142784118652343]",tokens/s,25674.89307619159,kWh,2.869601984267479e-07,3.164608360747545e-08,1.79733564523107e-07,4.983398465573304e-07,tokens/kWh,513705660.4414013,MB,1186.168832,909.049856,0.0,501.219328,454.834688,s,18,10.204578491210937,0.5669210272894964,0.010994312838018253,0.5672750854492188,0.5786530029296875,0.5795169067382813,0.5801510864257813,"[0.5803096313476562, 0.5649026489257812, 0.5646107788085938, 0.5665227661132812, 0.5680274047851562, 0.57097021484375, 0.5630609741210938, 0.5465955810546875, 0.5445765380859375, 0.546088134765625, 0.5648844604492187, 0.5793770141601563, 0.5783427124023437, 0.5777233276367187, 0.5779256591796875, 0.5652088012695312, 0.57047607421875, 0.5749757690429688]",tokens/s,111.12658900871787,kWh,1.611264126638789e-05,1.7769285780013281e-06,7.406182998032452e-06,2.529575284242167e-05,tokens/kWh,2490536.6680507436,,s,1134,10.194637586593622,0.008989980235091384,0.0002737373905640035,0.009007359981536865,0.00931346254348755,0.009394726514816284,0.009587025899887087,"[0.009422847747802734, 0.009308064460754394, 0.009404512405395507, 0.009428159713745117, 0.009292608261108398, 0.00938361644744873, 0.009435456275939941, 0.009379743576049804, 0.009396320343017578, 0.009295647621154784, 0.009268896102905273, 0.009192000389099121, 0.009263104438781738, 0.009144319534301757, 0.009259008407592773, 0.009332736015319825, 0.009464159965515136, 0.009314208030700684, 0.009199359893798827, 0.009211903572082519, 0.009334783554077148, 0.009252863883972168, 0.009203488349914551, 0.009160672187805176, 0.00925705623626709, 0.009261216163635254, 0.009248671531677246, 0.009336928367614745, 0.009347295761108399, 0.009195296287536622, 0.009383935928344727, 0.009297599792480469, 0.00915443229675293, 0.009131775856018067, 0.00902233600616455, 0.009092960357666015, 0.00908022403717041, 
0.009120320320129395, 0.009250240325927734, 0.009259584426879883, 0.009268608093261719, 0.009378432273864746, 0.009406463623046875, 0.009397695541381836, 0.009359840393066407, 0.009346367835998536, 0.00921455955505371, 0.009047391891479493, 0.008957056045532227, 0.008928383827209473, 0.008945247650146485, 0.009150527954101562, 0.008868800163269044, 0.008819968223571777, 0.008755776405334472, 0.008980287551879882, 0.009119808197021484, 0.00893779182434082, 0.009252863883972168, 0.009072575569152832, 0.009037887573242188, 0.00888764762878418, 0.008878751754760742, 0.00851417636871338, 0.008738816261291504, 0.008873984336853028, 0.008887583732604981, 0.008769439697265626, 0.008719200134277344, 0.008769503593444824, 0.00890675163269043, 0.008815711975097656, 0.008790528297424317, 0.008802720069885254, 0.008859199523925781, 0.008688063621520995, 0.00896992015838623, 0.009326911926269532, 0.009486335754394531, 0.009266655921936035, 0.00935968017578125, 0.00910051155090332, 0.00890777587890625, 0.00890006446838379, 0.008946208000183106, 0.008869888305664063, 0.008855839729309083, 0.008856448173522949, 0.008761823654174805, 0.008763039588928222, 0.00882051181793213, 0.008808480262756348, 0.008811424255371094, 0.008935423851013183, 0.009224287986755371, 0.009137503623962403, 0.009251392364501954, 0.009261152267456055, 0.009158464431762695, 0.0088472318649292, 0.008841440200805665, 0.008857407569885255, 0.008853280067443847, 0.00895631980895996, 0.00889241600036621, 0.008771871566772461, 0.008901663780212402, 0.00903222370147705, 0.009186944007873536, 0.008790559768676758, 0.008695808410644532, 0.008619647979736328, 0.008677696228027344, 0.008582655906677245, 0.009029472351074219, 0.009464832305908203, 0.009360383987426758, 0.009278335571289063, 0.009143808364868163, 0.00928598403930664, 0.008988672256469727, 0.009065567970275879, 0.009099712371826172, 0.00906611156463623, 0.009079551696777344, 0.009041248321533203, 0.009126879692077637, 0.009217568397521973, 0.009429696083068848, 0.009115424156188965, 0.009019583702087402, 0.00899401569366455, 0.008919648170471191, 0.008761343955993652, 0.008764863967895508, 0.008771295547485352, 0.008888319969177246, 0.008825695991516114, 0.008867839813232421, 0.008828831672668456, 0.008788064002990722, 0.008734047889709473, 0.008690336227416991, 0.008882176399230958, 0.008814592361450196, 0.008681471824645997, 0.008668928146362305, 0.008591775894165038, 0.008654687881469727, 0.008755200386047364, 0.009107711791992188, 0.009412351608276368, 0.009325823783874511, 0.009179648399353027, 0.009319680213928223, 0.009153023719787597, 0.009011712074279785, 0.008804351806640624, 0.008818752288818359, 0.008839167594909669, 0.008951744079589844, 0.008853471755981446, 0.00882652759552002, 0.008806912422180176, 0.008750975608825684, 0.008720383644104004, 0.00908083152770996, 0.008998175621032715, 0.008946399688720703, 0.008967840194702148, 0.009050463676452637, 0.0090862398147583, 0.0090447998046875, 0.008990912437438965, 0.009237567901611328, 0.009147040367126465, 0.00903708839416504, 0.008970303535461426, 0.009054688453674317, 0.009038335800170898, 0.009017024040222168, 0.009053631782531739, 0.00890937614440918, 0.008950048446655274, 0.00892467212677002, 0.008765151977539062, 0.00876540756225586, 0.00897439956665039, 0.009310751914978028, 0.009320096015930176, 0.009319135665893555, 0.009262944221496582, 0.009048128128051757, 0.008931008338928223, 0.009074175834655761, 0.00903446388244629, 0.008959296226501465, 0.008919520378112793, 0.008843487739562988, 0.008808639526367188, 
0.008832832336425782, 0.008928832054138183, 0.00927519989013672, 0.009138879776000976, 0.008984512329101562, 0.008951871871948243, 0.008849344253540038, 0.008914943695068359, 0.008779135704040528, 0.00875331211090088, 0.008791520118713378, 0.008985152244567871, 0.008927680015563965, 0.008838303565979004, 0.008712512016296386, 0.008669599533081055, 0.008783391952514649, 0.008824895858764649, 0.008748831748962403, 0.008648960113525391, 0.00857369613647461, 0.008547167778015136, 0.008759488105773925, 0.008836959838867188, 0.009403264045715332, 0.009566207885742188, 0.009387264251708985, 0.009312864303588866, 0.009058655738830567, 0.008939328193664551, 0.009005311965942384, 0.009016160011291505, 0.008985088348388673, 0.008960639953613281, 0.008843168258666993, 0.009076607704162598, 0.008971551895141601, 0.009207615852355957, 0.009349023818969727, 0.009083392143249512, 0.009055007934570312, 0.009034815788269044, 0.009033632278442384, 0.00913702392578125, 0.0091278076171875, 0.009023327827453614, 0.009005215644836426, 0.009001055717468261, 0.009027487754821777, 0.00901910400390625, 0.008923423767089845, 0.008877344131469726, 0.008674719810485839, 0.00886025619506836, 0.00876364803314209, 0.00872217559814453, 0.00871014404296875, 0.008661151885986328, 0.008626015663146972, 0.0086080961227417, 0.00921987247467041, 0.009463680267333985, 0.009400383949279785, 0.00933071994781494, 0.009267040252685547, 0.00900102424621582, 0.009107456207275391, 0.009108672142028809, 0.009007136344909667, 0.008989695549011231, 0.009013119697570801, 0.008863648414611817, 0.008843263626098634, 0.008810336112976075, 0.009181216239929199, 0.009477472305297852, 0.009083680152893066, 0.008939519882202148, 0.008826784133911133, 0.008931424140930176, 0.008957951545715333, 0.009009152412414552, 0.008838399887084961, 0.008782272338867187, 0.008895808219909668, 0.00899772834777832, 0.009034048080444336, 0.008853119850158691, 0.008792384147644043, 0.008717311859130859, 0.00887833595275879, 0.008987584114074708, 0.009283295631408691, 0.008988672256469727, 0.009211135864257812, 0.009054783821105958, 0.009128128051757813, 0.009234687805175782, 0.009381407737731934, 0.009455615997314454, 0.00926534366607666, 0.00926681613922119, 0.00925654411315918, 0.00926585578918457, 0.009109951972961427, 0.009011936187744141, 0.008954560279846192, 0.008942943572998047, 0.008971199989318848, 0.008996864318847657, 0.008897695541381836, 0.008835647583007812, 0.009015071868896484, 0.008942303657531738, 0.008836992263793945, 0.008502176284790039, 0.00880784034729004, 0.009112064361572265, 0.010286944389343262, 0.010262528419494628, 0.010143744468688964, 0.009098943710327148, 0.0089965763092041, 0.009091903686523437, 0.008937248229980469, 0.008914943695068359, 0.008955904006958008, 0.009111488342285156, 0.00916431999206543, 0.00895798397064209, 0.008710783958435058, 0.008710016250610351, 0.008727999687194824, 0.009170880317687988, 0.009484928131103515, 0.009469311714172363, 0.00938976001739502, 0.009464832305908203, 0.009243583679199219, 0.009184479713439941, 0.009045568466186524, 0.008847583770751953, 0.008765439987182617, 0.008784992218017578, 0.008778656005859375, 0.009676223754882812, 0.008823712348937989, 0.008779328346252441, 0.008757344245910645, 0.008964096069335938, 0.008978336334228516, 0.00904412841796875, 0.008837056159973145, 0.008766528129577636, 0.00904854393005371, 0.009265536308288575, 0.009134176254272462, 0.008972384452819825, 0.008789919853210449, 0.008705663681030273, 0.008704000473022461, 0.008914336204528809, 
0.008825632095336914, 0.00870969581604004, 0.009134112358093261, 0.009363840103149414, 0.009013471603393554, 0.00879747200012207, 0.008737504005432129, 0.008683072090148927, 0.008843711853027344, 0.008763615608215331, 0.008873760223388672, 0.00910927963256836, 0.009344896316528321, 0.009257311820983887, 0.009274623870849609, 0.009327391624450683, 0.009162752151489258, 0.009096416473388672, 0.009007583618164063, 0.008894335746765137, 0.009071040153503417, 0.008959456443786621, 0.008854047775268555, 0.008851455688476563, 0.008901984214782714, 0.008896448135375977, 0.008819616317749024, 0.00894979190826416, 0.009021023750305175, 0.008957119941711425, 0.008804800033569336, 0.008788800239562989, 0.00903551959991455, 0.00921737575531006, 0.009126655578613281, 0.008935327529907227, 0.008725760459899902, 0.00873532772064209, 0.008755647659301757, 0.008812159538269042, 0.008861215591430664, 0.008902432441711426, 0.00890713596343994, 0.009146783828735352, 0.009086879730224609, 0.008912863731384278, 0.008903967857360839, 0.008766528129577636, 0.008671008110046386, 0.00861184024810791, 0.00863980770111084, 0.008569631576538086, 0.00886070442199707, 0.009313216209411621, 0.009289664268493652, 0.009326047897338867, 0.009230879783630372, 0.009174495697021485, 0.009066975593566895, 0.00911571216583252, 0.009011167526245117, 0.00894547176361084, 0.008866016387939452, 0.009027584075927735, 0.009191424369812011, 0.008917056083679199, 0.009504575729370118, 0.008814944267272948, 0.009017024040222168, 0.008899968147277832, 0.008815327644348144, 0.008678943634033202, 0.008673184394836426, 0.008649215698242188, 0.008609184265136719, 0.008622752189636231, 0.008914943695068359, 0.008976736068725587, 0.008713983535766601, 0.0084399995803833, 0.008846783638000488, 0.008683199882507325, 0.00888492774963379, 0.009137248039245606, 0.008833600044250487, 0.008669535636901855, 0.00867033576965332, 0.0086496000289917, 0.00858521556854248, 0.008775679588317872, 0.009214303970336913, 0.009157535552978515, 0.009073408126831054, 0.008957951545715333, 0.008790016174316406, 0.008712191581726075, 0.008640512466430664, 0.008560640335083008, 0.008566783905029298, 0.008581119537353516, 0.008531968116760253, 0.008566816329956055, 0.008597472190856933, 0.008597503662109375, 0.008617919921875, 0.008689855575561524, 0.00869331169128418, 0.008634143829345704, 0.008651295661926269, 0.008622079849243165, 0.0086179838180542, 0.008585344314575196, 0.008595552444458008, 0.008572128295898438, 0.008567359924316406, 0.008527968406677246, 0.008525983810424805, 0.008588095664978028, 0.008575743675231933, 0.008616127967834473, 0.00857692813873291, 0.00858675193786621, 0.008563296318054199, 0.0086179838180542, 0.008631839752197265, 0.008663519859313965, 0.00860979175567627, 0.008643967628479004, 0.00870032024383545, 0.008775103569030761, 0.008703104019165039, 0.008633088111877442, 0.00859763240814209, 0.008582176208496094, 0.00859216022491455, 0.008590304374694824, 0.008623711585998535, 0.008564767837524414, 0.008644672393798828, 0.00858348846435547, 0.00861184024810791, 0.0086364164352417, 0.008398847579956055, 0.00860159969329834, 0.008550111770629882, 0.008577312469482422, 0.008558367729187012, 0.008532256126403808, 0.009459648132324219, 0.00900924777984619, 0.008589119911193847, 0.00858448028564453, 0.00859382438659668, 0.008636832237243652, 0.008598784446716308, 0.008643424034118653, 0.008590880393981934, 0.008595135688781739, 0.008642784118652344, 0.008550880432128906, 0.008589311599731446, 0.008612095832824707, 0.00861353588104248, 
0.008579520225524903, 0.008545184135437011, 0.008555264472961425, 0.00856287956237793, 0.00857596778869629, 0.0085696964263916, 0.008665056228637695, 0.008636608123779296, 0.00860979175567627, 0.008585056304931641, 0.008670495986938477, 0.00863920021057129, 0.00862822437286377, 0.008589311599731446, 0.008703680038452148, 0.008577343940734863, 0.00857907199859619, 0.008556096076965333, 0.008581503868103028, 0.008525888442993164, 0.008572064399719238, 0.008581983566284179, 0.00858521556854248, 0.008643808364868165, 0.008577759742736817, 0.00858937644958496, 0.008519359588623047, 0.00855686378479004, 0.00855395221710205, 0.00854032039642334, 0.008620736122131347, 0.00857260799407959, 0.008580767631530761, 0.00856611156463623, 0.008647808074951172, 0.008707967758178711, 0.00883670425415039, 0.00937548828125, 0.009038496017456054, 0.008652799606323243, 0.00865180778503418, 0.008678367614746094, 0.008376192092895508, 0.008861503601074218, 0.0086844482421875, 0.00865187168121338, 0.008612256050109863, 0.00864633560180664, 0.008617759704589844, 0.008598336219787598, 0.00860979175567627, 0.008586239814758301, 0.008668160438537598, 0.008673343658447265, 0.008638272285461426, 0.008638591766357421, 0.00876035213470459, 0.008713184356689453, 0.008666496276855468, 0.008595775604248047, 0.008659263610839844, 0.008584544181823731, 0.008516480445861816, 0.008599328041076661, 0.008568384170532227, 0.008576704025268554, 0.008551136016845703, 0.008550432205200196, 0.00856390380859375, 0.008579039573669433, 0.008586079597473144, 0.008553728103637696, 0.008575551986694336, 0.00854975986480713, 0.008563520431518555, 0.008598655700683593, 0.008561440467834473, 0.008597599983215331, 0.008559679985046387, 0.008812928199768066, 0.008642623901367188, 0.008591872215270996, 0.008665375709533692, 0.008644319534301758, 0.008584927558898926, 0.00858255958557129, 0.008607999801635742, 0.00854697608947754, 0.008582719802856445, 0.008526528358459472, 0.008595168113708496, 0.008566975593566895, 0.008633440017700195, 0.008639007568359376, 0.008658143997192383, 0.00862435245513916, 0.009238656044006349, 0.008744768142700195, 0.00947868824005127, 0.008918975830078125, 0.00959727954864502, 0.008677151679992676, 0.008705951690673829, 0.008650208473205566, 0.008610912322998047, 0.008331263542175293, 0.008650015830993653, 0.008618720054626465, 0.00861184024810791, 0.008541952133178711, 0.008634336471557617, 0.008565024375915527, 0.008632320404052735, 0.008566847801208496, 0.008578144073486327, 0.008632255554199218, 0.008622688293457031, 0.008595775604248047, 0.008675423622131348, 0.009055392265319824, 0.008632575988769531, 0.008565247535705567, 0.008553471565246582, 0.008535200119018555, 0.008619135856628417, 0.008602335929870605, 0.008598591804504395, 0.008813247680664063, 0.008614336013793945, 0.008636223793029784, 0.008753151893615722, 0.00876540756225586, 0.008884256362915038, 0.008919232368469239, 0.009077664375305175, 0.00897555160522461, 0.009048031806945802, 0.009080063819885255, 0.00912332820892334, 0.009152607917785644, 0.009193599700927734, 0.009163552284240723, 0.00924783992767334, 0.00936847972869873, 0.009426912307739259, 0.009250271797180175, 0.009281439781188965, 0.00919593620300293, 0.00928179168701172, 0.009256959915161133, 0.009115648269653321, 0.009066495895385742, 0.009183232307434081, 0.009204863548278808, 0.009538432121276856, 0.009218111991882324, 0.009287551879882812, 0.009266752243041992, 0.00923852825164795, 0.009458175659179687, 0.00924403190612793, 0.009149279594421387, 0.009277215957641602, 
0.009307711601257325, 0.009150912284851074, 0.009005375862121582, 0.009182656288146972, 0.00945321559906006, 0.008957440376281739, 0.00918393611907959, 0.009283391952514649, 0.009277440071105958, 0.009441280364990234, 0.009510911941528321, 0.009209407806396484, 0.00912656021118164, 0.009257920265197754, 0.009130111694335937, 0.009119872093200684, 0.009112159729003906, 0.009276576042175293, 0.00936569595336914, 0.00922486400604248, 0.009441280364990234, 0.009332032203674316, 0.009093855857849121, 0.009148256301879883, 0.009136159896850586, 0.009307552337646484, 0.009525952339172363, 0.009195520401000976, 0.00930611228942871, 0.009289376258850098, 0.009145824432373047, 0.009357536315917969, 0.009282015800476074, 0.009104864120483398, 0.009025792121887206, 0.008968671798706055, 0.008922783851623536, 0.00899728012084961, 0.009027520179748534, 0.009347071647644043, 0.009263104438781738, 0.009173055648803712, 0.009201375961303711, 0.009210047721862792, 0.00917033576965332, 0.009439231872558594, 0.00917363166809082, 0.009197567939758301, 0.009117695808410644, 0.009013152122497559, 0.008933216094970704, 0.00906265640258789, 0.009258111953735352, 0.009268095970153808, 0.009051872253417968, 0.009119392395019531, 0.009267871856689452, 0.009391424179077148, 0.009222816467285157, 0.009138175964355469, 0.009032928466796875, 0.008899359703063965, 0.0089619197845459, 0.00901318359375, 0.009321760177612305, 0.009202143669128419, 0.009134528160095214, 0.009093119621276855, 0.009160479545593262, 0.009395808219909667, 0.00939414405822754, 0.00942956829071045, 0.009278656005859375, 0.009175968170166016, 0.009295167922973633, 0.009155263900756836, 0.009117088317871093, 0.009155167579650878, 0.009135807991027832, 0.008974080085754394, 0.009061216354370117, 0.009042719841003417, 0.009159616470336914, 0.009211903572082519, 0.009179231643676757, 0.009021344184875489, 0.00921126365661621, 0.009240192413330078, 0.009115839958190918, 0.009140704154968262, 0.009046367645263672, 0.009009152412414552, 0.009046015739440917, 0.00902284812927246, 0.009222911834716798, 0.00928054428100586, 0.009145024299621583, 0.008966303825378419, 0.008888544082641602, 0.009129055976867676, 0.009749183654785156, 0.009058303833007812, 0.009029055595397949, 0.008978464126586915, 0.009183775901794434, 0.009389408111572265, 0.009273664474487305, 0.009257311820983887, 0.009101375579833985, 0.009131967544555664, 0.009244640350341797, 0.009254688262939454, 0.00913987159729004, 0.009032447814941407, 0.009056096076965331, 0.009089280128479003, 0.009209440231323243, 0.009207839965820313, 0.009100416183471679, 0.009217023849487305, 0.009158656120300293, 0.009101311683654785, 0.009410847663879394, 0.00915129566192627, 0.009130016326904297, 0.009076704025268554, 0.009030495643615723, 0.00920800018310547, 0.009336704254150391, 0.009328831672668457, 0.009301823616027832, 0.009228351593017579, 0.009265151977539063, 0.00925228786468506, 0.009192000389099121, 0.00925875186920166, 0.009253120422363282, 0.009245856285095215, 0.009231040000915527, 0.009146528244018555, 0.009101311683654785, 0.009125568389892579, 0.009016863822937012, 0.009034527778625488, 0.008976351737976075, 0.009101344108581542, 0.009154175758361817, 0.009046208381652833, 0.008980223655700684, 0.00907526397705078, 0.009183103561401368, 0.00923852825164795, 0.009213983535766601, 0.009176128387451172, 0.009247648239135741, 0.009352224349975586, 0.00919215965270996, 0.009021023750305175, 0.00894428825378418, 0.008900863647460937, 0.008962112426757813, 0.009155584335327148, 
0.009214655876159668, 0.009363615989685058, 0.009291616439819336, 0.00954918384552002, 0.009261695861816407, 0.009193632125854492, 0.009061504364013671, 0.009222816467285157, 0.009266528129577636, 0.009106143951416016, 0.009087072372436524, 0.009193375587463378, 0.009240575790405273, 0.009255135536193847, 0.009285280227661133, 0.009203840255737304, 0.009377792358398437, 0.009203712463378906, 0.009115455627441406, 0.009099136352539063, 0.009157024383544921, 0.009131551742553711, 0.009025919914245605, 0.009054207801818847, 0.009237919807434082, 0.009343839645385743, 0.009272704124450684, 0.009202048301696778, 0.009086976051330567, 0.008957951545715333, 0.00889583969116211, 0.008957951545715333, 0.00865328025817871, 0.009000960350036622, 0.00920576000213623, 0.009158143997192383, 0.009122271537780762, 0.008998944282531738, 0.009025535583496093, 0.009082015991210937, 0.009222208023071289, 0.008996831893920898, 0.009244768142700196, 0.009177824020385743, 0.009379839897155762, 0.009652223587036133, 0.009451519966125489, 0.009928704261779785, 0.009515263557434083, 0.009676223754882812, 0.00989568042755127, 0.009249183654785156, 0.009289024353027343, 0.00912179183959961, 0.009040736198425292, 0.008902655601501466, 0.008978464126586915, 0.008986207962036133, 0.009152671813964845, 0.00909670352935791, 0.009022175788879394, 0.008988096237182617, 0.009011775970458984, 0.009184288024902345, 0.009108448028564453, 0.009076800346374512, 0.009074624061584474, 0.008972064018249511, 0.00899839973449707, 0.008954591751098633, 0.009089216232299805, 0.00917689609527588, 0.00916703987121582, 0.009197183609008789, 0.009223936080932617, 0.009053855895996095, 0.0090283842086792, 0.008986528396606446, 0.008984031677246094, 0.009357760429382325, 0.00900268840789795, 0.009019519805908203, 0.008945343971252441, 0.009064352035522461, 0.009132896423339844, 0.00920195198059082, 0.009293760299682617, 0.009364831924438476, 0.009369983673095703, 0.009215999603271484, 0.009082176208496094, 0.00906719970703125, 0.009209856033325196, 0.009176480293273925, 0.009278047561645507, 0.009110048294067382, 0.009154335975646972, 0.008954079627990722, 0.008859647750854491, 0.0088722562789917, 0.009008831977844238, 0.009043264389038085, 0.008849920272827149, 0.008709792137145997, 0.00891977596282959, 0.009028639793395996, 0.00902019214630127, 0.009110527992248535, 0.009101344108581542, 0.008963040351867676, 0.008795807838439942, 0.008730976104736329, 0.008736767768859864, 0.008832032203674317, 0.008887264251708984, 0.008779104232788087, 0.00872105598449707, 0.009076064109802245, 0.00913475227355957, 0.008800224304199218, 0.008710176467895507, 0.008859647750854491, 0.008763392448425293, 0.008652480125427246, 0.008743231773376464, 0.008552063941955566, 0.008569215774536133, 0.008748448371887207, 0.009091839790344239, 0.009438464164733887, 0.009295583724975586, 0.009354111671447753, 0.009311264038085937, 0.009268192291259766, 0.009252896308898926, 0.009084511756896972, 0.008884639739990234, 0.008939488410949707, 0.008987775802612305, 0.008991583824157715, 0.00886406421661377, 0.008859359741210937, 0.008824288368225098, 0.008978976249694823, 0.009265151977539063, 0.00910147190093994, 0.009314144134521484, 0.008973983764648438, 0.00887782382965088, 0.008770367622375489, 0.008809696197509766, 0.008917280197143555, 0.00906668758392334, 0.009216095924377441, 0.009072256088256837, 0.008888704299926758, 0.008962047576904298, 0.00910540771484375, 0.010554143905639649, 0.009851903915405273, 0.009129983901977539, 0.009019455909729003, 
0.00912384033203125, 0.00912172794342041, 0.0090600004196167, 0.009004639625549317, 0.008944160461425781, 0.008859999656677246, 0.008738592147827148, 0.008697152137756348, 0.008683520317077637, 0.00878889560699463, 0.009366815567016601, 0.00941868782043457, 0.009364128112792969, 0.009420255661010742, 0.00931884765625, 0.009324511528015137, 0.009162303924560546, 0.009157183647155762, 0.009164959907531738, 0.009101152420043945, 0.009074048042297363, 0.009115519523620606, 0.008908576011657714, 0.008886431694030762, 0.00897862434387207, 0.009382399559020996, 0.009109791755676269, 0.008984416007995606, 0.008739904403686523, 0.008769536018371582, 0.008963007926940918, 0.009279616355895996, 0.009310432434082031, 0.009150112152099609, 0.009015104293823243, 0.009081024169921875, 0.009048064231872559, 0.008955904006958008, 0.00885865592956543, 0.008927424430847168, 0.008905856132507325, 0.008831999778747558, 0.00881935977935791, 0.009033727645874023, 0.008990495681762696, 0.00884115219116211, 0.00880668830871582, 0.008892640113830567, 0.008928863525390626, 0.008893792152404784, 0.00889737606048584, 0.008950816154479981, 0.00897532844543457, 0.008843263626098634, 0.008828448295593263, 0.008809056282043457, 0.008826751708984375, 0.008843263626098634, 0.009054400444030762, 0.009355775833129883, 0.009312735557556153, 0.00940236759185791, 0.009293824195861817, 0.009313568115234375, 0.009159296035766602, 0.009201760292053223, 0.00903551959991455, 0.009084671974182128, 0.00912656021118164, 0.008980511665344238, 0.009082688331604003, 0.008867456436157226, 0.008968576431274414, 0.009215935707092286, 0.009174400329589844, 0.00917363166809082, 0.009135199546813964, 0.009079775810241698, 0.009207807540893554, 0.009236543655395509, 0.009082816123962402, 0.008957311630249024, 0.008953503608703613, 0.009012191772460938, 0.008970239639282226, 0.009089088439941407, 0.009048031806945802, 0.009180928230285644, 0.009164992332458497, 0.009037535667419433, 0.009168607711791992, 0.00924947166442871, 0.009013152122497559, 0.00902137565612793, 0.00910547161102295, 0.009072287559509278, 0.008960576057434081, 0.009297663688659669, 0.009059647560119628, 0.008889056205749512, 0.008798208236694336, 0.008900351524353027, 0.009261311531066895, 0.009397567749023438, 0.00934291172027588, 0.009399040222167969, 0.009309503555297851, 0.009204128265380859, 0.009281760215759277, 0.009306367874145507, 0.009166432380676269, 0.009150752067565918, 0.00903264045715332, 0.008954879760742187, 0.00898252773284912, 0.008956064224243164, 0.008824671745300293, 0.00917244815826416, 0.009213824272155762, 0.009077216148376465, 0.008984319686889649, 0.008974944114685059]",tokens/s,111.23494978293853,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.320512,14274.199552,0.0,13878.951936,13865.632768,s,1,7.65110205078125,7.65110205078125,0.0,7.65110205078125,7.65110205078125,7.65110205078125,7.65110205078125,[7.65110205078125],,kWh,1.2706350270832445e-05,1.3818572771536117e-06,5.095281853995104e-06,1.9183489401981163e-05,,MB,1148.06784,14697.824256,0.0,14289.993728,14237.628416,s,10,1.8708953247070312,0.18708953247070312,0.005850038407180689,0.1897191162109375,0.1903833023071289,0.1907439353942871,0.19103244186401366,"[0.18225616455078125, 0.18965721130371094, 0.19030316162109376, 0.1911045684814453, 0.1879140167236328, 0.1710326385498047, 0.18994551086425782, 0.18882829284667968, 0.19007273864746094, 0.18978102111816406]",tokens/s,1368.328824276087,kWh,5.581673963128834e-06,6.1531401767901e-07,3.7278593764151706e-06,9.924847357223015e-06,tokens/kWh,25793847.581312235,MB,1165.975552,14865.596416,0.0,14457.765888,14415.235584,s,10,38.91846997070313,3.8918469970703127,0.00808245757850383,3.8932133789062497,3.9007560302734374,3.9016928344726565,3.9024422778320313,"[3.875691162109375, 3.883259033203125, 3.88573486328125, 3.88935595703125, 3.8903212890625, 3.89610546875, 3.9005478515625, 3.8968857421875, 3.89793896484375, 3.902629638671875]",tokens/s,16.187686732655436,kWh,0.0001137893467947874,1.2551477776801096e-05,7.54338983973839e-05,0.00020177472296897236,tokens/kWh,312229.3965914043,,s,630,38.91394614791871,0.06176816848875984,0.0006070965176817643,0.06171547126770019,0.062132289123535155,0.06224803886413574,0.0650584912109375,"[0.0646937255859375, 0.0624031982421875, 0.06150543975830078, 0.06130284881591797, 0.06101833724975586, 0.06089932632446289, 0.06093407821655274, 0.060762016296386716, 0.060932254791259764, 0.06090502548217774, 0.06112303924560547, 0.061316287994384766, 0.061047584533691406, 0.061317119598388675, 0.06105452728271484, 0.0614504623413086, 0.061891937255859376, 0.06169484710693359, 0.061427295684814455, 0.06129296112060547, 0.06113846588134766, 0.061043167114257814, 0.06099148941040039, 0.061329086303710936, 0.061448577880859376, 0.061362110137939456, 0.061216766357421876, 0.06121881484985352, 0.061271617889404294, 0.06127228927612305, 0.061626625061035153, 0.061781566619873045, 0.061843902587890624, 0.06176124954223633, 0.061617919921875, 0.06153993606567383, 0.061585662841796875, 0.06148982238769531, 0.061190174102783206, 0.06135804748535156, 0.06122659301757812, 0.06129296112060547, 0.06137855911254883, 0.06160793685913086, 0.06156425476074219, 0.06146524810791015, 0.06177177429199219, 0.06187011337280274, 0.06203577423095703, 0.06179759979248047, 0.06181369781494141, 0.06175324630737305, 0.061698143005371096, 0.06172652816772461, 0.061793472290039064, 0.06177862548828125, 0.0614956169128418, 0.06142483139038086, 0.06145516967773437, 0.06183116912841797, 0.061687808990478515, 0.06188851165771484, 0.06187007904052735, 0.06510774230957031, 0.06277423858642578, 0.061744895935058594, 0.061610240936279294, 0.06134991836547852, 0.061220832824707035, 0.061061023712158206, 0.06116470336914062, 0.061149921417236325, 0.0611596794128418, 0.06125337600708008, 0.06122723388671875, 0.06121267318725586, 0.061292545318603515, 0.061720577239990235, 0.061767711639404296, 0.06203744125366211, 0.061886398315429685, 0.061567264556884764, 0.06138857650756836, 0.06137459182739258, 0.06119260787963867, 0.06127206420898437, 0.061590816497802736, 0.06147760009765625, 0.061501502990722656, 0.06129452896118164, 0.06161407852172852, 0.06150348663330078, 
0.061558719635009765, 0.06174319839477539, 0.06176559829711914, 0.06188982391357422, 0.061950687408447266, 0.06192947387695313, 0.061712383270263675, 0.06147404861450195, 0.06155945587158203, 0.06144979095458984, 0.06134592056274414, 0.06136259078979492, 0.06136627197265625, 0.06128643035888672, 0.061505569458007815, 0.06163654327392578, 0.06184550476074219, 0.06185292816162109, 0.06183603286743164, 0.06222784042358399, 0.06204687881469727, 0.061889793395996096, 0.06170492935180664, 0.061753345489501954, 0.06161203384399414, 0.061677566528320314, 0.061529407501220705, 0.0614714241027832, 0.06137638473510742, 0.06146879959106445, 0.0614257926940918, 0.06161743927001953, 0.06160240173339844, 0.06188851165771484, 0.06509616088867187, 0.06276697540283203, 0.06175139236450195, 0.06156288146972656, 0.0611759033203125, 0.06106240081787109, 0.06111708831787109, 0.06107340621948242, 0.061431678771972656, 0.06127833557128906, 0.06123110580444336, 0.06137651062011719, 0.06121638488769531, 0.06130080032348633, 0.06149766540527344, 0.06189056015014648, 0.06209331130981445, 0.062064640045166014, 0.06182092666625977, 0.06153955078125, 0.06138044738769531, 0.061295520782470705, 0.06125116729736328, 0.06123772811889648, 0.06133689498901367, 0.061663936614990235, 0.06138252639770508, 0.06142092895507813, 0.061534942626953124, 0.0615813102722168, 0.061859489440917965, 0.06203631973266602, 0.06212796783447266, 0.062120094299316406, 0.06199705505371094, 0.061730846405029294, 0.06166739273071289, 0.06151366424560547, 0.061378528594970706, 0.061402816772460934, 0.0615140495300293, 0.061306880950927733, 0.06150348663330078, 0.06187129592895508, 0.061487934112548825, 0.0616673583984375, 0.06174512100219726, 0.061906623840332034, 0.06183353424072266, 0.06207897567749023, 0.062037696838378904, 0.06203334426879883, 0.06194793701171875, 0.06167023849487305, 0.06170009613037109, 0.061437950134277344, 0.061423614501953126, 0.06133145523071289, 0.06156256103515625, 0.061663551330566405, 0.06152761459350586, 0.06175993728637695, 0.06204787063598633, 0.06505612945556641, 0.06332454299926758, 0.06217452621459961, 0.06167644882202149, 0.061261089324951175, 0.061147361755371096, 0.0611835823059082, 0.06109686279296875, 0.0611693115234375, 0.061326847076416016, 0.06121353530883789, 0.06126300811767578, 0.06133615875244141, 0.0612059211730957, 0.06143475341796875, 0.06171852874755859, 0.06199251174926758, 0.0618639030456543, 0.06191558456420899, 0.06158950424194336, 0.06133935928344727, 0.061484512329101564, 0.061367103576660156, 0.06151919937133789, 0.0614304313659668, 0.061609375, 0.06133411026000977, 0.06133145523071289, 0.061483009338378906, 0.06155059051513672, 0.06176496124267578, 0.06196284866333008, 0.062182945251464845, 0.06204678344726562, 0.06185776138305664, 0.061685760498046874, 0.061878273010253906, 0.06148198318481445, 0.061484031677246094, 0.061603839874267576, 0.061506847381591796, 0.06176227188110352, 0.06177791976928711, 0.06157894515991211, 0.061532478332519534, 0.06181820678710938, 0.0621308479309082, 0.061955329895019534, 0.06190361785888672, 0.061900798797607424, 0.06195337677001953, 0.06208784103393555, 0.06187417602539062, 0.061624126434326174, 0.06173510360717773, 0.0617347526550293, 0.06185385513305664, 0.06156902313232422, 0.06182297515869141, 0.06172585678100586, 0.06195644760131836, 0.061896766662597656, 0.061868606567382814, 0.0655218276977539, 0.06336307144165039, 0.06227478408813476, 0.06163286590576172, 0.0613092155456543, 0.06114284896850586, 0.06121088027954102, 0.0612391357421875, 
0.06120265579223633, 0.06114300918579101, 0.061206558227539065, 0.06127926254272461, 0.06138159942626953, 0.0613438720703125, 0.06142310333251953, 0.061721057891845704, 0.061926559448242186, 0.06205724716186523, 0.06176969528198242, 0.06167728042602539, 0.06167372894287109, 0.061560863494873046, 0.06152550506591797, 0.0614155502319336, 0.061478816986083984, 0.06144825744628906, 0.061278656005859376, 0.06166291046142578, 0.06173519897460938, 0.06177328109741211, 0.06163711929321289, 0.061716510772705076, 0.06216025543212891, 0.06213286590576172, 0.061857505798339846, 0.06177536010742188, 0.06148988723754883, 0.061619297027587894, 0.06175638580322266, 0.061726753234863284, 0.06156259155273437, 0.061407489776611326, 0.06160521697998047, 0.061639358520507816, 0.06166934585571289, 0.06188652801513672, 0.06169724655151367, 0.061905311584472655, 0.062093631744384765, 0.062029407501220706, 0.06221865463256836, 0.0619683837890625, 0.061712383270263675, 0.061652992248535154, 0.061693950653076174, 0.061654209136962894, 0.06165151977539062, 0.06158975982666016, 0.0615813102722168, 0.06171443176269531, 0.06167552185058594, 0.06195814514160156, 0.062133758544921876, 0.06830694580078125, 0.0645693130493164, 0.06316835021972657, 0.06209145736694336, 0.061423583984375, 0.06153548812866211, 0.061290271759033205, 0.06118048095703125, 0.06107791900634765, 0.06109584045410156, 0.06122063827514648, 0.0612825927734375, 0.06115740966796875, 0.06109596633911133, 0.06120640182495117, 0.06130905532836914, 0.06159561538696289, 0.06180422210693359, 0.06207110214233398, 0.061886463165283206, 0.061992961883544924, 0.061652992248535154, 0.061521919250488284, 0.061392383575439455, 0.06133145523071289, 0.06146297454833984, 0.061289886474609374, 0.06158748626708985, 0.06156895828247071, 0.06157587051391602, 0.06155059051513672, 0.06150457763671875, 0.061844287872314455, 0.06183747100830078, 0.06189456176757813, 0.06199699020385742, 0.06207104110717773, 0.06223795318603516, 0.06207302474975586, 0.06178246307373047, 0.06176678466796875, 0.061535102844238285, 0.06157721710205078, 0.06175859069824219, 0.06150377655029297, 0.06151023864746094, 0.06165462493896484, 0.06170399856567383, 0.06185539245605469, 0.0618935661315918, 0.06198028945922852, 0.06188278579711914, 0.06190217590332031, 0.062021728515625, 0.06196688079833984, 0.06186393737792969, 0.061790271759033205, 0.06172256088256836, 0.06181798553466797, 0.06159193420410156, 0.06190950393676758, 0.06188556671142578, 0.061960639953613283, 0.06541251373291015, 0.0634823989868164, 0.062201343536376956, 0.06179894256591797, 0.061388671875, 0.061294689178466796, 0.061347007751464844, 0.061366401672363284, 0.06124771118164062, 0.06134835052490235, 0.06133103942871094, 0.06141584014892578, 0.061389984130859374, 0.06152019119262695, 0.06146310424804687, 0.06182601547241211, 0.062241790771484375, 0.062211872100830075, 0.06215497589111328, 0.062015487670898435, 0.06179779052734375, 0.061751903533935545, 0.061620223999023435, 0.06160793685913086, 0.06150688171386719, 0.061534912109375, 0.06154367828369141, 0.06159846496582031, 0.06186598587036133, 0.06181071853637695, 0.06178531265258789, 0.06195657730102539, 0.06191132736206055, 0.062061790466308595, 0.062350112915039065, 0.062367198944091796, 0.06218310546875, 0.062139232635498046, 0.061943809509277345, 0.061876224517822265, 0.061830623626708985, 0.06166329574584961, 0.06167599868774414, 0.061599552154541014, 0.06172668838500977, 0.06194095993041992, 0.061848575592041016, 0.06193766403198242, 0.061992961883544924, 
0.062297470092773435, 0.06215948867797851, 0.062238719940185545, 0.06226943969726562, 0.06216195297241211, 0.06194659042358398, 0.06189491271972656, 0.06185968017578125, 0.06187760162353516, 0.06176851272583008, 0.061982719421386716, 0.0619315185546875, 0.06185929489135742, 0.06189315032958984, 0.06505945587158203, 0.06316239929199219, 0.062101566314697265, 0.06172662353515625, 0.06140111923217773, 0.06124550247192383, 0.06145180892944336, 0.06125196838378906, 0.06123724746704102, 0.0612782096862793, 0.06137187194824219, 0.06158185577392578, 0.061417312622070314, 0.06146047973632812, 0.06167932891845703, 0.06182489776611328, 0.06208774566650391, 0.06213017654418945, 0.06203340911865234, 0.06185420989990234, 0.061788257598876954, 0.061671329498291017, 0.06145014572143555, 0.061464672088623044, 0.06147020721435547, 0.06148966217041016, 0.061618175506591794, 0.06159564971923828, 0.061661182403564455, 0.06162428665161133, 0.061767711639404296, 0.062013214111328124, 0.06209312057495117, 0.06228329467773437, 0.062233470916748045, 0.062121440887451175, 0.06218163299560547, 0.06223046493530274, 0.06191142272949219, 0.06166425704956055, 0.06168425750732422, 0.061585887908935544, 0.061599712371826175, 0.06177507019042969, 0.061788959503173826, 0.06189814376831055, 0.0620645751953125, 0.06196207809448242, 0.06231532669067383, 0.06198067092895508, 0.06194697570800781, 0.06189503860473633, 0.06200579071044922, 0.06195395278930664, 0.06176729583740234, 0.061645278930664064, 0.061656929016113284, 0.06188460922241211, 0.061954017639160155, 0.061859264373779296, 0.06188908767700195, 0.06175539016723633, 0.06185692977905274, 0.06502175903320312, 0.06303590393066406, 0.06203097534179688, 0.06165532684326172, 0.061409950256347656, 0.06140927886962891, 0.06127519989013672, 0.061294689178466796, 0.06141219329833984, 0.06123724746704102, 0.06121596908569336, 0.06140393447875977, 0.061568832397460936, 0.061367809295654295, 0.06149571228027344, 0.061773727416992184, 0.06188275146484375, 0.061960193634033205, 0.0621033935546875, 0.06176729583740234, 0.061739585876464845, 0.061513023376464845, 0.06144483184814453, 0.061633758544921875, 0.06157385635375977, 0.06157929611206055, 0.06166278457641602, 0.06167907333374024, 0.06169708633422852, 0.06174835205078125, 0.06191347122192383, 0.06192985534667969, 0.062205951690673826, 0.06210153579711914, 0.06236550521850586, 0.06216447830200195, 0.06217504119873047, 0.06208393478393555, 0.06184489440917969, 0.0616671028137207, 0.06169068908691406, 0.06190460968017578, 0.06176943969726562, 0.06164332962036133, 0.06189404678344727, 0.06177580642700195, 0.06177654266357422, 0.061833248138427735, 0.061976577758789064, 0.062210079193115234, 0.06241068649291992, 0.06221823883056641, 0.06214246368408203, 0.062064414978027345, 0.06204191970825195, 0.062126270294189455, 0.06188054275512695, 0.06177526473999023, 0.06185219192504883, 0.06188243103027344, 0.061900798797607424, 0.061753345489501954, 0.06190697479248047, 0.06509228515625, 0.06318505477905273, 0.06206038284301758, 0.061625919342041015, 0.06138719940185547, 0.06134783935546875, 0.061357471466064455, 0.06155939102172851, 0.06150559997558594, 0.06150454330444336, 0.06197481536865234, 0.061659870147705076, 0.06163600158691406, 0.06144607925415039, 0.06168812942504883, 0.062085407257080075, 0.06222230529785156, 0.06223257446289063, 0.061956127166748046, 0.06175900650024414, 0.06163689422607422, 0.06142316818237305, 0.061534046173095706, 0.06144073486328125, 0.06175727844238281, 0.061724864959716796, 0.061876224517822265, 
0.061642784118652344, 0.061873950958251954, 0.06172691345214844, 0.06201958465576172, 0.06207692718505859, 0.06225920104980469, 0.062132225036621094, 0.06217692947387695, 0.06212361526489258, 0.06193436813354492, 0.06182601547241211, 0.06174591827392578, 0.0618658561706543, 0.06192371368408203, 0.06180252838134766, 0.06187731170654297, 0.06172288131713867, 0.06205305480957031, 0.06197244644165039, 0.062004737854003907, 0.06209926223754883, 0.061902942657470705, 0.062024288177490235, 0.06212326431274414, 0.06221481704711914, 0.06225315093994141, 0.06196559906005859, 0.06208585739135742, 0.06211756896972656, 0.06221196746826172, 0.06199065780639648, 0.062065376281738284, 0.061760608673095706, 0.061784927368164065, 0.06200252914428711, 0.062021728515625]",tokens/s,16.1895685830797,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an 
attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. 
If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,868.409344,2698.903552,0.0,2296.38144,2202.20672,s,1,7.656544921875,7.656544921875,0.0,7.656544921875,7.656544921875,7.656544921875,7.656544921875,[7.656544921875],,kWh,5.139401458313841e-06,5.58541053539125e-07,1.8900015120032476e-06,7.587944023856214e-06,,MB,1317.31456,2797.469696,0.0,2382.364672,2267.889152,s,10,0.28861289596557615,0.028861289596557614,0.0007781269847646284,0.028741279602050782,0.029324178504943847,0.030168537044525146,0.030844023876190188,"[0.031012895584106446, 0.028752607345581056, 0.028864320755004884, 0.028159584045410156, 0.028757568359375, 0.02870697593688965, 0.028110208511352538, 0.029136543273925782, 0.028382240295410158, 0.028729951858520508]",tokens/s,8870.01251775437,kWh,1.0145201222510147e-06,1.1181723871595634e-07,6.697950960207716e-07,1.7961324569877427e-06,tokens/kWh,142528463.8691583,MB,1360.982016,2797.469696,0.0,2382.364672,2267.891712,s,10,13.476153686523439,1.347615368652344,0.0046218266946442465,1.3484338989257814,1.3522509887695313,1.3524691223144532,1.3526436291503907,"[1.3391866455078125, 1.3401832275390626, 1.3451014404296875, 1.3522025146484375, 1.3511488037109376, 1.3517061767578125, 1.347132080078125, 1.3497357177734375, 1.34706982421875, 1.352687255859375]",tokens/s,46.749244232055545,kWh,3.8588299465665336e-05,4.255927136703192e-06,2.2988418274979276e-05,6.58326448773478e-05,tokens/kWh,956972.0329082133,,s,630,13.469758226394665,0.021380568613324845,0.0002991875232721857,0.02133391952514648,0.02160470008850098,0.02180513353347778,0.02238969120025635,"[0.02124595260620117, 0.021265567779541014, 0.021440992355346678, 0.021307008743286133, 0.021295360565185547, 0.021301759719848632, 0.021322784423828126, 0.021210079193115235, 0.021321727752685548, 0.02128486442565918, 0.02131715202331543, 0.021258079528808593, 0.02127872085571289, 0.021115392684936524, 0.021280096054077147, 0.02108700752258301, 0.021305599212646485, 0.02116377639770508, 0.021196800231933592, 0.021239839553833007, 0.021239776611328125, 0.021609952926635742, 0.02116646385192871, 0.021125280380249023, 0.02127791976928711, 0.02110736083984375, 0.021127296447753907, 0.021130752563476563, 0.02176051139831543, 0.020934528350830078, 0.021119359970092774, 0.021093215942382813, 0.021011360168457033, 0.021087743759155272, 0.021117439270019533, 0.022132383346557618, 0.021074272155761718, 0.02127052879333496, 0.021131296157836914, 0.02122300720214844, 0.021156095504760743, 0.021155967712402343, 0.021128320693969728, 0.02119968032836914, 0.021071935653686525, 
0.021209087371826172, 0.02113961601257324, 0.02110038375854492, 0.021344255447387696, 0.021366783142089844, 0.02123788833618164, 0.021378143310546875, 0.02142902374267578, 0.021405696868896484, 0.021120159149169922, 0.021308256149291993, 0.02129305648803711, 0.021192415237426758, 0.021270816802978515, 0.02127574348449707, 0.021228063583374025, 0.02122534370422363, 0.02132428741455078, 0.021297056198120116, 0.021261695861816407, 0.021336799621582032, 0.021356224060058594, 0.021454368591308594, 0.021260992050170898, 0.02119808006286621, 0.021111808776855468, 0.02108345603942871, 0.021371423721313478, 0.021184511184692383, 0.021278688430786133, 0.02120809555053711, 0.021134336471557616, 0.02107107162475586, 0.0212139835357666, 0.021127168655395507, 0.02120243263244629, 0.021050111770629883, 0.021071712493896486, 0.020981664657592772, 0.0210468807220459, 0.0209719352722168, 0.02109667205810547, 0.021089696884155275, 0.021268896102905274, 0.021145599365234375, 0.02115171241760254, 0.021247871398925783, 0.021338239669799804, 0.021346303939819337, 0.020971519470214844, 0.021224544525146483, 0.021167007446289063, 0.02123311996459961, 0.021195295333862305, 0.02101862335205078, 0.021161983489990235, 0.02122457695007324, 0.021131168365478514, 0.02108720016479492, 0.021088512420654296, 0.021185792922973633, 0.02114726448059082, 0.021130271911621094, 0.021020416259765626, 0.021063072204589844, 0.02120569610595703, 0.021211135864257814, 0.02124185562133789, 0.021335391998291015, 0.022399648666381836, 0.021379072189331053, 0.021282783508300783, 0.02125008010864258, 0.021200672149658203, 0.02122332763671875, 0.022591327667236327, 0.021483423233032227, 0.022020767211914063, 0.021483455657958984, 0.021828895568847657, 0.021405376434326173, 0.021386240005493166, 0.021612255096435547, 0.02126367950439453, 0.021164064407348634, 0.021242528915405273, 0.021847776412963867, 0.021192991256713867, 0.021583871841430666, 0.021251071929931642, 0.021166528701782227, 0.021236288070678712, 0.021261791229248046, 0.021185056686401367, 0.021178112030029297, 0.021313407897949218, 0.021126623153686522, 0.02127676773071289, 0.02134099197387695, 0.02110684776306152, 0.021038944244384766, 0.020981760025024415, 0.02107494354248047, 0.02107084846496582, 0.021160127639770508, 0.021243711471557618, 0.021421632766723632, 0.021231327056884765, 0.02158403205871582, 0.021172159194946288, 0.02112499237060547, 0.021269279479980467, 0.021311456680297852, 0.021352447509765626, 0.021135360717773437, 0.021202720642089844, 0.021125600814819335, 0.021122047424316406, 0.02114384078979492, 0.021139936447143556, 0.021128480911254882, 0.02137740707397461, 0.0212174072265625, 0.021048959732055665, 0.021224031448364256, 0.021241792678833006, 0.02139459228515625, 0.02138153648376465, 0.022120960235595705, 0.023981536865234375, 0.021271072387695312, 0.02107366371154785, 0.021149311065673828, 0.021291839599609376, 0.02122528076171875, 0.02176335906982422, 0.021334815979003906, 0.02191961669921875, 0.022067487716674803, 0.021358272552490235, 0.021569631576538087, 0.02128281593322754, 0.02114579200744629, 0.021265216827392578, 0.02100105667114258, 0.0214116153717041, 0.021456895828247072, 0.021192928314208985, 0.02234332847595215, 0.021403648376464843, 0.021326400756835937, 0.02143824005126953, 0.021220991134643555, 0.022055295944213866, 0.02125004768371582, 0.021383167266845703, 0.021325727462768555, 0.021413984298706053, 0.021425344467163085, 0.02182022476196289, 0.022253568649291993, 0.021752864837646484, 0.02123369598388672, 0.021340864181518555, 
0.02111529541015625, 0.02112905693054199, 0.02125209617614746, 0.021565055847167967, 0.02117056083679199, 0.021293119430541994, 0.021446016311645506, 0.021330495834350587, 0.021304351806640625, 0.0212608642578125, 0.021240224838256837, 0.021331968307495116, 0.021313535690307618, 0.021318880081176758, 0.02144755172729492, 0.022734848022460938, 0.02160185623168945, 0.021567935943603515, 0.02142812728881836, 0.021534912109375, 0.021496959686279297, 0.021242559432983397, 0.02143235206604004, 0.021407039642333984, 0.021451200485229492, 0.021566944122314455, 0.02154070472717285, 0.021547935485839845, 0.02150809669494629, 0.021315744400024414, 0.02137055969238281, 0.021463136672973632, 0.021338176727294923, 0.02145484733581543, 0.021456031799316405, 0.021338207244873047, 0.02147609519958496, 0.02147123146057129, 0.021493408203125, 0.021542591094970705, 0.02137766456604004, 0.021538463592529297, 0.021282623291015625, 0.02141360092163086, 0.021393440246582032, 0.021360639572143555, 0.02140943908691406, 0.021223455429077148, 0.021375520706176758, 0.021384000778198242, 0.021384159088134767, 0.021417919158935546, 0.021407808303833008, 0.02139129638671875, 0.021753087997436523, 0.021467071533203125, 0.021381248474121095, 0.021248224258422852, 0.02142220878601074, 0.021617055892944336, 0.021319936752319336, 0.02123923110961914, 0.021610815048217772, 0.02149580764770508, 0.021420032501220702, 0.021419551849365233, 0.02138115119934082, 0.021392063140869142, 0.021544704437255858, 0.02138051223754883, 0.02145894432067871, 0.021574464797973633, 0.021333791732788085, 0.021251552581787108, 0.021283359527587892, 0.02122319984436035, 0.021219551086425783, 0.021151168823242188, 0.02145747184753418, 0.021553152084350585, 0.021942272186279296, 0.0222740478515625, 0.021149696350097655, 0.021166080474853514, 0.021208383560180663, 0.02121404838562012, 0.02122502326965332, 0.021377567291259766, 0.021405439376831054, 0.02110198402404785, 0.021043615341186525, 0.021221343994140624, 0.021045183181762694, 0.021102880477905272, 0.021375072479248046, 0.02123971176147461, 0.021172224044799806, 0.02178767967224121, 0.024714080810546876, 0.02145907211303711, 0.021368831634521485, 0.021405696868896484, 0.02143836784362793, 0.02174492835998535, 0.0213819522857666, 0.021188255310058593, 0.021637247085571288, 0.021524063110351564, 0.021373472213745116, 0.021337984085083007, 0.021249216079711915, 0.021589056015014648, 0.021640640258789062, 0.02145702362060547, 0.021465280532836913, 0.021481472015380858, 0.021310495376586913, 0.021519071578979494, 0.021529983520507813, 0.021399551391601563, 0.021572032928466798, 0.021410240173339843, 0.021534719467163087, 0.021407936096191408, 0.02200761604309082, 0.02142617607116699, 0.0218787841796875, 0.02161859130859375, 0.02144470405578613, 0.021460416793823243, 0.021491296768188478, 0.021330080032348632, 0.021314207077026366, 0.021481632232666015, 0.021313535690307618, 0.021375200271606446, 0.02136787223815918, 0.02129804801940918, 0.021278783798217772, 0.021557024002075195, 0.021325824737548828, 0.021325824737548828, 0.02150399971008301, 0.021529855728149413, 0.021448448181152345, 0.021368928909301758, 0.021412384033203124, 0.02139379119873047, 0.021497119903564454, 0.021340896606445312, 0.021426015853881836, 0.021479583740234374, 0.021340160369873046, 0.021370880126953123, 0.021587711334228515, 0.021250303268432618, 0.021559295654296876, 0.021369056701660158, 0.021327007293701173, 0.021327903747558594, 0.0214370231628418, 0.021218496322631834, 0.021371711730957033, 0.021391359329223633, 
0.021606399536132814, 0.021529760360717774, 0.021528863906860353, 0.02149446487426758, 0.02123161506652832, 0.0209998722076416, 0.02142064094543457, 0.02117737579345703, 0.02148601531982422, 0.02143667221069336, 0.021309440612792968, 0.02151219177246094, 0.021451967239379883, 0.021527200698852538, 0.021716480255126954, 0.02134806442260742, 0.02145526313781738, 0.02155516815185547, 0.021400127410888672, 0.02147123146057129, 0.02137494468688965, 0.02149295997619629, 0.021418495178222655, 0.021364896774291993, 0.021614559173583986, 0.021208831787109375, 0.0212576961517334, 0.021294048309326172, 0.021166080474853514, 0.02123980712890625, 0.021204992294311522, 0.021263711929321288, 0.021363359451293945, 0.021196863174438477, 0.021460927963256837, 0.02179020881652832, 0.021457088470458983, 0.02140127944946289, 0.021190975189208986, 0.021383743286132812, 0.021354015350341798, 0.02113968086242676, 0.021171455383300782, 0.02158595275878906, 0.021306079864501955, 0.021196800231933592, 0.021230943679809572, 0.021360736846923828, 0.021235456466674806, 0.02119705581665039, 0.021365312576293944, 0.02138051223754883, 0.02155196762084961, 0.021742528915405273, 0.021703487396240236, 0.02139686393737793, 0.021336191177368163, 0.02137174415588379, 0.021298143386840822, 0.02122127914428711, 0.021373727798461913, 0.021397279739379882, 0.0213055362701416, 0.021231647491455077, 0.021317632675170898, 0.021348127365112303, 0.021507328033447265, 0.021459039688110353, 0.02136134338378906, 0.02149580764770508, 0.021573631286621094, 0.02161359977722168, 0.021396543502807615, 0.021325279235839843, 0.021428672790527344, 0.021383167266845703, 0.02127872085571289, 0.021397504806518555, 0.021587327957153322, 0.021621376037597655, 0.021606399536132814, 0.02155958366394043, 0.02130646324157715, 0.021381824493408204, 0.02119696044921875, 0.02141756820678711, 0.021242048263549803, 0.021203136444091795, 0.021191999435424803, 0.02161305618286133, 0.02149184036254883, 0.021204864501953125, 0.021213184356689452, 0.021270015716552734, 0.021281375885009765, 0.02106982421875, 0.021686176300048828, 0.021178016662597655, 0.021117311477661133, 0.021165952682495118, 0.021203264236450196, 0.02122300720214844, 0.021237024307250975, 0.02123664093017578, 0.02137820816040039, 0.02163999938964844, 0.021513376235961914, 0.02247769546508789, 0.02188198471069336, 0.021373823165893556, 0.02165465545654297, 0.02129155158996582, 0.021348672866821287, 0.022224031448364257, 0.0214147834777832, 0.02128678321838379, 0.02139468765258789, 0.02115878486633301, 0.021417280197143555, 0.02115238380432129, 0.02111417579650879, 0.021363456726074218, 0.021360639572143555, 0.02157948875427246, 0.02134454345703125, 0.021436384201049805, 0.02160451126098633, 0.021536640167236328, 0.021767263412475587, 0.021285696029663084, 0.021292831420898436, 0.021549856185913086, 0.02141119956970215, 0.021529216766357422, 0.02146099281311035, 0.02148358345031738, 0.02120697593688965, 0.02124185562133789, 0.021388832092285155, 0.02183216094970703, 0.021561504364013672, 0.021376224517822267, 0.02132646369934082, 0.021163455963134764, 0.021508672714233398, 0.02172230339050293, 0.021404064178466797, 0.021491424560546875, 0.021240512847900392, 0.02170419120788574, 0.021455360412597657, 0.021508031845092774, 0.022365312576293945, 0.021289920806884764, 0.021776384353637695, 0.021381120681762695, 0.02185795211791992, 0.021362560272216797, 0.02157027244567871, 0.021083839416503908, 0.0211759033203125, 0.02121776008605957, 0.02124799919128418, 0.02128428840637207, 0.02122604751586914, 
0.02115977668762207, 0.021210880279541017, 0.021286399841308593, 0.021293983459472657, 0.021213247299194337, 0.02131711959838867, 0.02119094467163086, 0.02118467140197754, 0.021230623245239257, 0.021382112503051758, 0.02134806442260742, 0.021334047317504882, 0.021463199615478514, 0.02151251220703125, 0.02155625534057617, 0.02124073600769043, 0.02133795166015625, 0.021147647857666017, 0.021190656661987304, 0.02122137641906738, 0.021211135864257814, 0.02119424057006836, 0.02128883171081543, 0.021271167755126955, 0.021129152297973634, 0.02132588768005371, 0.02125014305114746, 0.0212458553314209, 0.02128691291809082, 0.021099807739257813, 0.021328607559204103, 0.021209087371826172, 0.021575679779052736, 0.021342208862304687, 0.021272127151489257, 0.02131974411010742, 0.021209087371826172, 0.02111337661743164, 0.021519359588623048, 0.02236092758178711, 0.02127667236328125, 0.021067039489746094, 0.02114454460144043, 0.02131123161315918, 0.021149887084960937, 0.022226751327514647, 0.023446975708007814, 0.02142095947265625, 0.02121897506713867, 0.02125004768371582, 0.02133622360229492, 0.021272031784057618, 0.021416255950927734, 0.02137619209289551, 0.021986175537109375, 0.021950496673583984, 0.021464384078979493, 0.021412191390991212, 0.021582143783569336, 0.02143027114868164, 0.021579616546630858, 0.021416095733642577, 0.021540416717529296, 0.02130988883972168, 0.021202943801879884, 0.021325824737548828, 0.02125619125366211, 0.02126361656188965, 0.021440319061279297, 0.021310016632080077, 0.021220767974853515, 0.021363679885864257, 0.021817344665527344, 0.02160006332397461, 0.021489120483398436, 0.021312223434448243, 0.02129088020324707, 0.021325536727905273, 0.02174995231628418, 0.02154489517211914, 0.021525888442993163, 0.021523359298706055, 0.02142755126953125, 0.021579776763916016, 0.02145961570739746, 0.021592063903808592, 0.021542623519897462, 0.021408031463623047, 0.021382848739624025, 0.021263967514038085, 0.021435104370117187, 0.021469024658203124]",tokens/s,46.771440838892275,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.222208,6223.233024,0.0,5827.985408,5712.718848,s,1,7.54969677734375,7.54969677734375,0.0,7.54969677734375,7.54969677734375,7.54969677734375,7.54969677734375,[7.54969677734375],,kWh,1.0976639595829358e-05,1.1910635902911466e-06,4.5283369559945275e-06,1.6696040142115032e-05,,MB,1147.817984,6432.948224,0.0,6025.117696,5988.31104,s,10,0.6711704254150391,0.0671170425415039,0.004576825575853077,0.06750219345092773,0.07148004150390626,0.07151811599731446,0.07154857559204102,"[0.06773433685302735, 0.06620070648193359, 0.05516780853271484, 0.06727005004882812, 0.06571596527099609, 0.06938191986083984, 0.06544518280029298, 0.0712266845703125, 0.0714715805053711, 
0.07155619049072266]",tokens/s,3814.2324260145174,kWh,2.2953832184243813e-06,2.5303696379512485e-07,1.532210427156221e-06,4.080630609375728e-06,tokens/kWh,62735401.585188806,MB,1175.916544,6516.834304,0.0,6109.003776,6092.144128,s,10,18.60473205566406,1.860473205566406,0.004147410932867698,1.8609232177734376,1.8643675903320311,1.865044512939453,1.8655860510253905,"[1.8559962158203125, 1.863658935546875, 1.8511259765625, 1.8593104248046874, 1.8601689453125, 1.861677490234375, 1.86344873046875, 1.85940673828125, 1.865721435546875, 1.8642171630859374]",tokens/s,33.862352766762996,kWh,5.426698956574311e-05,5.985475470271603e-06,3.596873624024384e-05,9.622120127625856e-05,tokens/kWh,654741.3580830497,,s,630,18.601391460418704,0.029526018191140795,0.0008989477070408279,0.029379535675048828,0.029734858322143554,0.030054059314727783,0.0352632112121582,"[0.03342947387695312, 0.031324159622192385, 0.03038755226135254, 0.029514400482177735, 0.029248863220214843, 0.029041311264038087, 0.02899760055541992, 0.029062175750732423, 0.02906563186645508, 0.029282272338867186, 0.028957311630249023, 0.029175840377807617, 0.029105119705200196, 0.02931372833251953, 0.029274560928344726, 0.0292105598449707, 0.029188032150268554, 0.029234304428100585, 0.02914121627807617, 0.029112960815429686, 0.029093919754028322, 0.029050880432128907, 0.029243392944335936, 0.029179904937744142, 0.029288127899169923, 0.029497663497924806, 0.029398656845092772, 0.02911270332336426, 0.02915238380432129, 0.02922995185852051, 0.0291246395111084, 0.02938412857055664, 0.029391008377075194, 0.029539775848388673, 0.029840320587158204, 0.029789791107177735, 0.02963907241821289, 0.02993721580505371, 0.029727039337158204, 0.029534271240234375, 0.029496383666992188, 0.029449216842651366, 0.02949068832397461, 0.02931353569030762, 0.029267967224121092, 0.029200096130371094, 0.029198400497436522, 0.029318975448608398, 0.029385120391845702, 0.029429759979248047, 0.02951308822631836, 0.029909631729125977, 0.02967046356201172, 0.029356992721557618, 0.029327360153198243, 0.029372415542602538, 0.02938470458984375, 0.029359840393066407, 0.02925391960144043, 0.02931056022644043, 0.029348255157470703, 0.029543872833251952, 0.029553216934204103, 0.03524521636962891, 0.0326418571472168, 0.031092735290527345, 0.02991926383972168, 0.02933897590637207, 0.029161983489990235, 0.029162784576416016, 0.029028608322143556, 0.02911408042907715, 0.029074079513549806, 0.029151456832885742, 0.029073408126831055, 0.029164575576782228, 0.02917465591430664, 0.029703807830810548, 0.0294998722076416, 0.029147136688232423, 0.029124607086181642, 0.029057024002075195, 0.02919628715515137, 0.029057024002075195, 0.029212671279907225, 0.02957107162475586, 0.029056224822998047, 0.02916348838806152, 0.029260608673095705, 0.02931439971923828, 0.029235872268676757, 0.029613183975219726, 0.02944691276550293, 0.02945449638366699, 0.02934169578552246, 0.029396223068237304, 0.029545183181762694, 0.02973695945739746, 0.02987343978881836, 0.029816736221313478, 0.02966815948486328, 0.029618015289306642, 0.02950495910644531, 0.029393632888793944, 0.029480960845947264, 0.02972256088256836, 0.029560895919799806, 0.029417472839355467, 0.029470239639282227, 0.029464832305908205, 0.029470432281494142, 0.029470399856567384, 0.029512128829956054, 0.029513952255249023, 0.0293656005859375, 0.02944220733642578, 0.029643423080444337, 0.029460479736328125, 0.0294334716796875, 0.02951603126525879, 0.029472896575927734, 0.029441280364990233, 0.029479679107666017, 0.02950553512573242, 
0.029521791458129883, 0.02954457664489746, 0.029317472457885744, 0.029381792068481447, 0.030681407928466797, 0.030263519287109374, 0.02976799964904785, 0.029634559631347656, 0.029527711868286132, 0.02935638427734375, 0.029136608123779297, 0.029085599899291992, 0.029081375122070312, 0.0290382080078125, 0.029184095382690428, 0.029281152725219726, 0.02921459197998047, 0.02918822479248047, 0.02917910385131836, 0.029106559753417968, 0.029079967498779297, 0.029179904937744142, 0.029091392517089844, 0.02927155113220215, 0.029238208770751953, 0.02927577590942383, 0.02919001579284668, 0.029377056121826173, 0.029325279235839843, 0.029085695266723634, 0.029179744720458985, 0.02925788879394531, 0.029229055404663085, 0.02927414321899414, 0.0293305606842041, 0.029239391326904295, 0.029244159698486327, 0.029163520812988283, 0.029462207794189454, 0.0294136962890625, 0.02953830337524414, 0.02960383987426758, 0.029594655990600585, 0.029569503784179687, 0.029573631286621094, 0.029584991455078126, 0.0294936637878418, 0.029447296142578124, 0.02934668731689453, 0.029394943237304686, 0.029437408447265626, 0.0294487361907959, 0.02938470458984375, 0.029366207122802735, 0.029370431900024415, 0.029440031051635743, 0.02941334342956543, 0.029413375854492187, 0.029425664901733397, 0.029419519424438476, 0.02953327941894531, 0.029418399810791016, 0.029375680923461912, 0.029415231704711914, 0.029479103088378908, 0.03527056121826172, 0.03192457580566406, 0.030197792053222657, 0.029588991165161133, 0.029264352798461915, 0.02904832077026367, 0.029044448852539064, 0.029034431457519532, 0.028867424011230467, 0.029003263473510742, 0.02907935905456543, 0.02911712074279785, 0.028964448928833007, 0.02914899253845215, 0.029272544860839845, 0.029204639434814453, 0.029183263778686522, 0.029274816513061522, 0.029351520538330077, 0.02922742462158203, 0.029216768264770508, 0.02923014450073242, 0.029178720474243164, 0.029353759765625, 0.02945465660095215, 0.029396352767944337, 0.029311616897583007, 0.029253631591796874, 0.02916147232055664, 0.02927359962463379, 0.029118976593017577, 0.029319168090820313, 0.029441375732421875, 0.029552608489990233, 0.029647552490234375, 0.02958291244506836, 0.02956284713745117, 0.029654848098754884, 0.029843839645385742, 0.029646976470947266, 0.029543615341186522, 0.0293734073638916, 0.02932080078125, 0.029305248260498046, 0.029290496826171877, 0.02935315132141113, 0.029374656677246095, 0.02943824005126953, 0.029522111892700195, 0.029569023132324217, 0.029425312042236328, 0.029485439300537108, 0.029403263092041016, 0.02940447998046875, 0.02938889694213867, 0.029641311645507814, 0.029411327362060546, 0.02949087905883789, 0.029570783615112305, 0.02961199951171875, 0.029612159729003905, 0.029568799972534178, 0.029606624603271483, 0.03591609573364258, 0.03236640167236328, 0.030769311904907226, 0.02995199966430664, 0.02938051223754883, 0.029157375335693358, 0.029098079681396483, 0.028940031051635742, 0.0290731201171875, 0.029006368637084962, 0.028976255416870118, 0.0290251522064209, 0.029132736206054687, 0.02908576011657715, 0.029171712875366212, 0.029082656860351563, 0.028955615997314454, 0.02919171142578125, 0.029227487564086913, 0.02926585578918457, 0.029283456802368164, 0.029407743453979493, 0.029299135208129882, 0.029253631591796874, 0.029278207778930664, 0.02914633560180664, 0.029260128021240235, 0.02923289680480957, 0.029407936096191405, 0.029280256271362305, 0.02928233528137207, 0.029296607971191407, 0.02936627197265625, 0.02965212821960449, 0.029792896270751955, 0.029787839889526366, 
0.02965724754333496, 0.02966102409362793, 0.029483552932739257, 0.029424671173095704, 0.029493759155273438, 0.029571456909179686, 0.02938585662841797, 0.029504159927368163, 0.029401151657104493, 0.02933308792114258, 0.029402816772460937, 0.02942624092102051, 0.02941993522644043, 0.029509311676025392, 0.029245759963989256, 0.029237247467041014, 0.029208448410034178, 0.029283456802368164, 0.029377536773681642, 0.029421567916870117, 0.02953327941894531, 0.0294835205078125, 0.02958937644958496, 0.029532703399658203, 0.02947260856628418, 0.029454431533813476, 0.029536319732666017, 0.03626790237426758, 0.0325305290222168, 0.030698528289794923, 0.029940095901489258, 0.029350496292114257, 0.02916147232055664, 0.029086847305297852, 0.028969856262207033, 0.02893824005126953, 0.029144704818725584, 0.029096351623535157, 0.028970975875854493, 0.029014047622680665, 0.029353952407836913, 0.029257280349731445, 0.029337312698364256, 0.029275007247924804, 0.029128543853759764, 0.02918524742126465, 0.029270368576049803, 0.02921628761291504, 0.029244287490844727, 0.02931715202331543, 0.029336639404296875, 0.029342655181884766, 0.02929804801940918, 0.02913046455383301, 0.02920307159423828, 0.029147424697875977, 0.029255231857299804, 0.02921072006225586, 0.029419424057006836, 0.029489599227905273, 0.02970729637145996, 0.029717344284057617, 0.02992959976196289, 0.030055744171142578, 0.029827680587768555, 0.029708383560180664, 0.029620223999023438, 0.029474815368652343, 0.02939632034301758, 0.029360416412353516, 0.029501407623291016, 0.029456064224243163, 0.029294656753540038, 0.029303455352783205, 0.02939084815979004, 0.029369888305664064, 0.029388383865356447, 0.02941632080078125, 0.029422687530517577, 0.02929142379760742, 0.02934377670288086, 0.029288415908813478, 0.0293621768951416, 0.02936422348022461, 0.02941935920715332, 0.02948726463317871, 0.029561887741088866, 0.029548608779907226, 0.029438880920410155, 0.029323135375976563, 0.03735049438476563, 0.03290796661376953, 0.030916383743286133, 0.030052000045776368, 0.0295263671875, 0.029189727783203126, 0.02896281623840332, 0.028950687408447265, 0.02914371109008789, 0.028958751678466798, 0.028917728424072267, 0.02919366455078125, 0.029168191909790038, 0.02913689613342285, 0.02918191909790039, 0.02917731285095215, 0.02903481674194336, 0.029083295822143553, 0.029106752395629883, 0.029151264190673827, 0.029159423828125, 0.029337600708007814, 0.029318464279174804, 0.029303487777709962, 0.02928755187988281, 0.029305599212646485, 0.02933919906616211, 0.029268543243408204, 0.02921660804748535, 0.029163520812988283, 0.02913865661621094, 0.029292287826538085, 0.029337791442871092, 0.02962838363647461, 0.02978006362915039, 0.03020841598510742, 0.029736991882324218, 0.02979638481140137, 0.029757312774658203, 0.029572256088256837, 0.02951468849182129, 0.029575168609619142, 0.02953830337524414, 0.0295251522064209, 0.029354496002197264, 0.029317472457885744, 0.029308576583862305, 0.029513536453247072, 0.029458976745605468, 0.029343391418457033, 0.029290847778320313, 0.029261760711669922, 0.029345855712890626, 0.02938265609741211, 0.02938265609741211, 0.0294072322845459, 0.029389919281005858, 0.029390815734863282, 0.02955708885192871, 0.02969046401977539, 0.02939449691772461, 0.029495775222778322, 0.02957923126220703, 0.0359403190612793, 0.03200185775756836, 0.030426368713378907, 0.029723615646362306, 0.029251455307006836, 0.029151327133178712, 0.02896281623840332, 0.029183359146118165, 0.029143680572509767, 0.029128416061401367, 0.029135040283203125, 0.028992639541625977, 
0.029248416900634764, 0.028907583236694335, 0.028923871994018555, 0.02891779136657715, 0.028893087387084963, 0.029120576858520507, 0.029068384170532226, 0.029189056396484375, 0.02923107147216797, 0.02928748893737793, 0.029332447052001952, 0.029449344635009766, 0.0291910400390625, 0.029149183273315428, 0.029100032806396486, 0.029296640396118165, 0.029243423461914064, 0.029194047927856445, 0.029200544357299806, 0.029214719772338867, 0.02943292808532715, 0.029526784896850587, 0.029651103973388673, 0.029808639526367187, 0.02976464080810547, 0.02970899200439453, 0.029622528076171876, 0.029577247619628905, 0.029593599319458007, 0.029470367431640623, 0.029428064346313478, 0.029476863861083984, 0.029499391555786132, 0.02949660873413086, 0.02954310417175293, 0.02947385597229004, 0.029334495544433594, 0.02931920051574707, 0.02930838394165039, 0.02950809669494629, 0.02942060852050781, 0.029352127075195314, 0.029471456527709963, 0.029452159881591798, 0.029546655654907227, 0.029479936599731447, 0.029541376113891602, 0.029502496719360352, 0.029496288299560545, 0.029511327743530272, 0.029564416885375977, 0.03757638549804688, 0.032578174591064456, 0.03095756721496582, 0.03017318344116211, 0.02932905578613281, 0.029178207397460937, 0.02903615951538086, 0.028895488739013674, 0.029014144897460938, 0.02916761589050293, 0.029231103897094726, 0.02906425666809082, 0.029050912857055664, 0.029205408096313477, 0.029294591903686523, 0.029249216079711916, 0.02933955192565918, 0.029401504516601562, 0.0291778564453125, 0.029360128402709962, 0.029165567398071288, 0.029691808700561522, 0.029230623245239257, 0.02938243293762207, 0.02933635139465332, 0.029299840927124024, 0.029264768600463866, 0.029171712875366212, 0.02926358413696289, 0.02927440071105957, 0.029378559112548826, 0.02920159912109375, 0.029227840423583985, 0.029445728302001952, 0.029682079315185548, 0.029677568435668947, 0.029825023651123047, 0.029837312698364257, 0.029705888748168947, 0.029671775817871095, 0.02946393585205078, 0.02940582466125488, 0.02929254341125488, 0.029437952041625977, 0.029387968063354492, 0.02951865577697754, 0.029435903549194335, 0.029442047119140623, 0.02953830337524414, 0.029609535217285158, 0.02962063980102539, 0.029638687133789064, 0.02950553512573242, 0.029529247283935547, 0.029520736694335938, 0.029482336044311525, 0.029598367691040038, 0.029424831390380858, 0.02948588752746582, 0.02942742347717285, 0.029514015197753905, 0.02954457664489746, 0.02955251121520996, 0.036372577667236325, 0.032198078155517576, 0.030544448852539062, 0.0298024959564209, 0.02949836730957031, 0.02915603256225586, 0.02915670394897461, 0.029078527450561522, 0.029083520889282226, 0.02898543930053711, 0.029075456619262696, 0.02927824020385742, 0.029338592529296874, 0.029367200851440428, 0.02917740821838379, 0.029170207977294922, 0.029081600189208984, 0.02924457550048828, 0.02925859260559082, 0.029347871780395506, 0.029357952117919923, 0.029431552886962892, 0.029384191513061524, 0.029286272048950197, 0.029172895431518554, 0.029178848266601564, 0.029148000717163086, 0.029144895553588866, 0.029161663055419923, 0.02921062469482422, 0.029237247467041014, 0.029373727798461913, 0.02959228706359863, 0.029837312698364257, 0.02981888008117676, 0.02981888008117676, 0.029734624862670898, 0.029739295959472656, 0.029870080947875976, 0.029513759613037108, 0.02962428855895996, 0.029466623306274413, 0.02944118309020996, 0.029561695098876954, 0.02954854393005371, 0.029605920791625977, 0.029431520462036134, 0.029397024154663085, 0.02937001609802246, 0.029376415252685546, 
0.02957993507385254, 0.02944000053405762, 0.02949862480163574, 0.029641471862792968, 0.02962227249145508, 0.029576768875122072, 0.029505983352661132, 0.02949241638183594, 0.02946950340270996, 0.02959676742553711, 0.029451168060302735, 0.02951545524597168, 0.029436223983764647]",tokens/s,33.86843405454676,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ 
self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 87865 has 14.69 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 1.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.358144,1903.099904,0.0,1507.852288,1469.840384,s,1,7.45740087890625,7.45740087890625,0.0,7.45740087890625,7.45740087890625,7.45740087890625,7.45740087890625,[7.45740087890625],,kWh,9.550118462531524e-06,1.0461776287751266e-06,3.1608358620066612e-06,1.3757131953313311e-05,,MB,1197.367296,1947.140096,0.0,1539.309568,1426.272256,s,10,0.24179651069641114,0.024179651069641112,0.00046095716185577874,0.024065407752990722,0.024881748008728027,0.024938137912750243,0.024983249835968016,"[0.024869216918945312, 0.024114591598510742, 0.024016223907470702, 0.023701440811157225, 0.02364329528808594, 0.02429817581176758, 0.024511583328247072, 0.023997503280639647, 0.023649951934814454, 0.02499452781677246]",tokens/s,10587.414982237777,kWh,7.189442209051912e-07,7.92598589746777e-08,4.742566738768366e-07,1.2724607537567055e-06,tokens/kWh,201184986.8408179,MB,1225.703424,1955.528704,0.0,1547.698176,1426.274816,s,10,13.2709794921875,1.32709794921875,0.012723619353887257,1.3269437866210936,1.3424001220703126,1.3441677124023437,1.3455817846679687,"[1.345935302734375, 1.34200732421875, 1.32387158203125, 1.3224248046875, 1.32552392578125, 1.3310013427734375, 1.316973876953125, 1.2987156982421875, 1.3283636474609375, 1.3361619873046875]",tokens/s,47.472004637704025,kWh,3.917520655284144e-05,4.3206234925896856e-06,1.9435648142923713e-05,6.293147818835484e-05,tokens/kWh,1001088.8320697009,,s,630,13.268557798385622,0.021061202854580345,0.0005340236860464876,0.021051663398742676,0.02142468090057373,0.021588804149627685,0.022359155750274674,"[0.022026239395141603, 0.021432319641113282, 0.021514240264892577, 0.021436416625976562, 0.021334016799926758, 0.021228927612304688, 0.021269119262695313, 0.021651456832885742, 0.021456544876098632, 0.02138083267211914, 0.02136284828186035, 0.021461471557617188, 0.021370304107666015, 0.021455167770385742, 
0.02122368049621582, 0.02122137641906738, 0.021720447540283204, 0.02149849510192871, 0.0214835205078125, 0.021383167266845703, 0.021485567092895508, 0.021538528442382812, 0.021433664321899415, 0.02170364761352539, 0.02168832015991211, 0.021344095230102538, 0.02143657684326172, 0.021575199127197266, 0.021523136138916016, 0.02148464012145996, 0.021414432525634765, 0.02134809684753418, 0.021604768753051756, 0.021387264251708983, 0.02137455940246582, 0.02182796859741211, 0.021495840072631837, 0.021428224563598632, 0.02123980712890625, 0.02110054397583008, 0.02116307258605957, 0.021313983917236327, 0.02124236869812012, 0.021227519989013673, 0.021116512298583984, 0.021259967803955077, 0.021236448287963866, 0.021784576416015625, 0.02125929641723633, 0.02153536033630371, 0.021139808654785156, 0.021413888931274414, 0.021265663146972657, 0.021144096374511718, 0.0210229434967041, 0.02096291160583496, 0.020980127334594728, 0.020965375900268556, 0.020914176940917968, 0.021102239608764648, 0.021094751358032227, 0.021104639053344726, 0.021090303421020508, 0.021079999923706055, 0.02112339210510254, 0.020805376052856445, 0.02085433578491211, 0.021340608596801758, 0.021259263992309572, 0.02145587158203125, 0.021202720642089844, 0.023353567123413087, 0.02206697654724121, 0.021454944610595703, 0.021168256759643556, 0.021327871322631836, 0.0211079044342041, 0.02113817596435547, 0.021192768096923827, 0.020912128448486327, 0.020938688278198243, 0.02079484748840332, 0.02061190414428711, 0.021284223556518555, 0.021293472290039063, 0.021104639053344726, 0.02103059196472168, 0.021040800094604493, 0.020920991897583008, 0.02093680000305176, 0.020950464248657225, 0.021010303497314455, 0.021043807983398437, 0.021337984085083007, 0.02102694320678711, 0.02079280090332031, 0.021662080764770508, 0.025331232070922853, 0.026960512161254883, 0.02122956848144531, 0.02137855911254883, 0.021315488815307617, 0.021262943267822267, 0.0210512638092041, 0.020803712844848634, 0.021209087371826172, 0.021309440612792968, 0.02103232002258301, 0.020996736526489257, 0.020922367095947265, 0.02088755226135254, 0.02093699264526367, 0.02055548858642578, 0.020700319290161133, 0.021110784530639647, 0.02058121681213379, 0.02105548858642578, 0.021198848724365234, 0.021064735412597655, 0.020975967407226563, 0.02099622344970703, 0.022031871795654297, 0.02110700798034668, 0.021010847091674806, 0.02109644889831543, 0.021034528732299804, 0.02124835205078125, 0.020961280822753905, 0.020905376434326172, 0.021148256301879883, 0.02103891181945801, 0.02094099235534668, 0.021523584365844728, 0.021480415344238283, 0.021116256713867188, 0.020944799423217773, 0.02095580863952637, 0.020944896697998046, 0.02151628875732422, 0.02109443283081055, 0.021051359176635743, 0.021235712051391603, 0.021016576766967773, 0.02101068878173828, 0.02089049530029297, 0.02106662368774414, 0.021137344360351563, 0.020957279205322265, 0.021012447357177735, 0.021362176895141603, 0.021256704330444336, 0.02097727966308594, 0.020906368255615235, 0.020951040267944337, 0.020776256561279297, 0.020839103698730467, 0.020950559616088868, 0.02085321617126465, 0.021179840087890624, 0.021162559509277343, 0.02105465507507324, 0.02100716781616211, 0.020960704803466797, 0.02099827194213867, 0.02081167984008789, 0.020846303939819337, 0.020785087585449218, 0.02076144027709961, 0.021436288833618165, 0.02103321647644043, 0.020999807357788086, 0.021063968658447264, 0.02097385597229004, 0.020872800827026368, 0.020930688858032228, 0.02088960075378418, 0.021100191116333007, 0.02098601531982422, 
0.021020063400268553, 0.020968223571777345, 0.020941951751708984, 0.021212095260620116, 0.021241792678833006, 0.020951040267944337, 0.02072985649108887, 0.020568063735961914, 0.020763904571533202, 0.020595455169677736, 0.02073798370361328, 0.02094179153442383, 0.02105548858642578, 0.02089593505859375, 0.02115155220031738, 0.020891008377075197, 0.0209619197845459, 0.021011775970458984, 0.02120979118347168, 0.020760576248168947, 0.020758367538452147, 0.021228992462158203, 0.021272512435913087, 0.021158687591552733, 0.021071231842041016, 0.02115238380432129, 0.02105548858642578, 0.021157888412475585, 0.021129215240478515, 0.021188608169555666, 0.02121014404296875, 0.021246015548706056, 0.021061824798583983, 0.021017311096191406, 0.021016576766967773, 0.02090943908691406, 0.021265024185180663, 0.020725759506225586, 0.02184828758239746, 0.021962528228759767, 0.02106883239746094, 0.021023296356201173, 0.02131395149230957, 0.021251583099365236, 0.020910591125488282, 0.020842144012451172, 0.020782943725585937, 0.020466272354125976, 0.020527008056640626, 0.020671743392944336, 0.020656896591186524, 0.020639360427856444, 0.020590976715087892, 0.02063564872741699, 0.02101862335205078, 0.02066227149963379, 0.020756479263305663, 0.020744192123413087, 0.02086499214172363, 0.0209715518951416, 0.021200895309448242, 0.021444223403930665, 0.020832704544067382, 0.021155296325683595, 0.021078496932983398, 0.021028480529785155, 0.020983776092529296, 0.020869535446166994, 0.021110784530639647, 0.0209815673828125, 0.020721855163574218, 0.02080486488342285, 0.020568704605102538, 0.020717727661132813, 0.020752992630004883, 0.020731103897094726, 0.020685600280761718, 0.020612415313720704, 0.02085139274597168, 0.020483776092529295, 0.020627775192260743, 0.020531200408935548, 0.021258207321166993, 0.021257280349731445, 0.020579296112060545, 0.020551679611206054, 0.020962848663330078, 0.02097609519958496, 0.020636991500854494, 0.02047648048400879, 0.02060710334777832, 0.020700384140014648, 0.02068355178833008, 0.020574207305908202, 0.020587648391723633, 0.020505472183227538, 0.02064588737487793, 0.020739776611328125, 0.02051513671875, 0.02065433692932129, 0.020795072555541992, 0.0211661434173584, 0.02088140869140625, 0.020647136688232422, 0.02085148811340332, 0.021153791427612305, 0.021284160614013673, 0.021045951843261718, 0.021292095184326173, 0.02120182418823242, 0.021118175506591796, 0.02118943977355957, 0.02108415985107422, 0.021137407302856445, 0.020985855102539062, 0.021118080139160156, 0.021222015380859376, 0.021134719848632813, 0.02297702407836914, 0.023402143478393554, 0.02119548797607422, 0.021141504287719725, 0.02172438430786133, 0.021424768447875976, 0.021090463638305666, 0.02127872085571289, 0.021196512222290038, 0.021288448333740235, 0.021366880416870116, 0.02143712043762207, 0.021191743850708007, 0.02135545539855957, 0.02110873603820801, 0.02112054443359375, 0.021226976394653322, 0.021029888153076173, 0.02122137641906738, 0.021130720138549806, 0.02106422424316406, 0.020915296554565428, 0.0209366397857666, 0.021226720809936525, 0.021135007858276367, 0.021095903396606445, 0.021015167236328125, 0.021186016082763673, 0.02110518455505371, 0.021086208343505858, 0.020981279373168946, 0.021204832077026368, 0.020979711532592774, 0.021181055068969726, 0.02099567985534668, 0.021156543731689452, 0.021100255966186525, 0.020989952087402345, 0.021338111877441408, 0.021346303939819337, 0.021014303207397462, 0.020955360412597657, 0.020813695907592772, 0.020944063186645507, 0.020884288787841796, 0.020983936309814454, 
0.020918272018432618, 0.021164031982421876, 0.02100169563293457, 0.021025312423706054, 0.020973567962646485, 0.022478496551513672, 0.0210883846282959, 0.02113148880004883, 0.021165887832641603, 0.02077033615112305, 0.021172895431518554, 0.021301248550415038, 0.021159936904907226, 0.02159993553161621, 0.021086208343505858, 0.021263839721679688, 0.02109014320373535, 0.021152767181396484, 0.02088707160949707, 0.02089219284057617, 0.021107776641845703, 0.021039424896240236, 0.02105196762084961, 0.020973472595214843, 0.020862335205078124, 0.02087731170654297, 0.02116387176513672, 0.021119808197021483, 0.021264032363891603, 0.021094816207885742, 0.021003744125366212, 0.021641408920288086, 0.02107145690917969, 0.021463327407836914, 0.021072063446044922, 0.02181353569030762, 0.021118976593017577, 0.02106368064880371, 0.021004287719726563, 0.02129305648803711, 0.021025920867919923, 0.020757375717163087, 0.021223424911499023, 0.021141504287719725, 0.021190656661987304, 0.02123788833618164, 0.020848512649536133, 0.020865119934082032, 0.021062944412231447, 0.02130803108215332, 0.02098726463317871, 0.020863616943359375, 0.020983808517456053, 0.02106368064880371, 0.02106368064880371, 0.021126304626464844, 0.020957504272460938, 0.02071401596069336, 0.021174272537231444, 0.02119411277770996, 0.020829919815063477, 0.020660863876342774, 0.02079977607727051, 0.020815872192382814, 0.02086911964416504, 0.020914176940917968, 0.020997472763061523, 0.020768960952758788, 0.02086960029602051, 0.020570175170898437, 0.02060076713562012, 0.02065203285217285, 0.020559648513793945, 0.020464096069335937, 0.02050534439086914, 0.0202926082611084, 0.020352575302124025, 0.02063961601257324, 0.020547264099121092, 0.020590976715087892, 0.02059913635253906, 0.020692928314208985, 0.020623231887817384, 0.021147775650024413, 0.020610464096069335, 0.02085327911376953, 0.02086742401123047, 0.022033407211303712, 0.021386175155639647, 0.020756479263305663, 0.020999231338500977, 0.020482559204101563, 0.020644512176513672, 0.020692768096923827, 0.02065344047546387, 0.021655263900756835, 0.02179574394226074, 0.02090598487854004, 0.02077516746520996, 0.02072313690185547, 0.02077948760986328, 0.020815328598022462, 0.020680351257324217, 0.020628416061401367, 0.02059267234802246, 0.020736000061035157, 0.020951040267944337, 0.020602880477905275, 0.02046976089477539, 0.02041801643371582, 0.020910144805908203, 0.020738079071044923, 0.02066486358642578, 0.02071072006225586, 0.02058700752258301, 0.020574304580688478, 0.020512960433959962, 0.020383455276489257, 0.02050444793701172, 0.020537567138671876, 0.020669536590576174, 0.020458080291748046, 0.02045574378967285, 0.020371456146240235, 0.020916223526000977, 0.020980768203735352, 0.02090902328491211, 0.020522655487060545, 0.020424896240234375, 0.020766271591186523, 0.020770591735839845, 0.02085750389099121, 0.020574207305908202, 0.020508575439453124, 0.020788831710815428, 0.020699167251586915, 0.020494976043701173, 0.020407712936401368, 0.020338848114013673, 0.020516895294189454, 0.020468128204345702, 0.02047337532043457, 0.020463327407836913, 0.02056012725830078, 0.02071340751647949, 0.020451904296875, 0.020401567459106446, 0.02039664077758789, 0.020508672714233397, 0.020762624740600585, 0.020488191604614257, 0.020330495834350586, 0.020395296096801758, 0.02060969543457031, 0.0208590087890625, 0.020745344161987304, 0.020795743942260744, 0.020732383728027343, 0.020867071151733398, 0.020549503326416016, 0.02058457565307617, 0.02040575981140137, 0.02074425506591797, 0.020609024047851563, 
0.020540672302246092, 0.020695808410644532, 0.020545183181762697, 0.020485599517822264, 0.0205402889251709, 0.020602880477905275, 0.02068252754211426, 0.020551776885986327, 0.020881311416625976, 0.02068876838684082, 0.020467136383056642, 0.02041539192199707, 0.02062745666503906, 0.020537343978881836, 0.020649311065673828, 0.020510463714599608, 0.020435903549194338, 0.020663455963134767, 0.020663103103637694, 0.020562112808227537, 0.02059654426574707, 0.020676607131958007, 0.02102272033691406, 0.02097260856628418, 0.021095359802246094, 0.021016031265258788, 0.021221920013427733, 0.02112512016296387, 0.021133312225341795, 0.021098495483398438, 0.02099190330505371, 0.02112870407104492, 0.021363296508789063, 0.021151296615600584, 0.021913055419921876, 0.020964319229125977, 0.021319679260253906, 0.021409887313842774, 0.02133795166015625, 0.021112863540649413, 0.021317087173461913, 0.021297727584838867, 0.021323776245117186, 0.021121023178100586, 0.021300960540771484, 0.021202720642089844, 0.02109491157531738, 0.02124799919128418, 0.021336063385009766, 0.021155839920043946, 0.021120351791381838, 0.021146272659301756, 0.020997343063354493, 0.02129961585998535, 0.021055871963500977, 0.021151199340820312, 0.021424671173095704, 0.021120607376098634, 0.020953088760375976, 0.021729696273803712, 0.027923967361450194, 0.021534496307373047, 0.02133577537536621, 0.021623296737670897, 0.021307008743286133, 0.02121561622619629, 0.02126006317138672, 0.021159999847412108, 0.02136284828186035, 0.021358591079711914, 0.02147737693786621, 0.021180128097534178, 0.021145376205444336, 0.021199359893798828, 0.021036575317382813, 0.021123552322387697, 0.02127667236328125, 0.021086208343505858, 0.021186176300048827, 0.02095552062988281, 0.021204992294311522, 0.021082111358642578, 0.0218787841796875, 0.021235712051391603, 0.021211135864257814, 0.02111510467529297, 0.021208032608032227, 0.021093183517456055, 0.021173664093017578, 0.021135040283203125, 0.021244831085205078, 0.021062719345092774, 0.020973600387573243, 0.02145987129211426, 0.021168127059936523, 0.021166080474853514, 0.02125801658630371, 0.021291231155395506, 0.02122547149658203, 0.021001983642578125, 0.02092460823059082, 0.021215200424194336, 0.021076095581054687, 0.02138502311706543, 0.02113580894470215, 0.021075775146484375, 0.021067487716674806, 0.021385120391845702, 0.02118684768676758, 0.02105094337463379, 0.021217727661132814, 0.02169036865234375, 0.020946943283081054, 0.021184511184692383, 0.021247104644775392, 0.021187456130981445, 0.02109644889831543, 0.02107948875427246, 0.021115455627441406, 0.021202943801879884, 0.02109235191345215, 0.02109596824645996, 0.021063840866088868, 0.02117206382751465]",tokens/s,47.48066892971985,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.018176,4675.534848,0.0,4280.287232,4115.121152,s,1,8.0935927734375,8.0935927734375,0.0,8.0935927734375,8.0935927734375,8.0935927734375,8.0935927734375,[8.0935927734375],,kWh,1.0311723070882786e-05,1.130143968225839e-06,4.96389286000487e-06,1.6405759899113496e-05,,MB,1174.614016,4983.816192,0.0,4575.985664,4408.408064,s,10,0.4055550994873047,0.04055550994873047,0.0018906092084394035,0.04021809577941894,0.04169289970397949,0.04372158527374267,0.04534453372955322,"[0.04575027084350586, 0.04020310211181641, 0.040419647216796875, 0.03980409622192383, 0.04040787124633789, 0.03984611129760742, 0.04023308944702148, 0.039516319274902345, 0.04124208068847656, 0.03813251113891602]",tokens/s,6312.335865672272,kWh,1.4787861881734404e-06,1.6299069460646011e-07,9.893933279898385e-07,2.631170210769739e-06,tokens/kWh,97295111.86777543,MB,1203.552256,4983.816192,0.0,4575.985664,4408.410624,s,10,13.95576257324219,1.395576257324219,0.007290252780742853,1.3983818359375,1.4026697021484376,1.40456220703125,1.4060762109375,"[1.38884130859375, 1.4064547119140625, 1.39838720703125, 1.399352294921875, 1.39837646484375, 1.395798828125, 1.38310400390625, 1.38431884765625, 1.4022491455078125, 1.3988797607421875]",tokens/s,45.14264245279712,kWh,4.0314031168912836e-05,4.44631747783814e-06,2.6443989729811237e-05,7.120433837656222e-05,tokens/kWh,884777.5491828349,,s,630,13.953395492553726,0.022148246813577317,0.00035882815347562457,0.02210428810119629,0.022449370384216308,0.022625327491760255,0.023180296020507814,"[0.02265292739868164, 0.022458368301391602, 0.022116352081298828, 0.022278144836425783, 0.022360063552856444, 0.022321151733398437, 0.02226380729675293, 0.022046720504760742, 0.022244543075561524, 0.02204960060119629, 0.022237184524536133, 0.022209920883178712, 0.02222496032714844, 0.022226943969726562, 0.02204444885253906, 0.022170400619506835, 0.022382591247558595, 0.022267520904541014, 0.022343263626098633, 0.022112703323364256, 0.022094079971313477, 0.022316415786743163, 0.022466272354125977, 0.022445056915283205, 0.022089696884155272, 0.02208736038208008, 0.022132863998413087, 0.0219486083984375, 0.021932064056396486, 0.02191564750671387, 0.022009855270385743, 0.02188083267211914, 0.022001440048217774, 0.021975263595581055, 0.02200371170043945, 0.021939807891845704, 0.021969120025634767, 0.021907936096191405, 0.022162431716918944, 0.021955039978027342, 0.022098175048828127, 0.021875999450683595, 0.02200649642944336, 0.022117759704589842, 0.022196863174438478, 0.022147071838378905, 0.02216524887084961, 0.021815200805664063, 0.021801120758056642, 0.021684415817260744, 0.021716575622558593, 0.021722528457641603, 0.021619712829589844, 0.021985279083251954, 0.02159539222717285, 0.021792863845825194, 0.021864511489868163, 0.021800928115844727, 0.021765888214111326, 0.021699455261230467, 0.021681247711181642, 0.021610719680786133, 0.021626815795898438, 0.022214208602905273, 0.022192960739135743, 0.02190889549255371, 0.022146879196166994, 0.021861120223999022, 0.021825536727905274, 0.02194384002685547, 0.02178233528137207, 0.02169923210144043, 0.021954559326171876, 0.022150943756103516, 0.022476287841796876, 0.022123008728027343, 0.02239094352722168, 0.02237392044067383, 0.02225974464416504, 0.02240768051147461, 0.022331552505493166, 0.022352863311767578, 0.022196992874145508, 0.02231513595581055, 0.022515520095825196, 0.022231231689453124, 0.022837247848510742, 0.022284095764160156, 0.022325439453125, 0.022168607711791993, 0.02208867263793945, 
0.022433759689331055, 0.02254198455810547, 0.02267788887023926, 0.022437887191772463, 0.02244812774658203, 0.022417407989501953, 0.022109792709350585, 0.02219254493713379, 0.022196224212646484, 0.02241244888305664, 0.022254432678222656, 0.022194496154785158, 0.022791807174682616, 0.022763456344604492, 0.02220044708251953, 0.022527999877929687, 0.022257471084594728, 0.022603391647338867, 0.022628927230834962, 0.022449216842651366, 0.02219919967651367, 0.02423811149597168, 0.02319491195678711, 0.0222194881439209, 0.022235071182250977, 0.02227168083190918, 0.022226783752441408, 0.02242729568481445, 0.02226880073547363, 0.022152671813964842, 0.022128223419189453, 0.022122592926025392, 0.02219036865234375, 0.022274623870849608, 0.0221265926361084, 0.022955743789672852, 0.022495487213134765, 0.02235411262512207, 0.02233718490600586, 0.022134048461914062, 0.02235215950012207, 0.02209404754638672, 0.022074911117553712, 0.022256511688232422, 0.02217363166809082, 0.02226131248474121, 0.022092159271240235, 0.02210147285461426, 0.02214476776123047, 0.022090656280517578, 0.02216655921936035, 0.022069280624389648, 0.02239788818359375, 0.022607872009277344, 0.022380352020263672, 0.02234796714782715, 0.022147071838378905, 0.022273279190063475, 0.022620927810668944, 0.022237184524536133, 0.022084703445434572, 0.02207836723327637, 0.022306528091430664, 0.02234956741333008, 0.022004255294799803, 0.022070816040039062, 0.022214719772338867, 0.022249631881713867, 0.021958335876464844, 0.02227801513671875, 0.022790304183959963, 0.02210256004333496, 0.022212608337402344, 0.02211840057373047, 0.022161407470703123, 0.02230067253112793, 0.021988447189331056, 0.02203887939453125, 0.021936704635620117, 0.022228992462158204, 0.022093856811523437, 0.022091392517089845, 0.021924192428588868, 0.022145023345947267, 0.021914623260498048, 0.022135295867919923, 0.022516223907470705, 0.022202495574951173, 0.02207935905456543, 0.022138879776000975, 0.02202822494506836, 0.022083648681640623, 0.02217091178894043, 0.02201260757446289, 0.02203241539001465, 0.02202956771850586, 0.021977247238159178, 0.021924415588378907, 0.022384639739990234, 0.022173696517944336, 0.02276118469238281, 0.022388832092285156, 0.022042816162109374, 0.02222447967529297, 0.022032800674438476, 0.02247065544128418, 0.02209526443481445, 0.022046304702758788, 0.022004735946655272, 0.02225276756286621, 0.022020896911621093, 0.021859807968139647, 0.021769952774047852, 0.022174528121948242, 0.021968896865844727, 0.021977088928222657, 0.022476512908935545, 0.022429983139038087, 0.02226121520996094, 0.02219910430908203, 0.02221232032775879, 0.022001407623291017, 0.022300928115844727, 0.022155263900756835, 0.02197292709350586, 0.02204579162597656, 0.022340576171875, 0.02244607925415039, 0.02197305679321289, 0.02193939208984375, 0.02189798355102539, 0.022087200164794922, 0.022317535400390626, 0.022386688232421875, 0.022001663208007814, 0.022437887191772463, 0.022466527938842774, 0.02238377571105957, 0.022278848648071288, 0.02201747131347656, 0.022186111450195313, 0.022233728408813477, 0.024420352935791017, 0.02215443229675293, 0.021907360076904296, 0.022097824096679687, 0.022471263885498048, 0.02209833526611328, 0.022478847503662108, 0.02235148811340332, 0.02224985694885254, 0.02223695945739746, 0.02208585548400879, 0.021927967071533203, 0.02222831916809082, 0.02212723159790039, 0.022094976425170897, 0.02211520004272461, 0.02207744026184082, 0.02208358383178711, 0.021825536727905274, 0.02224127960205078, 0.022147071838378905, 0.02266111946105957, 
0.022267871856689454, 0.022077472686767578, 0.022180864334106445, 0.022019168853759766, 0.02209721565246582, 0.022114591598510744, 0.02209823989868164, 0.021999616622924805, 0.021958784103393556, 0.022101600646972655, 0.02198361587524414, 0.021899168014526366, 0.02199660873413086, 0.02198624038696289, 0.022955392837524412, 0.025219711303710937, 0.02224892807006836, 0.02210665512084961, 0.02208345603942871, 0.022214784622192382, 0.022169599533081053, 0.0220446720123291, 0.022003679275512694, 0.022013343811035157, 0.022015775680541992, 0.022168415069580078, 0.02205695915222168, 0.02176582336425781, 0.022004032135009767, 0.02287958335876465, 0.022302944183349608, 0.02210652732849121, 0.022126623153686523, 0.02214236831665039, 0.022057567596435547, 0.02209587287902832, 0.02208358383178711, 0.021893119812011717, 0.022255136489868165, 0.022125024795532227, 0.02214499282836914, 0.022173728942871094, 0.022066560745239258, 0.021936128616333008, 0.022063615798950196, 0.02314451217651367, 0.022220191955566407, 0.022147743225097657, 0.022246944427490235, 0.022104543685913088, 0.02239641571044922, 0.022266368865966796, 0.022054784774780272, 0.02197657585144043, 0.022050975799560547, 0.02203286361694336, 0.02230790328979492, 0.022092735290527344, 0.02200371170043945, 0.021759456634521484, 0.02286534309387207, 0.022354496002197265, 0.022920543670654298, 0.023920927047729492, 0.022344064712524415, 0.022279680252075194, 0.022114816665649413, 0.022081087112426758, 0.02207583999633789, 0.022041856765747072, 0.021721216201782228, 0.021979743957519532, 0.022044160842895507, 0.022157855987548828, 0.022300575256347658, 0.022467807769775392, 0.02224371147155762, 0.022094335556030274, 0.022091775894165038, 0.02204857635498047, 0.02233145523071289, 0.02224550437927246, 0.02212224006652832, 0.022069503784179687, 0.022219903945922853, 0.02267225646972656, 0.02209382438659668, 0.02209791946411133, 0.02211020851135254, 0.022171648025512695, 0.022589439392089843, 0.022351295471191406, 0.022153823852539063, 0.02206480026245117, 0.02210028839111328, 0.022345632553100587, 0.022088863372802733, 0.022074304580688476, 0.02187468719482422, 0.022122432708740234, 0.021950527191162108, 0.021807104110717773, 0.02168422317504883, 0.021946176528930664, 0.022071487426757814, 0.021727231979370116, 0.021753856658935547, 0.022136831283569337, 0.022475807189941407, 0.022106271743774414, 0.021881248474121092, 0.02194063949584961, 0.02214816093444824, 0.022157472610473634, 0.022190879821777344, 0.02186444854736328, 0.021999616622924805, 0.022095903396606446, 0.022104032516479494, 0.021796415328979492, 0.021633472442626953, 0.021941568374633787, 0.022097663879394533, 0.022413055419921876, 0.022702655792236327, 0.022215744018554688, 0.022393407821655272, 0.021983135223388673, 0.022109760284423827, 0.022420352935791015, 0.022024192810058595, 0.02188287925720215, 0.021893119812011717, 0.021960704803466798, 0.022023616790771486, 0.02185273551940918, 0.021831424713134765, 0.02197491264343262, 0.022038911819458006, 0.022023359298706056, 0.02199193572998047, 0.022109983444213867, 0.022261632919311523, 0.022151872634887694, 0.02181100845336914, 0.021643423080444337, 0.021516000747680664, 0.02148748779296875, 0.02157814407348633, 0.021399551391601563, 0.021527999877929686, 0.02169913673400879, 0.021677152633666992, 0.021767072677612305, 0.021935840606689454, 0.021911840438842773, 0.021983232498168945, 0.022124544143676757, 0.02201580810546875, 0.021821632385253906, 0.0217509765625, 0.02183865547180176, 0.022749183654785156, 0.021745664596557617, 
0.0216944637298584, 0.02175721549987793, 0.02197372817993164, 0.021790464401245116, 0.021740928649902343, 0.021808223724365236, 0.021792543411254882, 0.02185420799255371, 0.02175574493408203, 0.021702816009521484, 0.021829120635986327, 0.021846527099609374, 0.02174550437927246, 0.02152668762207031, 0.021618688583374023, 0.021932031631469725, 0.022018400192260743, 0.02196553611755371, 0.021905887603759767, 0.02292732810974121, 0.023066816329956056, 0.022391103744506837, 0.024497823715209963, 0.022268287658691405, 0.021917535781860353, 0.02193401527404785, 0.02182476806640625, 0.022027103424072266, 0.022128288269042968, 0.02196054458618164, 0.0218240966796875, 0.021904991149902343, 0.02193040084838867, 0.022007167816162108, 0.021792415618896485, 0.021776351928710937, 0.021895999908447265, 0.0220960636138916, 0.02186240005493164, 0.021778432846069336, 0.02168160057067871, 0.02223161506652832, 0.021812223434448243, 0.02182655906677246, 0.021938175201416017, 0.021977088928222657, 0.02212819290161133, 0.021956031799316406, 0.021969919204711915, 0.0219238395690918, 0.02207846450805664, 0.022145792007446288, 0.021777887344360352, 0.021979936599731444, 0.021780479431152345, 0.022159231185913085, 0.021931360244750977, 0.02187696075439453, 0.021791296005249025, 0.021910879135131837, 0.022008480072021483, 0.021704736709594726, 0.021678047180175783, 0.02209334373474121, 0.021869024276733397, 0.021898719787597658, 0.021963167190551757, 0.021907167434692384, 0.022018463134765624, 0.0221441593170166, 0.021826400756835937, 0.021841983795166015, 0.021935359954833984, 0.021893407821655272, 0.021764127731323243, 0.021703039169311523, 0.02173516845703125, 0.021833984375, 0.022009855270385743, 0.02224051284790039, 0.022037248611450195, 0.02202009582519531, 0.021827583312988282, 0.02209382438659668, 0.021747711181640626, 0.021984256744384766, 0.02233907127380371, 0.02180339241027832, 0.02180726432800293, 0.021835136413574218, 0.021851743698120117, 0.02184239959716797, 0.02196329689025879, 0.022013952255249023, 0.02207139205932617, 0.02195027160644531, 0.02209187126159668, 0.021962751388549806, 0.022005184173583985, 0.02206979179382324, 0.021921087265014648, 0.021682912826538087, 0.021587968826293946, 0.021702495574951172, 0.021874528884887695, 0.02190745544433594, 0.02185843276977539, 0.021823680877685547, 0.021884288787841797, 0.021966880798339843, 0.02204323196411133, 0.022189760208129884, 0.02222876739501953, 0.022569503784179688, 0.022642688751220705, 0.022420799255371094, 0.02242953681945801, 0.02245075225830078, 0.022410560607910156, 0.022309856414794924, 0.022280191421508787, 0.02229212760925293, 0.02250992012023926, 0.022355520248413085, 0.02242195129394531, 0.022523551940917968, 0.022483295440673828, 0.02248294448852539, 0.022347776412963868, 0.022433792114257813, 0.022771711349487304, 0.022540288925170897, 0.023017471313476562, 0.025333696365356446, 0.02252364730834961, 0.022434112548828124, 0.022222848892211915, 0.02234377670288086, 0.022306175231933595, 0.02213532829284668, 0.022494207382202147, 0.0222873592376709, 0.022571008682250978, 0.02227609634399414, 0.02231430435180664, 0.022229696273803713, 0.02226585578918457, 0.022278144836425783, 0.02290278434753418, 0.02237811279296875, 0.022718175888061524, 0.02257695960998535, 0.02224006462097168, 0.022392736434936524, 0.022156991958618165, 0.022278560638427734, 0.0221265926361084, 0.021946367263793946, 0.022128639221191407, 0.02232035255432129, 0.02207619285583496, 0.02211020851135254, 0.02225904083251953, 0.022198944091796874, 0.022195711135864257, 
0.022424064636230468, 0.02232249641418457, 0.02231091117858887, 0.022221504211425783, 0.02234880065917969, 0.02225868797302246, 0.022220800399780274, 0.022032384872436524, 0.0220897274017334, 0.022347583770751953, 0.022028255462646484, 0.021968704223632812, 0.021718496322631835, 0.02202239990234375, 0.022022848129272462, 0.021807104110717773, 0.02240716743469238, 0.022345727920532226, 0.022360063552856444, 0.022169599533081053, 0.022109344482421876, 0.022214847564697264, 0.022245759963989257, 0.022308416366577148, 0.02216009521484375, 0.02210358428955078, 0.022251840591430663, 0.02206096076965332, 0.02216886329650879, 0.022059999465942382, 0.02201190376281738, 0.02204876708984375, 0.022486272811889647, 0.02236288070678711, 0.022103071212768555, 0.02207423973083496, 0.022132831573486327, 0.021972864151000977, 0.02225369644165039, 0.022024192810058595, 0.022251487731933594, 0.02222012710571289, 0.022184608459472656, 0.022147296905517578, 0.022095680236816406, 0.02214240074157715]",tokens/s,45.150300536969816,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.63296,516.882432,0.0,174.063616,172.57984,s,1,7.257505859375,7.257505859375,0.0,7.257505859375,7.257505859375,7.257505859375,7.257505859375,[7.257505859375],,kWh,4.556478499978311e-06,4.953557472553107e-07,1.993334927993806e-06,7.045169175227427e-06,,MB,1146.044416,642.711552,0.0,234.881024,215.589888,s,32,0.1822226881980896,0.0056944590061903,0.00011450180687880147,0.005671855926513671,0.005745030546188355,0.0059754145860672,0.006084908156394959,"[0.0060908799171447755, 0.005681056022644043, 0.005745632171630859, 0.005715551853179931, 0.005696095943450928, 0.0056877121925354, 0.005663871765136719, 0.00567142391204834, 0.005592832088470459, 0.005616000175476074, 0.005704319953918457, 0.0056828479766845704, 0.005642687797546387, 0.005672287940979004, 0.005628384113311767, 0.005597599983215332, 0.0056349759101867675, 0.006071616172790528, 0.0056938881874084475, 0.005676320075988769, 0.00573961591720581, 0.005615551948547363, 0.005654208183288574, 0.0056230401992797855, 0.005657887935638427, 0.005622687816619873, 0.005689407825469971, 0.0056431999206542965, 0.005635424137115479, 0.005896704196929932, 0.005598720073699951, 0.005680255889892578]",tokens/s,44955.982600227515,kWh,1.6538748928571e-07,1.823924301159991e-08,8.807845922562414e-08,2.7170519152293407e-07,tokens/kWh,942197675.9630357,MB,1174.319104,646.905856,0.0,239.075328,215.592448,s,32,10.018107879638674,0.3130658712387085,0.02405860689388873,0.3079739990234375,0.31471940002441406,0.3182858184814453,0.40618933898925796,"[0.317748779296875, 0.4453873291015625, 0.31407513427734374, 0.31141497802734375, 0.3147909851074219, 0.3109840698242187, 0.31894219970703125, 0.31339761352539064, 0.3118016357421875, 0.3115425109863281, 0.30998098754882814, 0.3064769897460938, 0.30851312255859376, 0.30743487548828125, 0.3044540710449219, 0.3086247863769531, 0.3065010986328125, 
0.3053970031738281, 0.3058859558105469, 0.30894720458984376, 0.3051858215332031, 0.3060135498046875, 0.3067017517089844, 0.30892779541015625, 0.3048805541992187, 0.30572418212890623, 0.30633673095703123, 0.30638446044921874, 0.3061524353027344, 0.3095575866699219, 0.30547216796875, 0.3044695129394531]",tokens/s,201.23560498858515,kWh,8.70659662397215e-06,9.601826387654262e-07,3.6837764101080034e-06,1.3350555672845578e-05,tokens/kWh,4718904.706576306,,s,2016,10.003623904705027,0.004962115032095758,0.0029838044893107014,0.0048530719280242916,0.005034496068954468,0.005185472130775451,0.0054494496345520015,"[0.005335008144378662, 0.005156896114349365, 0.006113247871398926, 0.005140511989593506, 0.005128479957580566, 0.005082464218139649, 0.0051625919342041015, 0.005249824047088623, 0.005186944007873535, 0.005190271854400635, 0.00512175989151001, 0.005148960113525391, 0.005062975883483887, 0.0050495038032531735, 0.0050161280632019046, 0.004988255977630615, 0.0049437122344970705, 0.005042975902557373, 0.00505401611328125, 0.00548089599609375, 0.0053678078651428224, 0.005316127777099609, 0.0050973758697509764, 0.005171807765960693, 0.005079296112060547, 0.005083168029785156, 0.00507257604598999, 0.005038047790527344, 0.004941664218902588, 0.004902207851409912, 0.004965472221374512, 0.005025311946868897, 0.005043807983398437, 0.004879007816314698, 0.0049192957878112795, 0.0049146881103515625, 0.004964863777160645, 0.004937727928161621, 0.0049482879638671875, 0.005002943992614746, 0.0050003199577331545, 0.00496892786026001, 0.004914656162261963, 0.004993887901306152, 0.004955935955047608, 0.004902688026428223, 0.00486630392074585, 0.004847775936126709, 0.004883679866790771, 0.005194655895233154, 0.004882463932037354, 0.004879776000976562, 0.004886911869049072, 0.004917439937591553, 0.0049192957878112795, 0.004900864124298096, 0.004859903812408447, 0.004896192073822021, 0.004878880023956299, 0.004853631973266602, 0.004871871948242188, 0.00483292818069458, 0.004859871864318847, 0.004777056217193603, 0.004970335960388183, 0.004855584144592285, 0.0048273601531982424, 0.0048492798805236815, 0.004950751781463623, 0.004964000225067139, 0.004903103828430176, 0.004861440181732178, 0.004894303798675537, 0.00490553617477417, 0.004898975849151612, 0.004882400035858154, 0.004952095985412598, 0.1387552032470703, 0.005401472091674805, 0.005199391841888428, 0.00506060791015625, 0.005013887882232666, 0.005037856101989746, 0.00493395185470581, 0.004888576030731201, 0.004888576030731201, 0.004884223937988281, 0.004909023761749267, 0.004917535781860351, 0.004878335952758789, 0.00495747184753418, 0.005005792140960693, 0.004892928123474121, 0.004951519966125488, 0.004859871864318847, 0.004841311931610108, 0.004903488159179688, 0.00487340784072876, 0.00488044786453247, 0.004862880229949951, 0.004931583881378174, 0.004874239921569825, 0.0050787200927734375, 0.005136703968048096, 0.005147712230682373, 0.0050308480262756345, 0.004970335960388183, 0.0049153599739074705, 0.005026815891265869, 0.00498748779296875, 0.004942240238189698, 0.00493065595626831, 0.0049136638641357425, 0.004888927936553955, 0.004894271850585937, 0.004946176052093506, 0.004860000133514404, 0.004935711860656738, 0.004913280010223389, 0.004900063991546631, 0.004886943817138672, 0.004940159797668457, 0.00496230411529541, 0.004872191905975342, 0.004876287937164306, 0.004867104053497315, 0.004776415824890137, 0.0048989119529724125, 0.004866079807281494, 0.004902400016784668, 0.004902431964874268, 0.0048670082092285155, 0.004904960155487061, 
0.004833280086517334, 0.004828288078308106, 0.004885312080383301, 0.004890367984771728, 0.00489299201965332, 0.004855264186859131, 0.0048788161277771, 0.004899199962615967, 0.0048923521041870114, 0.0049716482162475584, 0.004975552082061767, 0.0049558720588684085, 0.005007391929626465, 0.004966591835021973, 0.004997119903564453, 0.0050094079971313476, 0.0049459199905395506, 0.004982912063598633, 0.004900352001190185, 0.004868480205535888, 0.004839456081390381, 0.004927455902099609, 0.004859295845031739, 0.004894911766052246, 0.004909408092498779, 0.004888288021087647, 0.00495958423614502, 0.004885727882385254, 0.0048886399269104, 0.004910816192626953, 0.004890719890594483, 0.004879744052886963, 0.004866591930389404, 0.004873856067657471, 0.004958591938018799, 0.0050516161918640135, 0.005139232158660889, 0.005449728012084961, 0.005341343879699707, 0.005386079788208008, 0.00535964822769165, 0.005425119876861572, 0.00529372787475586, 0.005206431865692139, 0.005208000183105469, 0.005060544013977051, 0.005015552043914795, 0.005081151962280274, 0.0049658560752868655, 0.0049270401000976564, 0.004922336101531983, 0.004934783935546875, 0.0049552001953125, 0.004976672172546387, 0.004896543979644776, 0.005021599769592286, 0.004775167942047119, 0.00504256010055542, 0.005314943790435791, 0.005130239963531494, 0.0049658560752868655, 0.0050795841217041015, 0.005119999885559082, 0.005021376132965088, 0.004997087955474854, 0.004919648170471191, 0.004911104202270508, 0.004874495983123779, 0.004922368049621582, 0.00489577579498291, 0.0048657598495483395, 0.005146368026733399, 0.004888544082641602, 0.004985119819641113, 0.004907040119171142, 0.004890495777130127, 0.004884575843811035, 0.004928800106048584, 0.004899392127990723, 0.005003168106079102, 0.004884736061096191, 0.004861152172088623, 0.004852511882781982, 0.0048919358253479, 0.004856895923614502, 0.004873439788818359, 0.0049064640998840335, 0.005032927989959717, 0.004906527996063233, 0.004858528137207031, 0.00491871976852417, 0.004848000049591065, 0.004907008171081543, 0.004871712207794189, 0.00488047981262207, 0.004825471878051758, 0.00491315221786499, 0.004841567993164063, 0.004867072105407715, 0.004832159996032715, 0.004810272216796875, 0.004864319801330567, 0.0048784961700439455, 0.004875775814056397, 0.004857503890991211, 0.004896607875823975, 0.004891647815704346, 0.004863423824310303, 0.005185344219207764, 0.004884384155273438, 0.004978720188140869, 0.00506544017791748, 0.004985951900482178, 0.005026463985443115, 0.005034336090087891, 0.005033984184265137, 0.004984831809997559, 0.004904096126556397, 0.004926015853881836, 0.004779232025146484, 0.004861887931823731, 0.004993887901306152, 0.004859903812408447, 0.004888576030731201, 0.004925248146057129, 0.0050833277702331545, 0.004875775814056397, 0.004973055839538575, 0.005031199932098389, 0.0049927678108215336, 0.004959487915039062, 0.004952896118164063, 0.004956223964691162, 0.004952735900878906, 0.004950399875640869, 0.004857664108276367, 0.004859903812408447, 0.00492083215713501, 0.004880735874176026, 0.004896927833557129, 0.004878560066223145, 0.004982560157775879, 0.005209504127502441, 0.005435167789459228, 0.005477183818817138, 0.0054579200744628905, 0.005336895942687988, 0.005327040195465088, 0.0053244481086730956, 0.0052820158004760745, 0.005081471920013428, 0.005063456058502197, 0.005081920146942138, 0.005050528049468994, 0.004943871974945068, 0.004941823959350586, 0.004886144161224365, 0.004868480205535888, 0.004836800098419189, 0.004899519920349121, 0.0048536000251770016, 
0.004869984149932861, 0.004867712020874024, 0.005083712100982666, 0.0050854082107543944, 0.00502291202545166, 0.005022304058074951, 0.005050687789916992, 0.005029407978057862, 0.005017792224884033, 0.00497046422958374, 0.004941215991973877, 0.004868703842163086, 0.004884479999542236, 0.004855103969573974, 0.004875135898590088, 0.0048453760147094726, 0.004888415813446045, 0.004868256092071533, 0.004853087902069092, 0.004924191951751709, 0.00484768009185791, 0.0047924799919128415, 0.0048815679550170896, 0.004907872200012207, 0.004947840213775635, 0.004896895885467529, 0.005031871795654297, 0.004893760204315186, 0.004850687980651855, 0.004871327877044678, 0.004860511779785156, 0.004903359889984131, 0.004865856170654297, 0.004829504013061523, 0.0048429441452026364, 0.004833280086517334, 0.0048287358283996586, 0.004823647975921631, 0.004849215984344482, 0.004848159790039062, 0.0048352317810058595, 0.004851263999938965, 0.004847392082214356, 0.004864768028259277, 0.00496230411529541, 0.004942016124725342, 0.004910751819610596, 0.004872352123260498, 0.00496614408493042, 0.004954495906829834, 0.004866047859191895, 0.004859583854675293, 0.004808159828186035, 0.00486630392074585, 0.004839903831481933, 0.00488259220123291, 0.004855648040771484, 0.004937568187713623, 0.005093535900115967, 0.005294079780578613, 0.0053821439743042, 0.005304255962371826, 0.005144639968872071, 0.005130239963531494, 0.005158912181854248, 0.005068992137908936, 0.005053279876708984, 0.004993087768554687, 0.005000224113464355, 0.004988800048828125, 0.004879744052886963, 0.004921664237976074, 0.004903232097625733, 0.00493990421295166, 0.004837247848510742, 0.004883999824523926, 0.004854527950286866, 0.004877312183380127, 0.004850399971008301, 0.0048455681800842285, 0.004880352020263672, 0.004895040035247803, 0.004951168060302735, 0.004911712169647217, 0.004885183811187744, 0.004841311931610108, 0.004872096061706543, 0.004892672061920166, 0.005029664039611816, 0.005042367935180664, 0.004912447929382324, 0.004960959911346436, 0.004972576141357422, 0.004853759765625, 0.004853759765625, 0.004988704204559326, 0.0049268159866333, 0.005001440048217774, 0.005194399833679199, 0.005349376201629639, 0.005293407917022705, 0.005184160232543945, 0.005146240234375, 0.0052893757820129395, 0.00502678394317627, 0.004906303882598877, 0.0049220480918884275, 0.004894720077514648, 0.004911104202270508, 0.0048323521614074706, 0.005189568042755127, 0.00492412805557251, 0.005044479846954345, 0.004865439891815185, 0.00498748779296875, 0.004904960155487061, 0.005332992076873779, 0.005281792163848877, 0.00542310380935669, 0.005691391944885254, 0.005476352214813233, 0.005134335994720459, 0.0050360321998596195, 0.005217311859130859, 0.005098080158233643, 0.004952544212341308, 0.004929567813873291, 0.004934783935546875, 0.004932608127593994, 0.004952159881591797, 0.004933631896972656, 0.00488150405883789, 0.004872767925262451, 0.00487014389038086, 0.004859903812408447, 0.004886271953582763, 0.004986911773681641, 0.005221727848052978, 0.0054997758865356445, 0.005306367874145508, 0.005302432060241699, 0.0052420802116394044, 0.005090943813323975, 0.0052070398330688475, 0.00509881591796875, 0.004974783897399903, 0.004966911792755127, 0.0047422399520874025, 0.0048278717994689946, 0.004958399772644043, 0.005199391841888428, 0.00519212818145752, 0.005202303886413575, 0.005253056049346924, 0.005142271995544434, 0.005160927772521973, 0.0052219839096069335, 0.005035871982574463, 0.004983391761779785, 0.004949440002441406, 0.004975135803222656, 0.005004543781280518, 
0.005116672039031983, 0.005691391944885254, 0.005136223793029785, 0.0050136961936950684, 0.004914368152618408, 0.00494601583480835, 0.004949984073638916, 0.004943840026855469, 0.004915967941284179, 0.00480460786819458, 0.004968448162078858, 0.005094592094421387, 0.005009759902954101, 0.004880544185638428, 0.004903232097625733, 0.0050011839866638185, 0.004976319789886475, 0.004882783889770508, 0.0048594241142272945, 0.004881120204925537, 0.00491212797164917, 0.004966879844665527, 0.004878623962402344, 0.004888576030731201, 0.004875584125518799, 0.0048789758682250975, 0.004879648208618164, 0.004901663780212402, 0.004896096229553223, 0.004868671894073486, 0.004851295948028565, 0.004833631992340088, 0.004868256092071533, 0.004896383762359619, 0.004859936237335205, 0.004866240024566651, 0.004868256092071533, 0.005447872161865234, 0.005324672222137452, 0.004810688018798828, 0.0048230400085449215, 0.004820096015930176, 0.004880288124084473, 0.0048178877830505375, 0.004824192047119141, 0.00483622407913208, 0.00481279993057251, 0.004828159809112549, 0.004894879817962647, 0.004803904056549072, 0.004807360172271728, 0.004788064002990723, 0.004824543952941894, 0.00490550422668457, 0.005205632209777832, 0.005427584171295166, 0.0053821439743042, 0.005293248176574707, 0.005235519886016846, 0.005395584106445313, 0.005395328044891358, 0.005154816150665284, 0.004943295955657959, 0.004862527847290039, 0.0049502401351928715, 0.004984608173370361, 0.004933695793151855, 0.004855743885040283, 0.004865568161010742, 0.004853439807891846, 0.004901023864746093, 0.004846208095550537, 0.004859903812408447, 0.004872128009796143, 0.004947616100311279, 0.004856224060058594, 0.004896768093109131, 0.0052286720275878905, 0.005323775768280029, 0.005218207836151123, 0.004967520236968994, 0.004932544231414795, 0.004946879863739014, 0.0048865280151367185, 0.00488431978225708, 0.004849823951721191, 0.004833280086517334, 0.004825088024139404, 0.004877376079559326, 0.004826272010803222, 0.004810656070709228, 0.004810111999511719, 0.004993152141571045, 0.004907264232635498, 0.004906784057617188, 0.004843008041381836, 0.0048382081985473635, 0.004835360050201416, 0.004833280086517334, 0.004872223854064941, 0.004896736145019532, 0.004820991992950439, 0.004845119953155517, 0.004848063945770264, 0.004820799827575684, 0.004800511837005615, 0.004806496143341064, 0.004845823764801025, 0.004829279899597168, 0.004834911823272705, 0.0048206720352172855, 0.004733503818511963, 0.004800672054290771, 0.004829184055328369, 0.004903103828430176, 0.0048822398185729985, 0.004831232070922851, 0.004796544075012207, 0.00482857608795166, 0.004834047794342041, 0.004822688102722168, 0.004811007976531983, 0.004806464195251465, 0.004959392070770264, 0.00494268798828125, 0.004925407886505127, 0.00482860803604126, 0.004813504219055176, 0.004954368114471436, 0.004824192047119141, 0.004841216087341308, 0.00495900821685791, 0.004872159957885742, 0.004839200019836426, 0.004888832092285156, 0.004988927841186524, 0.005177184104919434, 0.005365568161010742, 0.005357920169830322, 0.0053821439743042, 0.005298175811767578, 0.005251071929931641, 0.0052657599449157716, 0.005213247776031494, 0.00498748779296875, 0.004883552074432373, 0.004864927768707276, 0.005111711978912354, 0.004991231918334961, 0.005007199764251709, 0.004849823951721191, 0.0048776321411132815, 0.004899360179901123, 0.00485587215423584, 0.0048657598495483395, 0.0049151678085327145, 0.004880640029907226, 0.004898816108703613, 0.004907008171081543, 0.005060895919799805, 0.005137856006622314, 
0.004915487766265869, 0.004865183830261231, 0.004803423881530762, 0.004856895923614502, 0.004865024089813232, 0.004874176025390625, 0.004843200206756592, 0.00485203218460083, 0.004925439834594727, 0.0048865280151367185, 0.0048540477752685544, 0.004857567787170411, 0.004872191905975342, 0.004791296005249023, 0.00489024019241333, 0.0048676800727844236, 0.004862016201019287, 0.004868832111358643, 0.004947968006134033, 0.0049695358276367185, 0.004878335952758789, 0.004869056224822998, 0.004859903812408447, 0.004857855796813965, 0.004856128215789795, 0.0048453760147094726, 0.004840991973876953, 0.004832704067230225, 0.0048362560272216795, 0.004894720077514648, 0.0048148479461669925, 0.004836991786956787, 0.004810688018798828, 0.004962687969207764, 0.004868447780609131, 0.004861216068267823, 0.005134143829345703, 0.005034656047821045, 0.004894527912139892, 0.0048594560623168945, 0.0049016962051391605, 0.004902431964874268, 0.004864575862884521, 0.00493126392364502, 0.004941408157348633, 0.005024288177490234, 0.0049580798149108886, 0.004927487850189209, 0.004913055896759033, 0.004931680202484131, 0.004878335952758789, 0.00486191987991333, 0.004920928001403808, 0.004977087974548339, 0.004930848121643066, 0.004904704093933105, 0.004996064186096191, 0.005031775951385498, 0.005140160083770752, 0.005144159793853759, 0.005094207763671875, 0.005064223766326904, 0.0050507521629333495, 0.004993184089660645, 0.004871840000152588, 0.004860256195068359, 0.004863423824310303, 0.004864575862884521, 0.0049192957878112795, 0.004835328102111816, 0.004810207843780517, 0.00489731216430664, 0.0048351998329162595, 0.004856063842773438, 0.004904895782470703, 0.0048700799942016605, 0.004744991779327392, 0.004850272178649902, 0.00493065595626831, 0.004864927768707276, 0.00483948802947998, 0.004831168174743652, 0.00487014389038086, 0.004843520164489746, 0.004883840084075928, 0.0048154878616333005, 0.004827072143554687, 0.004952127933502197, 0.004839104175567627, 0.004837567806243896, 0.004816927909851074, 0.004816864013671875, 0.0048388481140136715, 0.004821695804595947, 0.0048189439773559575, 0.004911104202270508, 0.004888063907623291, 0.004927455902099609, 0.004896736145019532, 0.004862592220306396, 0.004857791900634766, 0.004970176219940186, 0.004866367816925049, 0.004851295948028565, 0.004825407981872558, 0.004843616008758545, 0.0048537921905517575, 0.004937600135803222, 0.004956223964691162, 0.004833312034606934, 0.004833280086517334, 0.00482915210723877, 0.00487446403503418, 0.004832575798034668, 0.004815328121185303, 0.004855743885040283, 0.004849152088165283, 0.004895328044891357, 0.004947968006134033, 0.004887936115264893, 0.004843520164489746, 0.004976895809173584, 0.004830719947814942, 0.0048065919876098635, 0.004856768131256103, 0.004825088024139404, 0.004834368228912354, 0.004817855834960937, 0.004810751914978028, 0.0049666881561279295, 0.004859615802764892, 0.004812384128570556, 0.004790463924407959, 0.004876480102539062, 0.004820256233215332, 0.004854527950286866, 0.004841599941253662, 0.0048167681694030766, 0.004845471858978272, 0.0047545919418334965, 0.004825952053070068, 0.004820320129394531, 0.004847328186035157, 0.0048232641220092775, 0.004845439910888672, 0.004860256195068359, 0.004853888034820557, 0.004823808193206787, 0.0048986878395080565, 0.004826879978179931, 0.004843552112579346, 0.004859871864318847, 0.004837376117706299, 0.004820991992950439, 0.004834400177001953, 0.0048650879859924315, 0.004818784236907959, 0.004831232070922851, 0.004841311931610108, 0.004852960109710694, 
0.004858816146850586, 0.004836703777313232, 0.004870463848114013, 0.004878015995025635, 0.004854432106018066, 0.004868095874786377, 0.004820991992950439, 0.004847424030303955, 0.0048215041160583495, 0.004996096134185791, 0.005114560127258301, 0.004857855796813965, 0.0048455681800842285, 0.004956352233886719, 0.004862815856933594, 0.0048752322196960445, 0.004837376117706299, 0.004843520164489746, 0.004877952098846436, 0.004841856002807617, 0.004827136039733886, 0.004845471858978272, 0.004856927871704102, 0.004837952136993408, 0.004905248165130615, 0.004864160060882568, 0.004841311931610108, 0.004935840129852295, 0.00486195182800293, 0.004861728191375732, 0.0051216320991516115, 0.00487283182144165, 0.004997375965118408, 0.0048873920440673825, 0.004862751960754395, 0.004866399765014648, 0.004968128204345703, 0.004890912055969239, 0.0048529281616210935, 0.004958847999572754, 0.005185855865478516, 0.005813024044036865, 0.004753536224365235, 0.004802432060241699, 0.004860032081604004, 0.004859776020050049, 0.0047924799919128415, 0.004882431983947754, 0.00485152006149292, 0.004867743968963623, 0.0048336639404296874, 0.004803711891174316, 0.004806848049163819, 0.0049276800155639644, 0.005423808097839355, 0.005127999782562256, 0.005007359981536865, 0.004865407943725586, 0.004967040061950684, 0.0048187518119812015, 0.004857183933258057, 0.00481932783126831, 0.00482147216796875, 0.004833343982696533, 0.004825024127960205, 0.00478985595703125, 0.004813216209411621, 0.0048230400085449215, 0.004901887893676758, 0.004936384201049805, 0.004802400112152099, 0.004796256065368652, 0.0048362560272216795, 0.00486195182800293, 0.004847712039947509, 0.00486953592300415, 0.004993472099304199, 0.004837215900421143, 0.0048427839279174805, 0.004879007816314698, 0.005195551872253418, 0.0050210561752319334, 0.0048377919197082515, 0.004868671894073486, 0.004845600128173828, 0.0048167362213134765, 0.004884479999542236, 0.004822879791259766, 0.0048041920661926266, 0.0048063678741455075, 0.004899871826171875, 0.004818816184997559, 0.004848576068878173, 0.004814943790435791, 0.00490777587890625, 0.004841824054718017, 0.004831168174743652, 0.004830912113189697, 0.0048232321739196775, 0.004838943958282471, 0.004877920150756836, 0.004806943893432617, 0.004970528125762939, 0.004903647899627685, 0.004816256046295166, 0.00468998384475708, 0.004793983936309814, 0.0048195838928222655, 0.004779935836791992, 0.004928832054138184, 0.004837887763977051, 0.004775936126708984, 0.004841472148895264, 0.004841407775878906, 0.004810815811157227, 0.004843008041381836, 0.004796351909637451, 0.0048023362159729, 0.004807487964630127, 0.004783584117889404, 0.0047940478324890136, 0.004794847965240478, 0.0048254399299621584, 0.0048512320518493655, 0.004788864135742188, 0.004819104194641113, 0.004824704170227051, 0.004817984104156494, 0.004868256092071533, 0.004798719882965088, 0.004919904232025147, 0.004802559852600098, 0.004841472148895264, 0.004860095977783203, 0.004819104194641113, 0.00484227180480957, 0.004843391895294189, 0.004784895896911621, 0.004828832149505615, 0.0049014720916748045, 0.004803808212280274, 0.004827936172485351, 0.004792031764984131, 0.004814720153808594, 0.004880959987640381, 0.0048067841529846195, 0.004855008125305176, 0.004829504013061523, 0.004777152061462403, 0.004896895885467529, 0.004795392036437988, 0.004939167976379394, 0.004818975925445557, 0.004766143798828125, 0.004794367790222168, 0.0049090561866760255, 0.004843232154846192, 0.004861343860626221, 0.004811456203460693, 0.004886176109313965, 0.004813216209411621, 
0.00478220796585083, 0.004810688018798828, 0.004900928020477295, 0.004787231922149658, 0.004835296154022217, 0.00479744005203247, 0.0047983360290527344, 0.004910624027252198, 0.004785632133483887, 0.004878399848937988, 0.0048772478103637695, 0.0051849279403686525, 0.0052436161041259765, 0.004848703861236572, 0.005105567932128906, 0.005626783847808838, 0.004949440002441406, 0.004882688045501709, 0.005509119987487793, 0.0051840639114379885, 0.004828927993774414, 0.004816895961761475, 0.004844927787780761, 0.004864640235900879, 0.004819295883178711, 0.004820608139038086, 0.004783967971801758, 0.004839615821838379, 0.004871424198150635, 0.004807104110717773, 0.00485641622543335, 0.004779744148254395, 0.004827424049377442, 0.004867519855499268, 0.004858143806457519, 0.004825088024139404, 0.004833280086517334, 0.004856128215789795, 0.004845471858978272, 0.004856991767883301, 0.0048616318702697755, 0.004864607810974121, 0.004846303939819336, 0.004838592052459717, 0.004816927909851074, 0.0048306241035461425, 0.0048642559051513675, 0.004834047794342041, 0.004849184036254883, 0.004837408065795899, 0.004835775852203369, 0.0052583680152893065, 0.004831967830657959, 0.00487340784072876, 0.0048644161224365235, 0.0048596482276916505, 0.004806655883789063, 0.0048156800270080565, 0.004826560020446778, 0.004801087856292725, 0.0048455681800842285, 0.004808832168579102, 0.004876160144805909, 0.004822751998901367, 0.004855264186859131, 0.004821824073791504, 0.004818975925445557, 0.00480998420715332, 0.004804800033569336, 0.004815392017364502, 0.004708320140838623, 0.005086495876312256, 0.004821760177612305, 0.005007455825805664, 0.004842688083648681, 0.004822847843170166, 0.0048546562194824215, 0.004832799911499023, 0.0049275197982788085, 0.004831711769104004, 0.004819071769714355, 0.004844863891601562, 0.004810400009155273, 0.004814911842346192, 0.0048607678413391114, 0.004784128189086914, 0.00484281587600708, 0.004821695804595947, 0.004816383838653565, 0.004790592193603516, 0.004966591835021973, 0.004880544185638428, 0.004829216003417969, 0.0048652801513671875, 0.0048371520042419435, 0.004877088069915772, 0.0048189439773559575, 0.004843776226043701, 0.004821760177612305, 0.005183680057525635, 0.004848512172698974, 0.004827072143554687, 0.0048429760932922365, 0.004827936172485351, 0.004840928077697754, 0.004829472064971924, 0.004857567787170411, 0.0048063998222351075, 0.004810368061065674, 0.004825056076049805, 0.00483843183517456, 0.004845471858978272, 0.004830880165100098, 0.0048150081634521485, 0.004872128009796143, 0.004853055953979492, 0.0048438720703125, 0.004805215835571289, 0.0050728960037231445, 0.004912255764007568, 0.004840320110321045, 0.004810751914978028, 0.004806943893432617, 0.004830495834350586, 0.0048540477752685544, 0.004804224014282227, 0.004807199954986572, 0.00479747200012207, 0.004830143928527832, 0.004865056037902832, 0.004809728145599365, 0.0048022718429565426, 0.00521449613571167, 0.004769792079925537, 0.0050421757698059086, 0.0048865280151367185, 0.004841472148895264, 0.004882431983947754, 0.00483513593673706, 0.00487443208694458, 0.004827136039733886, 0.004816256046295166, 0.00482367992401123, 0.004869311809539795, 0.0048525438308715824, 0.004906752109527588, 0.004856063842773438, 0.004794367790222168, 0.004808703899383545, 0.004842688083648681, 0.00487446403503418, 0.004876895904541016, 0.004833280086517334, 0.004828383922576904, 0.004856607913970947, 0.004843520164489746, 0.004831232070922851, 0.0048332161903381346, 0.004818496227264405, 0.004827583789825439, 
0.004821407794952393, 0.004832096099853515, 0.004897280216217041, 0.004836991786956787, 0.004811744213104248, 0.004810463905334472, 0.00480460786819458, 0.004847616195678711, 0.004836544036865235, 0.004796895980834961, 0.004796768188476562, 0.004769792079925537, 0.004941343784332275, 0.004855423927307129, 0.0048607678413391114, 0.0048455681800842285, 0.0048063998222351075, 0.004876543998718261, 0.00479859209060669, 0.004806528091430664, 0.004888576030731201, 0.004837376117706299, 0.004777791976928711, 0.004800479888916015, 0.004838719844818116, 0.004791200160980225, 0.004814879894256592, 0.0048793601989746095, 0.004793312072753906, 0.004775328159332275, 0.0049049282073974606, 0.00482367992401123, 0.004827136039733886, 0.004908512115478516, 0.004823391914367676, 0.004837376117706299, 0.004709824085235596, 0.004787327766418457, 0.004840735912322998, 0.004894464015960693, 0.004802591800689697, 0.004817279815673828, 0.004810336112976074, 0.004934495925903321, 0.004810656070709228, 0.004777503967285156, 0.004814943790435791, 0.004841631889343262, 0.004831456184387207, 0.004825088024139404, 0.004800511837005615, 0.0047961602210998535, 0.004819200038909912, 0.004902495861053467, 0.004808576107025147, 0.004821792125701905, 0.0048005437850952145, 0.004953695774078369, 0.004810592174530029, 0.004898560047149658, 0.004811295986175537, 0.00481279993057251, 0.004835328102111816, 0.004820127964019775, 0.0048091840744018555, 0.004809088230133056, 0.004810751914978028, 0.004874239921569825, 0.004834368228912354, 0.00486191987991333, 0.004797023773193359, 0.004798719882965088, 0.004833407878875732, 0.004810751914978028, 0.0049469761848449706, 0.0048273601531982424, 0.004794496059417725, 0.004821631908416748, 0.004833280086517334, 0.004845056056976318, 0.00490070390701294, 0.0048401918411254885, 0.004822688102722168, 0.00545743989944458, 0.004885216236114502, 0.004857855796813965, 0.004822783946990967, 0.0048063678741455075, 0.004801055908203125, 0.004800352096557617, 0.004831615924835205, 0.0048781118392944335, 0.00482038402557373, 0.004790143966674805, 0.00524502420425415, 0.0049015040397644045, 0.004833600044250488, 0.004810592174530029, 0.004839263916015625, 0.005285791873931885, 0.006253119945526123, 0.005736063957214355, 0.005308095932006836, 0.0048668160438537595, 0.004861887931823731, 0.004863999843597412, 0.004855519771575928, 0.004882143974304199, 0.004848095893859863, 0.004814432144165039, 0.00481331205368042, 0.0047923197746276855, 0.0048865280151367185, 0.004837088108062744, 0.004798751831054687, 0.005115903854370117, 0.004847904205322265, 0.004998784065246582, 0.004850880146026611, 0.0048419198989868165, 0.0049361600875854495, 0.004872352123260498, 0.004820831775665283, 0.004849088191986084, 0.004840223789215088, 0.004839200019836426, 0.0048189439773559575, 0.0048139519691467285, 0.004811647891998291, 0.00482476806640625, 0.004833600044250488, 0.004824351787567138, 0.0048278717994689946, 0.004816895961761475, 0.004783711910247803, 0.004894271850585937, 0.004860191822052002, 0.00483513593673706, 0.00482585620880127, 0.004825088024139404, 0.004841760158538818, 0.004990687847137451, 0.0048393278121948245, 0.004886623859405518, 0.004833119869232178, 0.004813183784484863, 0.004882400035858154, 0.004804416179656983, 0.004804895877838135, 0.0048198080062866215, 0.004818848133087158, 0.004834239959716797, 0.004810624122619629, 0.004806240081787109, 0.004821568012237549, 0.004820864200592041, 0.0048211197853088375, 0.004837376117706299, 0.004829184055328369, 0.004841472148895264, 0.004830751895904541, 
0.004805088043212891, 0.005019680023193359, 0.004822847843170166, 0.004798912048339844, 0.004802495956420899, 0.004826591968536377, 0.004860256195068359, 0.0048230400085449215, 0.004835487842559815, 0.004847424030303955, 0.004836415767669678, 0.0048670401573181156, 0.004806655883789063, 0.00486195182800293, 0.004874239921569825, 0.004931583881378174, 0.004832704067230225, 0.004815231800079345, 0.004874591827392578, 0.00481660795211792, 0.00482316780090332, 0.004840576171875, 0.004825215816497802, 0.004823808193206787, 0.004911104202270508, 0.004825088024139404, 0.004814271926879883, 0.004813375949859619, 0.00480460786819458, 0.004876639842987061, 0.0048165440559387205, 0.004798463821411133, 0.004819136142730713, 0.004828767776489258, 0.004869984149932861, 0.004835872173309326, 0.0048122878074646, 0.004809055805206299, 0.004836959838867188, 0.004825632095336914, 0.004867263793945312, 0.004818848133087158, 0.004791071891784668, 0.0048189439773559575, 0.004792223930358887, 0.004825471878051758, 0.004807807922363281, 0.0048249921798706056, 0.00482374382019043, 0.004802527904510498, 0.0047916479110717775, 0.004795072078704834, 0.004857855796813965, 0.004799647808074951, 0.004887296199798584, 0.004831327915191651, 0.004910560131072998, 0.004919839859008789, 0.004827104091644287, 0.004816927909851074, 0.00480460786819458, 0.004886911869049072, 0.004824704170227051, 0.004845695972442627, 0.004706783771514893, 0.004873951911926269, 0.004812640190124512, 0.004815135955810547, 0.005631552219390869, 0.004862559795379639, 0.004838912010192871, 0.004838751792907715, 0.004798431873321533, 0.004873119831085205, 0.004884736061096191, 0.004832640171051026, 0.004804704189300537, 0.004795904159545898, 0.004846367835998535, 0.0048023362159729, 0.004812960147857666, 0.004808767795562744, 0.004811071872711182, 0.004838719844818116, 0.004885119915008545, 0.004818784236907959, 0.0048065600395202634, 0.004808703899383545, 0.004833439826965332, 0.004820703983306885, 0.0048100481033325196, 0.004819424152374268, 0.00480291223526001, 0.004843200206756592, 0.004847712039947509, 0.004816415786743164, 0.004866112232208252, 0.00488047981262207, 0.0048215360641479495, 0.004839424133300781, 0.004829184055328369, 0.00481603193283081, 0.004859072208404541, 0.004808224201202392, 0.004817024230957031, 0.0048100161552429195, 0.004822944164276123, 0.0048791680335998535, 0.004881887912750244, 0.004823359966278076, 0.00483571195602417, 0.004827328205108642, 0.004852896213531494, 0.004812992095947265, 0.0048455362319946285, 0.00482751989364624, 0.0047918400764465335, 0.004819392204284668, 0.004866047859191895, 0.004835328102111816, 0.004845695972442627, 0.004861792087554931, 0.004836959838867188, 0.004816959857940674, 0.004802976131439209, 0.004808383941650391, 0.005316319942474365, 0.004695744037628174, 0.004822624206542969, 0.004883520126342774, 0.004779679775238037, 0.004794464111328125, 0.004794271945953369, 0.0048189439773559575, 0.005156864166259765, 0.00488969612121582, 0.004840352058410644, 0.004875679969787597, 0.004822847843170166, 0.004848095893859863, 0.004876607894897461, 0.004834400177001953, 0.004856224060058594, 0.004825600147247314, 0.004792255878448486, 0.004821216106414795, 0.004780032157897949, 0.004834464073181152, 0.004819168090820313, 0.004808640003204346, 0.00485430383682251, 0.004823359966278076, 0.004832863807678223, 0.004811103820800781, 0.004853119850158691, 0.004936255931854248, 0.004894527912139892, 0.004831232070922851, 0.004858176231384278, 0.005182784080505371, 0.004954495906829834, 
0.005048319816589355, 0.004918591976165771, 0.004860608100891113, 0.004833280086517334, 0.004876287937164306, 0.004845791816711425, 0.004831007957458496, 0.004831232070922851, 0.004817152023315429, 0.004858880043029785, 0.0050525121688842775, 0.004844255924224853, 0.004835264205932617, 0.004885791778564453, 0.004809440135955811, 0.0048949441909790035, 0.00483622407913208, 0.004864319801330567, 0.004901631832122803, 0.004855552196502686, 0.004851808071136474, 0.004810751914978028, 0.004945184230804443, 0.004827231884002686, 0.004893311977386475, 0.004811967849731445, 0.004796576023101806, 0.004830143928527832, 0.004888288021087647, 0.004709727764129638, 0.004865920066833496, 0.00482751989364624, 0.0048287038803100585, 0.004807007789611816, 0.004914944171905518, 0.00480950403213501, 0.004843520164489746, 0.004833280086517334, 0.00482092809677124, 0.0049845118522644045, 0.004863967895507812, 0.00482480001449585, 0.00482371187210083, 0.004818975925445557, 0.004820831775665283, 0.004862112045288086, 0.0048130879402160645, 0.004791808128356933, 0.005223775863647461, 0.0050338878631591795, 0.0053851838111877446, 0.005140192031860351, 0.006209695816040039, 0.005349696159362793, 0.004919104099273682, 0.004935679912567138, 0.004900288105010986, 0.004850560188293457, 0.00484284782409668, 0.004955967903137207, 0.0048624958992004395, 0.004834911823272705, 0.004834784030914307, 0.004862239837646484, 0.004838335990905762, 0.0048323521614074706, 0.004816832065582276, 0.004840415954589844, 0.004826655864715576, 0.004832960128784179, 0.0048009281158447265, 0.004786240100860596, 0.0048005437850952145, 0.004868256092071533, 0.004806496143341064, 0.004872191905975342, 0.004797952175140381, 0.004827136039733886, 0.004811200141906738, 0.004872255802154541, 0.004797984123229981, 0.004796224117279053, 0.004811423778533936, 0.004891871929168701, 0.004853631973266602, 0.004886879920959473, 0.004948031902313232, 0.004857376098632813, 0.004864160060882568, 0.004809535980224609, 0.004887743949890137, 0.004821375846862793, 0.004702208042144776, 0.0047923197746276855, 0.004854976177215576, 0.004903744220733643, 0.004824543952941894, 0.004817440032958985, 0.004802559852600098, 0.004838719844818116, 0.0048698558807373045, 0.00481990385055542, 0.004811903953552246, 0.004823328018188477, 0.004844160079956055, 0.004825088024139404, 0.004789504051208496, 0.004801568031311035, 0.0047983360290527344, 0.004791296005249023, 0.004892831802368164, 0.004827712059020996, 0.004855936050415039, 0.004832992076873779, 0.004902751922607422, 0.004821663856506348, 0.004886303901672363, 0.004840991973876953, 0.004847775936126709, 0.004833183765411377, 0.004899231910705566, 0.00484768009185791, 0.004822271823883057, 0.004889440059661865, 0.004839360237121582, 0.004824575901031494, 0.004833983898162842, 0.004833151817321777, 0.00483516788482666, 0.00482860803604126, 0.004832960128784179, 0.004872735977172852, 0.004808735847473144, 0.004803199768066407, 0.0048906559944152835, 0.004795231819152832, 0.004892928123474121, 0.004846144199371338, 0.004833280086517334, 0.004830880165100098, 0.004817376136779785, 0.004800032138824463, 0.004819295883178711, 0.004800672054290771, 0.004796512126922607, 0.004810495853424072, 0.004876287937164306, 0.004837376117706299, 0.004837279796600342, 0.0048026561737060545, 0.004825344085693359, 0.004798208236694336, 0.004820991992950439, 0.004802559852600098, 0.004882431983947754, 0.0047291841506958004, 0.004978271961212158, 0.004796319961547851, 0.004812928199768066, 0.00480787181854248, 0.00483619213104248, 
0.0048089919090270996, 0.004797152042388916, 0.004806943893432617, 0.004825664043426514, 0.004796576023101806, 0.004802559852600098, 0.0048570241928100584, 0.004801343917846679, 0.004839424133300781, 0.0048083200454711915, 0.004796095848083496, 0.0047909760475158695, 0.004806655883789063, 0.004838687896728516, 0.005176032066345215, 0.004835328102111816, 0.004926720142364502, 0.004932352066040039, 0.0048369278907775875, 0.0048355841636657714, 0.004838784217834472, 0.004823872089385986, 0.004848800182342529, 0.004833792209625244, 0.00480291223526001, 0.0048211197853088375, 0.004841119766235352, 0.004804927825927735, 0.004831039905548095, 0.0048434557914733885, 0.004814112186431884, 0.004823935985565186, 0.00481279993057251, 0.0048232641220092775, 0.004810431957244873, 0.004792352199554443, 0.004889984130859375, 0.004819647789001465, 0.00486300802230835, 0.004791264057159424, 0.004855519771575928, 0.00495849609375, 0.0049500160217285155, 0.004958208084106445, 0.0048269758224487305, 0.004876416206359863, 0.004888607978820801, 0.0048438401222229005, 0.004856959819793701, 0.004823488235473633, 0.004863903999328613, 0.004951295852661133, 0.004813504219055176, 0.004837024211883545, 0.00482367992401123, 0.004876319885253906, 0.00486191987991333, 0.004763775825500488, 0.004837503910064697, 0.004849535942077637, 0.004947968006134033, 0.005063007831573487, 0.004880095958709717, 0.004831168174743652, 0.004839424133300781, 0.004872096061706543, 0.0048776321411132815, 0.004823840141296386, 0.0048189439773559575, 0.004820991992950439, 0.004798463821411133, 0.0048455681800842285, 0.004829184055328369, 0.004878335952758789, 0.004818208217620849, 0.004819680213928223, 0.004890399932861328, 0.004825376033782959, 0.004830719947814942, 0.004833439826965332, 0.004899168014526367, 0.0048065919876098635, 0.004809823989868164, 0.005219232082366943, 0.004922368049621582, 0.004840447902679444, 0.004867263793945312, 0.004852128028869629, 0.004974431991577148, 0.004844096183776856, 0.004840672016143799, 0.00481279993057251, 0.004800511837005615, 0.004813600063323975, 0.004827136039733886, 0.004884672164916992, 0.004820223808288574, 0.004913472175598145, 0.004798304080963134, 0.004856224060058594, 0.004875775814056397, 0.004845183849334717, 0.00482751989364624, 0.004838175773620605, 0.004818431854248047, 0.004835103988647461, 0.004808703899383545, 0.0048685441017150876, 0.004863296031951904, 0.004840223789215088, 0.004824416160583496, 0.0048379840850830075, 0.004826816082000732, 0.004839456081390381, 0.004835519790649414, 0.004785344123840332, 0.004797311782836914, 0.004831232070922851, 0.004988287925720215, 0.004913792133331298, 0.0047820801734924315, 0.004894720077514648, 0.004829184055328369, 0.0048559679985046384, 0.004806623935699463, 0.004853024005889893, 0.004842144012451172, 0.004827072143554687, 0.004867072105407715, 0.004834080219268799, 0.004849215984344482, 0.004841856002807617, 0.004818336009979248, 0.005354432106018067, 0.004846528053283692, 0.00486297607421875, 0.004843520164489746, 0.0049081602096557615, 0.004889472007751465, 0.004935328006744385, 0.004905280113220215, 0.004861983776092529, 0.004853824138641358, 0.00486630392074585, 0.004838496208190918, 0.0048198080062866215, 0.004830592155456543, 0.004875840187072754, 0.004830111980438232, 0.0048369278907775875, 0.004827487945556641, 0.004880383968353271, 0.004905280113220215, 0.004823999881744385, 0.004810527801513672, 0.004833920001983643, 0.004808032035827637, 0.0048895998001098635, 0.004919616222381592, 0.004810175895690918, 0.004812863826751709, 
0.004827328205108642, 0.004918879985809326, 0.004838943958282471, 0.004883488178253174, 0.004806496143341064, 0.004843103885650635, 0.0048410239219665524, 0.004807168006896972, 0.0048213438987731935, 0.004829311847686767, 0.004810272216796875, 0.0047926721572875974, 0.004841567993164063, 0.004795616149902344, 0.004803552150726319, 0.004878047943115234, 0.004826464176177978, 0.00492633581161499, 0.0048412480354309085, 0.004829184055328369, 0.004868095874786377, 0.004923391819000244, 0.004818816184997559, 0.004837503910064697, 0.004840479850769043, 0.004807712078094483, 0.004860928058624267, 0.004817279815673828, 0.004856256008148194, 0.004825215816497802, 0.004821280002593994, 0.004851391792297363, 0.004822080135345459, 0.004831232070922851, 0.004886943817138672, 0.004856383800506592, 0.004848735809326172, 0.004961184024810791, 0.004856128215789795, 0.0048263039588928225, 0.004837887763977051, 0.0048496642112731934, 0.0048223681449890135, 0.004792992115020752, 0.00483241605758667, 0.004952799797058105, 0.004814976215362548, 0.004901216030120849, 0.004808351993560791, 0.004820576190948486, 0.004837471961975098, 0.004837056159973144, 0.004807104110717773, 0.004810080051422119, 0.004821856021881104, 0.004837376117706299, 0.005093311786651612, 0.004849728107452393, 0.004878464221954345, 0.004855040073394775, 0.00482367992401123, 0.004853919982910156, 0.00481990385055542, 0.004825632095336914, 0.004831999778747559, 0.004826752185821534, 0.004827040195465088, 0.004816991806030273, 0.004860191822052002, 0.004833343982696533, 0.004808191776275635, 0.004812320232391357, 0.004811391830444336, 0.004814271926879883, 0.004858431816101075, 0.004810751914978028, 0.0051363840103149415, 0.0049909758567810054, 0.004907072067260742, 0.004895008087158203, 0.004843391895294189, 0.0048364801406860355, 0.004821663856506348, 0.004851712226867676, 0.004907008171081543, 0.004811135768890381, 0.004956192016601563, 0.0048373441696166995, 0.004835360050201416, 0.004937983989715576, 0.004839136123657227, 0.004837376117706299, 0.004825088024139404, 0.004810751914978028, 0.004839424133300781, 0.004843520164489746, 0.004827136039733886, 0.004845727920532226, 0.004910943984985351, 0.004804224014282227, 0.005154304027557373, 0.0050553598403930665, 0.005570047855377197, 0.005091839790344238, 0.005136288166046142, 0.005193888187408447, 0.004902016162872314, 0.004913983821868896, 0.0050360321998596195, 0.004851583957672119, 0.005140096187591553, 0.004944543838500976, 0.004873184204101563, 0.004862847805023193, 0.004892672061920166, 0.004863296031951904, 0.004850240230560303, 0.004849760055541992, 0.004863647937774658, 0.004884191989898682, 0.00589024019241333, 0.004908927917480469, 0.004919904232025147, 0.004843520164489746, 0.0048384637832641604, 0.0048438081741333005, 0.004823135852813721, 0.004822688102722168, 0.004868864059448242, 0.004791808128356933, 0.004795040130615234, 0.004809055805206299, 0.004830239772796631, 0.004856063842773438, 0.004798848152160645, 0.004808576107025147, 0.0048100481033325196, 0.004877120018005371, 0.004820991992950439, 0.004838784217834472, 0.004815392017364502, 0.004819039821624756, 0.00481279993057251, 0.004839744091033936, 0.004822720050811768, 0.004861311912536621, 0.0048380160331726076, 0.004806816101074219, 0.004679456233978272, 0.004821407794952393, 0.00482147216796875, 0.0048496642112731934, 0.004833183765411377, 0.0048661441802978515, 0.004816895961761475, 0.004884416103363037, 0.0048429760932922365, 0.004815584182739258, 0.004863776206970215, 0.00482092809677124, 0.004826687812805176, 
0.004835487842559815, 0.004822624206542969, 0.004842336177825928, 0.0048475837707519535, 0.004818975925445557, 0.004863999843597412, 0.004845791816711425, 0.004847392082214356, 0.004839424133300781, 0.004859776020050049, 0.004810368061065674, 0.004798880100250244, 0.00483951997756958, 0.004811808109283448, 0.004815839767456055, 0.004800511837005615, 0.004845183849334717, 0.004845503807067871, 0.004839744091033936, 0.004812928199768066, 0.004818848133087158, 0.0048323521614074706, 0.004810976028442383, 0.004814655780792237, 0.004795040130615234, 0.004818655967712402, 0.004830912113189697, 0.0047578558921813965, 0.004840159893035888, 0.004796256065368652, 0.004788127899169922, 0.004816991806030273, 0.004794591903686523, 0.004818816184997559, 0.00478607988357544, 0.004780223846435547, 0.004830048084259033, 0.004852255821228027, 0.0049008002281188965, 0.004860095977783203, 0.0048269758224487305, 0.004856063842773438, 0.00482528018951416, 0.0048373441696166995, 0.004810815811157227, 0.005746304035186767, 0.004872416019439697, 0.004806303977966308, 0.00480947208404541, 0.004912896156311035, 0.004710400104522705, 0.004851712226867676, 0.00481385612487793, 0.0048273601531982424, 0.004797183990478516, 0.004810751914978028, 0.0048323521614074706, 0.004838592052459717, 0.004847328186035157, 0.004823135852813721, 0.004796319961547851, 0.004890624046325683, 0.004793824195861817, 0.00485152006149292, 0.004799488067626953, 0.004812511920928955, 0.0048371520042419435, 0.004814208030700684, 0.004807519912719727, 0.004811808109283448, 0.004828383922576904, 0.004832511901855469, 0.004895423889160156, 0.00482425594329834, 0.004809343814849853, 0.004822783946990967, 0.004853759765625, 0.004858463764190674, 0.004836319923400879, 0.0048146238327026365, 0.004789152145385742, 0.004841087818145752, 0.004837024211883545, 0.004815584182739258, 0.004873856067657471, 0.004823488235473633, 0.004827072143554687, 0.00482912015914917, 0.004812064170837402, 0.004809055805206299, 0.004832992076873779, 0.004791007995605469, 0.004832863807678223, 0.004816864013671875, 0.004876832008361817, 0.0048616318702697755, 0.004810143947601318, 0.004967232227325439, 0.004816991806030273, 0.004803616046905518, 0.004844639778137207, 0.004811679840087891, 0.004819839954376221, 0.004802432060241699, 0.004788352012634277, 0.004820991992950439, 0.004885600090026855, 0.004860703945159912, 0.004777919769287109, 0.004788288116455078, 0.004812928199768066, 0.004820543766021729, 0.004788447856903076]",tokens/s,201.52696854704882,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,938.31168,6533.61152,0.0,6138.363904,6060.931072,s,1,6.96679052734375,6.96679052734375,0.0,6.96679052734375,6.96679052734375,6.96679052734375,6.96679052734375,[6.96679052734375],,kWh,5.085252083328366e-06,5.534977879280402e-07,1.185278726001382e-06,6.824028597257788e-06,,MB,1379.749888,6558.777344,0.0,6150.946816,5419.87328,s,10,0.6020774116516113,0.06020774116516113,0.0018453734187563265,0.05972475242614746,0.0627747314453125,0.0633516544342041,0.06381319282531737,"[0.06264652633666992, 0.0578870735168457, 0.061093887329101565, 0.05857558441162109, 0.058743072509765626, 0.05876416015625, 0.05997062301635742, 0.0639285774230957, 0.0594788818359375, 0.060989025115966794]",tokens/s,4251.944933422165,kWh,1.9568901477222326e-06,2.157116488981847e-07,1.3030325239066221e-06,3.475634320527039e-06,tokens/kWh,73655619.77796923,MB,1423.233024,6558.777344,0.0,6150.946816,5419.87584,s,10,15.658351074218748,1.5658351074218753,0.003458824543806209,1.5653187866210936,1.570976623535156,1.5718140563964844,1.5724840026855469,"[1.562399658203125, 1.5664119873046876, 1.57079052734375, 1.56212451171875, 1.562656494140625, 1.5660498046875, 1.56763232421875, 1.5726514892578125, 1.5645877685546874, 1.5630465087890626]",tokens/s,40.23412152492135,kWh,4.5555347025613113e-05,5.024060454978115e-06,3.0192659339294274e-05,8.07720668198855e-05,tokens/kWh,779972.6128153236,,s,630,15.656249246597312,0.02485118928031316,0.0005200062189687375,0.024734368324279783,0.024987286949157715,0.02553708686828613,0.027211076564788824,"[0.02589961624145508, 0.025453664779663085, 0.025055807113647462, 0.02475379180908203, 0.024621280670166015, 0.024597024917602538, 0.02693120002746582, 0.024621055603027343, 0.025525312423706054, 0.02461996841430664, 0.024572128295898436, 0.0245645751953125, 0.02460358428955078, 0.024588287353515623, 0.02454528045654297, 0.024649152755737303, 0.024604736328125, 0.02458995246887207, 0.024578943252563476, 0.024739103317260744, 0.024786943435668944, 0.024641599655151367, 0.024608640670776366, 0.02457270431518555, 0.024598272323608398, 0.02468016052246094, 0.024723520278930666, 0.02468707275390625, 0.02473369598388672, 0.02488675117492676, 0.02481376075744629, 0.02470070457458496, 0.02467487907409668, 0.02473574447631836, 0.02473779106140137, 0.02465702438354492, 0.024648576736450194, 0.024795007705688477, 0.024915935516357422, 0.024885408401489256, 0.024890720367431642, 0.024992416381835938, 0.024915807723999022, 0.024882911682128906, 0.024833663940429688, 0.02467238426208496, 0.024756927490234375, 0.024734848022460936, 0.024695104598999023, 0.024650304794311524, 0.024794879913330077, 0.024837888717651368, 0.024746496200561522, 0.024659584045410157, 0.024710880279541016, 0.024648351669311522, 0.024645631790161132, 0.024692703247070312, 0.024684576034545897, 0.024821760177612305, 0.02480678367614746, 0.02478054428100586, 0.024736576080322266, 0.027100543975830078, 0.026265567779541015, 0.025604768753051756, 0.024977407455444335, 0.024818880081176758, 0.02469055938720703, 0.024673183441162108, 0.02463542366027832, 0.024713216781616212, 0.024696832656860353, 0.024678367614746094, 0.024653823852539062, 0.02466975975036621, 0.024691167831420897, 0.02481718444824219, 0.024760799407958986, 0.024780799865722656, 0.02474188804626465, 0.02488662338256836, 0.024891424179077148, 0.02485107231140137, 0.024722719192504884, 0.024698879241943358, 0.024777664184570312, 0.024726495742797852, 0.024656095504760743, 0.02465167999267578, 0.024742591857910157, 
0.024784320831298827, 0.02466649627685547, 0.02463148880004883, 0.024611967086791992, 0.02471820831298828, 0.024811519622802734, 0.024707199096679688, 0.024614784240722658, 0.02484223937988281, 0.024793088912963866, 0.024814943313598632, 0.02482806396484375, 0.024821983337402344, 0.02490380859375, 0.025909151077270508, 0.024884767532348633, 0.024715967178344726, 0.024842208862304687, 0.024804479598999025, 0.02483296012878418, 0.024719263076782228, 0.024809568405151368, 0.024782079696655274, 0.024779552459716796, 0.02477052879333496, 0.02467430305480957, 0.02484223937988281, 0.024795135498046874, 0.02480316734313965, 0.02487516784667969, 0.024986976623535155, 0.024924095153808595, 0.024838720321655273, 0.024752288818359374, 0.024735008239746094, 0.02897635269165039, 0.026788639068603515, 0.02592095947265625, 0.025365055084228514, 0.024923519134521486, 0.02474015998840332, 0.024615232467651366, 0.024626720428466798, 0.025155359268188477, 0.02463587188720703, 0.024582143783569335, 0.024803552627563476, 0.024756223678588866, 0.024729600906372072, 0.024622304916381836, 0.025016191482543946, 0.025057376861572264, 0.025029024124145507, 0.024600992202758788, 0.02454528045654297, 0.024755456924438476, 0.024787712097167968, 0.024807424545288087, 0.02471731185913086, 0.02476144027709961, 0.02468342399597168, 0.0247193603515625, 0.024672256469726563, 0.024664064407348633, 0.02466953659057617, 0.024640159606933595, 0.024662015914916992, 0.024624895095825196, 0.024754432678222655, 0.024767839431762695, 0.024668832778930665, 0.024637632369995117, 0.024651584625244142, 0.024832000732421877, 0.02488502311706543, 0.024913280487060548, 0.024861312866210936, 0.025026784896850587, 0.0250817928314209, 0.025004095077514648, 0.024813343048095703, 0.02486617660522461, 0.024793952941894532, 0.02478489685058594, 0.02476348876953125, 0.024644384384155272, 0.024776063919067382, 0.02476089668273926, 0.02464787292480469, 0.024688512802124023, 0.02459823989868164, 0.024654239654541017, 0.027891712188720705, 0.02455904006958008, 0.024582719802856444, 0.024649311065673828, 0.024654239654541017, 0.024696832656860353, 0.02725622367858887, 0.026458751678466796, 0.025546720504760742, 0.02507161521911621, 0.024815616607666017, 0.024705024719238283, 0.02467840003967285, 0.024557567596435546, 0.02451456069946289, 0.02454652786254883, 0.02459219169616699, 0.024527103424072265, 0.024511199951171875, 0.024596479415893553, 0.024767936706542967, 0.02479136085510254, 0.024764543533325196, 0.02471232032775879, 0.02489241600036621, 0.024846336364746095, 0.024868928909301757, 0.024704896926879882, 0.02465184020996094, 0.024650751113891603, 0.024642559051513673, 0.024647680282592774, 0.024606719970703125, 0.024816736221313477, 0.024858720779418947, 0.024733888626098634, 0.02459679985046387, 0.02455379295349121, 0.024647327423095704, 0.024746335983276368, 0.02466815948486328, 0.024608415603637697, 0.024565664291381836, 0.024580543518066406, 0.02464748764038086, 0.02474732780456543, 0.024896064758300782, 0.024867231369018555, 0.02488515281677246, 0.024841888427734375, 0.02480073547363281, 0.02475916862487793, 0.02474345588684082, 0.024723936080932617, 0.02468822479248047, 0.024621055603027343, 0.024717727661132814, 0.024686111450195312, 0.02462918472290039, 0.024616992950439454, 0.024817472457885743, 0.024820415496826172, 0.02479523277282715, 0.02472857666015625, 0.024738719940185547, 0.024729248046875, 0.024789215087890625, 0.024730880737304686, 0.024593151092529297, 0.026720991134643556, 0.026142431259155274, 0.025415903091430665, 
0.02491360092163086, 0.024781375885009765, 0.024657920837402345, 0.024610815048217775, 0.02456985664367676, 0.02458620834350586, 0.024649087905883788, 0.024699071884155273, 0.024633440017700195, 0.02458457565307617, 0.024688640594482423, 0.024656959533691406, 0.024687551498413087, 0.02466396713256836, 0.024645727157592775, 0.024713216781616212, 0.024707071304321288, 0.024633344650268556, 0.024589920043945314, 0.024749664306640624, 0.02479801559448242, 0.024731359481811523, 0.024583488464355468, 0.024562656402587892, 0.024731647491455077, 0.02474095916748047, 0.02474838447570801, 0.024635488510131837, 0.024739871978759764, 0.024795583724975586, 0.024827648162841796, 0.024756479263305663, 0.02468454360961914, 0.024696832656860353, 0.024687967300415038, 0.024861343383789064, 0.02484003257751465, 0.024998048782348632, 0.024978912353515625, 0.02492880058288574, 0.02486016082763672, 0.024788639068603516, 0.024765151977539063, 0.024718496322631837, 0.024710111618041992, 0.024688640594482423, 0.024867904663085937, 0.024901567459106447, 0.02476255989074707, 0.024680383682250978, 0.024685440063476564, 0.024729183197021484, 0.024703359603881835, 0.024719392776489258, 0.024780832290649413, 0.0248502082824707, 0.024871103286743163, 0.024876928329467772, 0.024676319122314452, 0.024771839141845702, 0.028639135360717775, 0.026617952346801758, 0.02574332809448242, 0.02521887969970703, 0.024971136093139647, 0.024803647994995116, 0.02471116828918457, 0.024696832656860353, 0.024705024719238283, 0.02465177536010742, 0.024590335845947265, 0.024594432830810548, 0.024641151428222655, 0.02465830421447754, 0.024620927810668946, 0.024670335769653322, 0.024808736801147462, 0.02476233673095703, 0.024747840881347655, 0.024621856689453124, 0.024591808319091798, 0.024789728164672852, 0.024780799865722656, 0.024647680282592774, 0.024628543853759767, 0.02467910385131836, 0.024690944671630858, 0.024669952392578125, 0.02462054443359375, 0.024619520187377928, 0.024735519409179688, 0.02478102493286133, 0.024790719985961916, 0.024707391738891603, 0.024749216079711915, 0.024766656875610353, 0.024760160446166992, 0.024671039581298827, 0.02473958396911621, 0.02487731170654297, 0.024930303573608398, 0.0248668155670166, 0.024913375854492187, 0.025012767791748047, 0.02496512031555176, 0.024911136627197267, 0.02486297607421875, 0.024822240829467775, 0.02471228790283203, 0.024630176544189454, 0.024643680572509766, 0.024675487518310547, 0.024723295211791993, 0.02481577682495117, 0.024759040832519532, 0.024702016830444335, 0.024642688751220703, 0.024743743896484375, 0.024739072799682616, 0.024722175598144533, 0.0246824951171875, 0.02475164794921875, 0.024841760635375975, 0.02667728042602539, 0.025870336532592773, 0.025402528762817383, 0.024990079879760742, 0.024744384765625, 0.024623104095458984, 0.02470297622680664, 0.02464358329772949, 0.024589376449584963, 0.024562559127807616, 0.0245863037109375, 0.024618240356445314, 0.024684799194335937, 0.024644096374511718, 0.02465177536010742, 0.0247314567565918, 0.02473289680480957, 0.026029024124145508, 0.027024959564208983, 0.026042816162109374, 0.024751840591430666, 0.024758367538452147, 0.025274560928344725, 0.025042943954467774, 0.02479871940612793, 0.024773216247558592, 0.024813472747802736, 0.0246824951171875, 0.024731647491455077, 0.024672256469726563, 0.02466815948486328, 0.024624319076538087, 0.02480620765686035, 0.02469478416442871, 0.02472550392150879, 0.024667423248291017, 0.024543968200683594, 0.024628608703613282, 0.024724096298217774, 0.024747936248779297, 0.02473583984375, 
0.025027999877929686, 0.024910432815551758, 0.02492006492614746, 0.024813440322875975, 0.02470515251159668, 0.02467856025695801, 0.024679744720458984, 0.024642080307006837, 0.024649599075317382, 0.024735872268676757, 0.024750080108642578, 0.024753759384155274, 0.024703392028808592, 0.024813568115234375, 0.024839744567871094, 0.024824256896972655, 0.02487843132019043, 0.024799007415771485, 0.024761215209960937, 0.02469193649291992, 0.02471811294555664, 0.024711200714111328, 0.030552448272705077, 0.027061792373657228, 0.02592201614379883, 0.025268224716186522, 0.024975135803222658, 0.02471667289733887, 0.024724319458007814, 0.024610815048217775, 0.02742787170410156, 0.024673248291015627, 0.024657920837402345, 0.02473369598388672, 0.024664064407348633, 0.02456729507446289, 0.024549888610839843, 0.024673887252807617, 0.024681024551391602, 0.024655712127685546, 0.024610815048217775, 0.02472540855407715, 0.024749311447143554, 0.02465059280395508, 0.02456515121459961, 0.02460732841491699, 0.02480710411071777, 0.024701311111450196, 0.024610591888427735, 0.02458576011657715, 0.024899967193603517, 0.024733184814453125, 0.024702720642089844, 0.024687616348266602, 0.024624767303466796, 0.024680831909179687, 0.02468832015991211, 0.024680320739746093, 0.024625600814819334, 0.024803136825561522, 0.024856767654418944, 0.02491596794128418, 0.024919040679931642, 0.024869888305664063, 0.02486182403564453, 0.024894336700439453, 0.024961023330688475, 0.02472697639465332, 0.024803903579711913, 0.024807424545288087, 0.024795135498046874, 0.0247926082611084, 0.024771039962768554, 0.026279775619506836, 0.025005983352661132, 0.024690944671630858, 0.024659423828125, 0.02476700782775879, 0.024756223678588866, 0.024705120086669922, 0.024696735382080077, 0.02483404731750488, 0.02477670478820801, 0.024760160446166992, 0.02468675231933594, 0.02684761619567871, 0.02618684768676758, 0.025475263595581055, 0.024950912475585937, 0.024838623046875, 0.02471494483947754, 0.024861312866210936, 0.02458399963378906, 0.024522687911987303, 0.024588319778442384, 0.024600095748901367, 0.024652448654174805, 0.02463052749633789, 0.024797760009765624, 0.02484182357788086, 0.024778911590576172, 0.024727712631225585, 0.02471664047241211, 0.02479529571533203, 0.024698944091796876, 0.02466431999206543, 0.024710527420043947, 0.024832639694213867, 0.02485068893432617, 0.024757856369018554, 0.024660480499267577, 0.024698848724365233, 0.02472137641906738, 0.0247459831237793, 0.02464681625366211, 0.024591199874877928, 0.024755935668945312, 0.02475811195373535, 0.024807872772216796, 0.024722944259643553, 0.02480400085449219, 0.024743839263916014, 0.02471529579162598, 0.0247459831237793, 0.024847776412963866, 0.025034528732299804, 0.025047775268554687, 0.02505846405029297, 0.02489792060852051, 0.024963552474975587, 0.024893440246582032, 0.02496614456176758, 0.024738815307617186, 0.024612863540649413, 0.02469808006286621, 0.024683296203613283, 0.024659135818481445, 0.02463212776184082, 0.0248668155670166, 0.0248668155670166, 0.024786943435668944, 0.02471664047241211, 0.024741695404052733, 0.02499260711669922, 0.024669984817504882, 0.02466633605957031, 0.024702016830444335, 0.024894399642944334, 0.0277040958404541, 0.026216447830200194, 0.025458688735961913, 0.0250548152923584, 0.024836448669433592, 0.024700159072875975, 0.024628032684326173, 0.024706111907958985, 0.024667072296142577, 0.024559616088867187, 0.02453494453430176, 0.024590463638305665, 0.02456163215637207, 0.024537216186523436, 0.024616191864013672, 0.024650367736816406, 
0.02473561668395996, 0.02473516845703125, 0.024715967178344726, 0.024608768463134766, 0.02465996742248535, 0.02468822479248047, 0.024674720764160156, 0.024640703201293947, 0.024585023880004882, 0.02483363151550293, 0.02482329559326172, 0.024755264282226564, 0.024717151641845705, 0.024780799865722656, 0.02477004814147949, 0.02487945556640625, 0.02466524887084961, 0.024624128341674805, 0.024698879241943358, 0.024733631134033204, 0.02464348793029785, 0.024720735549926758, 0.02487993621826172, 0.024986751556396486, 0.02488105583190918, 0.024859136581420898, 0.024803808212280273, 0.02479292869567871, 0.024780319213867186, 0.024809600830078125, 0.024774528503417968, 0.024813663482666014, 0.02476291275024414, 0.024721408843994142, 0.02465407943725586, 0.024590080261230468, 0.0247459831237793, 0.024710559844970705, 0.02462371253967285, 0.024630847930908202, 0.024818111419677734, 0.02486403274536133, 0.02485321617126465, 0.024790016174316407, 0.024624000549316405, 0.024670335769653322, 0.024677984237670897]",tokens/s,40.23952289447125,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, 
in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 990, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in 
range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 565, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 140, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 138545 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights 
self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 788, in __init__ self.ffn = DbrxFFN(config=config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 765, in __init__ self.experts = DbrxExperts( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 704, in __init__ self.mlp = DbrxExpertGLU( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 682, in __init__ self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 110247 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1133, in __init__ self.model = StableLmModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in __init__ [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 688, in __init__ self.self_attn = ATTENTION_CLASSES[config._attn_implementation](config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 339, in __init__ 
self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.use_qkv_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 14.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 190015 has 14.73 GiB memory in use. Of the allocated memory 14.54 GiB is allocated by PyTorch, and 78.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 743, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 471, in __init__ super().__init__(config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 175, in __init__ self.dense = nn.Linear(config.hidden_size, config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 41391 has 14.73 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 20.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,789.311488,763.232256,0.0,360.710144,345.493504,s,1,7.39702685546875,7.39702685546875,0.0,7.39702685546875,7.39702685546875,7.39702685546875,7.39702685546875,[7.39702685546875],,kWh,2.7277249416708098e-06,2.9335219539704244e-07,8.836118179922359e-07,3.904688955060088e-06,,MB,1223.122944,777.91232,0.0,362.807296,319.011328,s,21,0.18567612743377687,0.008841720353989375,0.00041130362173639914,0.00875984001159668,0.008996543884277343,0.009055264472961426,0.010262893104553222,"[0.009055264472961426, 0.010564800262451171, 0.008581664085388184, 0.008663647651672364, 0.00875491237640381, 0.008412863731384277, 0.008820416450500489, 0.008789823532104493, 0.008889568328857423, 0.008834527969360352, 0.008996543884277343, 0.008748319625854492, 0.008762911796569824, 0.008603679656982421, 0.00853996753692627, 0.008843392372131347, 0.00875001621246338, 0.00875158405303955, 0.008883999824523927, 0.008668383598327637, 0.00875984001159668]",tokens/s,28953.641344751766,kWh,2.560060039290848e-07,2.8232766431298103e-08,1.636648482806962e-07,4.4790361864107903e-07,tokens/kWh,571551533.2889994,MB,1269.817344,800.980992,0.0,385.875968,319.013888,s,21,10.171160644531248,0.48434098307291656,0.00506694934171791,0.4830084228515625,0.49083590698242185,0.49147677612304685,0.4942448913574219,"[0.49147677612304685, 0.4875665283203125, 0.47912213134765624, 0.49022024536132813, 0.4801130676269531, 0.4775665283203125, 0.48218267822265626, 0.4815361938476562, 0.49493692016601565, 0.4830084228515625, 0.4873675537109375, 0.49083590698242185, 0.4800626525878906, 0.47378375244140625, 0.48235464477539064, 0.48208047485351563, 0.4869407653808594, 0.4887667541503906, 0.4836106262207031, 0.481802734375, 0.48582528686523435]",tokens/s,130.07365100573261,kWh,1.3983660207180393e-05,1.5421575923130398e-06,5.837027782957436e-06,2.1362845582450867e-05,tokens/kWh,2949045.3299795035,,s,1323,10.157830680370324,0.0076778765535679,0.00022719342536665828,0.007666336059570312,0.007849932956695557,0.00793497290611267,0.00843162815093994,"[0.0076053118705749516, 0.007822303771972657, 0.007880671977996826, 0.00781657600402832, 0.007864672183990479, 0.007911231994628906, 0.007862751960754394, 0.007915296077728272, 0.008441280364990235, 0.008014623641967773, 0.00785203218460083, 0.007780096054077148, 0.007850240230560303, 0.007796000003814698, 0.007743231773376465, 0.007713056087493896, 0.007802879810333252, 0.0076622400283813475, 0.007688352108001709, 0.0077840638160705565, 0.007776063919067383, 0.007721407890319824, 0.0076902399063110355, 0.007937088012695312, 0.007838655948638916, 0.007645343780517578, 0.007640672206878662, 0.007723072052001953, 0.007690432071685791, 0.007716832160949707, 
0.007700511932373047, 0.007693344116210937, 0.008076031684875489, 0.0077693438529968266, 0.0076195521354675295, 0.0076943359375, 0.007605375766754151, 0.007712831974029541, 0.007645984172821045, 0.007690271854400635, 0.007745632171630859, 0.007740447998046875, 0.007779391765594482, 0.00769209623336792, 0.007702527999877929, 0.0076631360054016115, 0.008083935737609863, 0.007745535850524903, 0.007741695880889893, 0.00768998384475708, 0.007982592105865478, 0.007721471786499024, 0.007660863876342773, 0.007641791820526123, 0.007763967990875244, 0.007798272132873535, 0.007859712123870849, 0.007819712162017822, 0.007793087959289551, 0.007715231895446777, 0.007820288181304931, 0.008119168281555176, 0.008150879859924316, 0.00749241590499878, 0.007725088119506836, 0.007737535953521729, 0.0077974720001220705, 0.007687104225158691, 0.007825407981872558, 0.007792640209197998, 0.007829055786132813, 0.007852479934692383, 0.007890272140502929, 0.007815040111541749, 0.007782815933227539, 0.007753568172454834, 0.007806975841522217, 0.007833504199981689, 0.007699423789978027, 0.00780457592010498, 0.007808127880096436, 0.007772255897521973, 0.007705215930938721, 0.007983200073242188, 0.007978367805480956, 0.008403648376464843, 0.00856704044342041, 0.008474559783935546, 0.008439519882202148, 0.008249728202819825, 0.007780159950256348, 0.007689248085021973, 0.0077619199752807615, 0.00766860818862915, 0.007619872093200684, 0.0076295361518859866, 0.007749631881713868, 0.007628992080688477, 0.007552768230438232, 0.007524608135223388, 0.0076614079475402835, 0.007661759853363037, 0.007878592014312744, 0.00757155179977417, 0.007583583831787109, 0.007745440006256103, 0.007581696033477783, 0.007553023815155029, 0.0075632638931274416, 0.007542719841003418, 0.007462080001831055, 0.0073240318298339845, 0.007481855869293213, 0.007514111995697022, 0.007501215934753418, 0.007447135925292969, 0.007468639850616455, 0.007509664058685302, 0.007949056148529053, 0.007573503971099854, 0.0077844481468200685, 0.0075874881744384766, 0.007559711933135986, 0.007468863964080811, 0.007455872058868408, 0.007432191848754883, 0.007035103797912598, 0.007583744049072265, 0.007525824069976807, 0.0074651198387146, 0.007519807815551758, 0.0075641279220581056, 0.00742793607711792, 0.007483551979064942, 0.007423776149749756, 0.00749180793762207, 0.007553215980529785, 0.007498976230621338, 0.007475967884063721, 0.0076648321151733395, 0.007667520046234131, 0.007524640083312988, 0.007639584064483643, 0.008353280067443849, 0.007488031864166259, 0.007477407932281494, 0.0075055999755859375, 0.0073567042350769045, 0.007304128170013428, 0.007312128067016602, 0.007274687767028809, 0.0073565120697021485, 0.0074198079109191895, 0.007536608219146729, 0.007586016178131104, 0.007666816234588623, 0.007682464122772217, 0.0076884799003601074, 0.007850016117095947, 0.007641056060791016, 0.007714176177978515, 0.007612736225128174, 0.007631167888641357, 0.007688191890716553, 0.007636767864227295, 0.007679456233978271, 0.0076028800010681154, 0.007685664176940918, 0.007680543899536133, 0.007624703884124756, 0.007718912124633789, 0.007636384010314941, 0.007706975936889649, 0.007884960174560546, 0.007660639762878418, 0.007631807804107666, 0.007642655849456787, 0.007602719783782959, 0.007907392024993897, 0.007597599983215332, 0.007641056060791016, 0.007721407890319824, 0.007649280071258545, 0.007571584224700928, 0.007577439785003662, 0.007720479965209961, 0.007666336059570312, 0.007714655876159668, 0.007630815982818603, 0.007325856208801269, 0.007679840087890625, 
0.0075345921516418455, 0.00762172794342041, 0.007597119808197022, 0.0075979199409484865, 0.0075632638931274416, 0.007571455955505371, 0.007686272144317627, 0.007652383804321289, 0.0076911039352416995, 0.007647232055664062, 0.007669760227203369, 0.007860223770141601, 0.007615744113922119, 0.007705535888671875, 0.007706655979156494, 0.007657375812530518, 0.007616000175476074, 0.007618944168090821, 0.007560351848602295, 0.007693151950836182, 0.007667295932769776, 0.007605792045593262, 0.007889599800109863, 0.007618752002716065, 0.007823359966278077, 0.007702303886413574, 0.007687679767608643, 0.007663487911224365, 0.007604095935821533, 0.007620575904846191, 0.007569503784179687, 0.007549856185913086, 0.007614048004150391, 0.007620607852935791, 0.007618239879608154, 0.007555808067321777, 0.007734623908996582, 0.007586656093597412, 0.007626560211181641, 0.007554912090301514, 0.007529952049255371, 0.00759171199798584, 0.007551360130310059, 0.0075203518867492675, 0.0077807998657226565, 0.007559167861938477, 0.007586016178131104, 0.007662623882293701, 0.007615392208099365, 0.007579487800598144, 0.00752569580078125, 0.007531424045562744, 0.008271295547485352, 0.007768544197082519, 0.01076416015625, 0.010278911590576171, 0.009643136024475097, 0.008155136108398438, 0.0076928319931030276, 0.0075738558769226074, 0.007630815982818603, 0.007219039916992188, 0.007631008148193359, 0.007690368175506591, 0.007685664176940918, 0.007576223850250244, 0.007513216018676758, 0.007552735805511475, 0.0075682239532470705, 0.007589888095855713, 0.007526336193084716, 0.00783571195602417, 0.007724256038665772, 0.007627552032470703, 0.007800159931182861, 0.007707168102264404, 0.007708799839019775, 0.007742559909820557, 0.007606847763061524, 0.007647583961486817, 0.007659615993499756, 0.00785155200958252, 0.0077807998657226565, 0.0075959677696228025, 0.007587327957153321, 0.007579808235168457, 0.007647552013397217, 0.0076184639930725094, 0.007556320190429688, 0.00747430419921875, 0.007554399967193604, 0.0074767680168151855, 0.007534880161285401, 0.007553887844085693, 0.007452447891235351, 0.0074403839111328125, 0.007487711906433106, 0.0076080961227417, 0.007495264053344727, 0.007532959938049317, 0.007935999870300293, 0.00873788833618164, 0.008067999839782715, 0.008252767562866211, 0.007451072216033936, 0.007506144046783447, 0.007712768077850342, 0.007641088008880615, 0.007612224102020264, 0.0075054078102111815, 0.007439040184020996, 0.007484511852264404, 0.007439295768737793, 0.007524064064025879, 0.00786252784729004, 0.007556575775146485, 0.007461440086364746, 0.007505887985229492, 0.0074403839111328125, 0.007647232055664062, 0.0074584641456604, 0.007268479824066162, 0.007268576145172119, 0.007304543972015381, 0.006889472007751465, 0.0072434239387512206, 0.007243199825286865, 0.007218048095703125, 0.007213088035583496, 0.00725164794921875, 0.007256383895874023, 0.007314559936523438, 0.007431039810180664, 0.007354080200195312, 0.0074366722106933595, 0.007514111995697022, 0.007583648204803467, 0.007513887882232666, 0.007483615875244141, 0.007551136016845703, 0.007671648025512695, 0.007772160053253174, 0.007423999786376953, 0.007475168228149414, 0.007467040061950684, 0.007548128128051758, 0.0076244797706604, 0.0077135682106018065, 0.007680480003356934, 0.007646240234375, 0.00780291223526001, 0.007604928016662598, 0.007575551986694336, 0.007667679786682129, 0.007704607963562012, 0.007556767940521241, 0.007627103805541992, 0.007808608055114746, 0.007578015804290772, 0.007518208026885987, 0.007524352073669433, 
0.007495071887969971, 0.007579391956329346, 0.007524384021759033, 0.007664447784423828, 0.007571455955505371, 0.007487264156341553, 0.007657055854797363, 0.007685984134674072, 0.007748223781585693, 0.0076861119270324706, 0.007704768180847168, 0.00773529577255249, 0.007831552028656007, 0.007609536170959472, 0.0076377601623535156, 0.00755344009399414, 0.0075560321807861324, 0.00782966423034668, 0.007913695812225343, 0.007761343955993653, 0.007686048030853271, 0.007605247974395752, 0.007618559837341309, 0.00786636781692505, 0.007739039897918701, 0.007702432155609131, 0.007311359882354736, 0.007654528141021729, 0.007691135883331299, 0.007597631931304931, 0.007582143783569336, 0.008132608413696289, 0.007792640209197998, 0.007909247875213623, 0.007809279918670654, 0.0077200961112976075, 0.007645919799804688, 0.007757215976715088, 0.007711328029632568, 0.007859871864318848, 0.007634463787078857, 0.007638976097106933, 0.0077053442001342775, 0.007624383926391602, 0.007590496063232422, 0.007675680160522461, 0.0076340799331665035, 0.007743807792663574, 0.007722784042358399, 0.0076975679397583005, 0.007663392066955567, 0.007636799812316895, 0.007823200225830078, 0.00782972812652588, 0.007634943962097168, 0.007579648017883301, 0.007656928062438965, 0.007619103908538818, 0.007589888095855713, 0.007553152084350586, 0.007585663795471192, 0.007667200088500976, 0.007677536010742187, 0.007674880027770996, 0.0076574721336364745, 0.007736671924591065, 0.0076332159042358395, 0.007567296028137207, 0.007452288150787354, 0.00748140811920166, 0.007543168067932129, 0.007556992053985596, 0.007631231784820556, 0.007600128173828125, 0.007692287921905518, 0.007575551986694336, 0.007608672142028808, 0.007794335842132568, 0.0074522562026977535, 0.007462592124938965, 0.007500063896179199, 0.007536704063415527, 0.00759168004989624, 0.007889567852020263, 0.007604191780090332, 0.007589248180389405, 0.007418496131896973, 0.007464960098266602, 0.0075038719177246095, 0.007248703956604004, 0.007523615837097168, 0.007500256061553955, 0.007468832015991211, 0.00750377607345581, 0.007501823902130127, 0.007408192157745361, 0.0074702401161193845, 0.007411968231201172, 0.007497888088226318, 0.00765996789932251, 0.007417952060699463, 0.007437695980072021, 0.007543583869934082, 0.007571424007415771, 0.007646399974822998, 0.007534719944000244, 0.007602655887603759, 0.007489568233489991, 0.007478655815124512, 0.007615071773529053, 0.007455904006958008, 0.007659904003143311, 0.007606751918792724, 0.00760591983795166, 0.007588543891906738, 0.007726751804351807, 0.007639296054840088, 0.00761407995223999, 0.00808358383178711, 0.007979008197784423, 0.007780255794525147, 0.007661791801452636, 0.007681663990020752, 0.007698880195617676, 0.007611711978912353, 0.007631360054016113, 0.007688416004180908, 0.007630911827087403, 0.007752416133880616, 0.007612512111663818, 0.007704768180847168, 0.007609024047851563, 0.0076165437698364254, 0.007812320232391357, 0.007674272060394287, 0.007653984069824218, 0.007691872119903564, 0.007817408084869385, 0.007743552207946778, 0.007712992191314697, 0.007782112121582032, 0.007776512145996094, 0.007732607841491699, 0.007643519878387451, 0.0076531519889831544, 0.007776480197906494, 0.007746816158294678, 0.007793407917022705, 0.0076941437721252445, 0.007612127780914307, 0.007655903816223144, 0.007702527999877929, 0.007277279853820801, 0.0077164478302001955, 0.007661888122558594, 0.007755839824676513, 0.007786272048950195, 0.007751840114593506, 0.007776256084442139, 0.007860223770141601, 0.007787551879882813, 
0.008128671646118165, 0.007889056205749511, 0.007869311809539795, 0.007836832046508788, 0.007818912029266357, 0.007688511848449707, 0.007643072128295898, 0.008092160224914552, 0.007714335918426514, 0.00784604787826538, 0.0075699520111083985, 0.007804927825927735, 0.0076917438507080075, 0.00768668794631958, 0.007669055938720703, 0.007707327842712402, 0.007755775928497314, 0.007766016006469726, 0.0077313919067382815, 0.00767961597442627, 0.00770681619644165, 0.00778652811050415, 0.007847904205322265, 0.007751391887664795, 0.008160608291625977, 0.007723775863647461, 0.00788908815383911, 0.007845888137817383, 0.008177663803100586, 0.007696383953094482, 0.007790304183959961, 0.007679296016693115, 0.007721951961517334, 0.007764128208160401, 0.0077248959541320805, 0.007763360023498535, 0.00784444808959961, 0.007864575862884521, 0.0076715521812438965, 0.007730751991271973, 0.007688992023468018, 0.007749279975891113, 0.00793833589553833, 0.00782863998413086, 0.007795008182525634, 0.007704832077026367, 0.007737023830413818, 0.007733535766601562, 0.007818943977355957, 0.00817187213897705, 0.009616928100585937, 0.008939104080200195, 0.008653696060180665, 0.007827455997467042, 0.007301983833312989, 0.0077536959648132325, 0.007675104141235352, 0.007668384075164795, 0.007665023803710937, 0.007684895992279053, 0.007660575866699219, 0.007893983840942382, 0.007600319862365723, 0.007638463973999024, 0.007701888084411621, 0.007684160232543946, 0.007561791896820069, 0.007489920139312744, 0.00773641586303711, 0.007621535778045654, 0.007585536003112793, 0.007558944225311279, 0.007477727890014649, 0.007487743854522705, 0.007597824096679687, 0.007544991970062256, 0.007626016139984131, 0.007586719989776611, 0.00750710391998291, 0.007505824089050293, 0.007510623931884766, 0.0074683837890625, 0.007738016128540039, 0.008005215644836425, 0.007794784069061279, 0.007872831821441651, 0.007772160053253174, 0.007716063976287842, 0.007674655914306641, 0.007743616104125977, 0.007688064098358155, 0.007628511905670166, 0.007692575931549072, 0.007673855781555176, 0.007722176074981689, 0.007562304019927978, 0.007881984233856202, 0.007672319889068604, 0.007658944129943847, 0.007675680160522461, 0.007598879814147949, 0.007583744049072265, 0.007610367774963379, 0.007646751880645752, 0.007581600189208984, 0.0075589118003845214, 0.007674752235412597, 0.007697504043579101, 0.007723872184753418, 0.007787744045257569, 0.007639391899108887, 0.007733695983886719, 0.007759903907775879, 0.007655392169952393, 0.0076444802284240726, 0.007757887840270996, 0.007756639957427978, 0.007301407814025879, 0.007664159774780273, 0.007698783874511719, 0.007710527896881103, 0.00765337610244751, 0.007795775890350342, 0.007719103813171387, 0.007785024166107178, 0.00767952013015747, 0.007622623920440674, 0.007630559921264648, 0.008205280303955079, 0.007849984169006348, 0.00785203218460083, 0.007675807952880859, 0.007678016185760498, 0.007673312187194825, 0.007771840095520019, 0.007738239765167236, 0.007729248046875, 0.007720863819122315, 0.007727359771728516, 0.007953248023986816, 0.007694431781768799, 0.007670400142669678, 0.007694015979766846, 0.007716351985931396, 0.007714975833892823, 0.0077259202003479, 0.007649504184722901, 0.007636735916137696, 0.007720992088317871, 0.007747583866119385, 0.007960544109344483, 0.007749663829803467, 0.007649184226989746, 0.007623807907104492, 0.007688864231109619, 0.007581664085388184, 0.007647583961486817, 0.007712768077850342, 0.007806335926055908, 0.007747712135314941, 0.00791321611404419, 0.007729919910430908, 
0.007792640209197998, 0.007693855762481689, 0.0076743998527526855, 0.007729087829589844, 0.0076670398712158205, 0.007670176029205322, 0.007785727977752685, 0.007669856071472168, 0.00782371187210083, 0.00767190408706665, 0.0076763520240783695, 0.007655456066131592, 0.007553023815155029, 0.007565311908721924, 0.00800723171234131, 0.007847839832305909, 0.007684832096099854, 0.00793497610092163, 0.007265952110290528, 0.0077315840721130374, 0.00775984001159668, 0.007745535850524903, 0.007685279846191407, 0.007640160083770752, 0.00774838399887085, 0.007736288070678711, 0.00792572784423828, 0.007800447940826416, 0.00773526382446289, 0.007700928211212158, 0.00786636781692505, 0.008026111602783203, 0.007821311950683594, 0.00780083179473877, 0.007835584163665772, 0.007651391983032226, 0.007651328086853027, 0.007720479965209961, 0.007699071884155273, 0.007788383960723877, 0.007732704162597657, 0.007807519912719727, 0.007856031894683839, 0.007837215900421142, 0.00771721601486206, 0.0076097922325134276, 0.007697184085845948, 0.007753952026367188, 0.007934944152832031, 0.007763807773590088, 0.00787507200241089, 0.008040608406066895, 0.007876927852630616, 0.007788544178009033, 0.007753183841705322, 0.00776582384109497, 0.007754816055297852, 0.007952032089233398, 0.007983039855957032, 0.007919680118560792, 0.007767231941223145, 0.007764800071716309, 0.007826655864715577, 0.007858975887298584, 0.00792137622833252, 0.007763775825500488, 0.007815648078918457, 0.007815167903900147, 0.007712607860565186, 0.007825568199157714, 0.007874559879302979, 0.007796031951904297, 0.007741280078887939, 0.007740255832672119, 0.007706624031066894, 0.007700479984283447, 0.007847936153411865, 0.007772160053253174, 0.007749631881713868, 0.007772511959075927, 0.007677375793457031, 0.00730185604095459, 0.007723104000091552, 0.007597983837127686, 0.007561215877532959, 0.007614687919616699, 0.007681824207305908, 0.007663616180419922, 0.007689856052398682, 0.007696767807006836, 0.0076902399063110355, 0.007804927825927735, 0.007661568164825439, 0.007636991977691651, 0.007719999790191651, 0.0077545919418334965, 0.008181344032287598, 0.0077153282165527345, 0.00813203239440918, 0.007680575847625732, 0.00764463996887207, 0.007604832172393799, 0.007594143867492676, 0.007499008178710937, 0.00769488000869751, 0.0075979199409484865, 0.007608479976654052, 0.007597951889038086, 0.00763862419128418, 0.007559743881225586, 0.007489408016204834, 0.0077023677825927735, 0.0075012798309326174, 0.007582399845123291, 0.0076217598915100096, 0.007584928035736084, 0.007533792018890381, 0.0075556797981262205, 0.007479199886322022, 0.0075428800582885746, 0.007585055828094482, 0.007373536109924316, 0.007519904136657715, 0.00755951976776123, 0.0075447998046875, 0.007641119956970215, 0.007426047801971435, 0.0074058880805969235, 0.007523168087005616, 0.007541600227355957, 0.0075939841270446775, 0.007521791934967041, 0.007472640037536621, 0.007643295764923096, 0.007592800140380859, 0.007632991790771484, 0.007712448120117187, 0.0076044478416442875, 0.007525375843048096, 0.007656447887420655, 0.007558432102203369, 0.007395167827606201, 0.0077300481796264645, 0.007550079822540284, 0.007132991790771485, 0.0074960322380065916, 0.007485055923461914, 0.007438560009002685, 0.007444416046142578, 0.007415328025817871, 0.0074184641838073734, 0.007357632160186768, 0.007400191783905029, 0.007362207889556885, 0.007335519790649414, 0.007235936164855957, 0.007410079956054688, 0.00727455997467041, 0.0073441600799560545, 0.007298975944519043, 0.007188127994537353, 
0.007158112049102783, 0.007294976234436036, 0.0072726402282714845, 0.007333695888519287, 0.007356416225433349, 0.007306591987609863, 0.0074494719505310055, 0.007484384059906006, 0.007520256042480469, 0.007567615985870362, 0.0075372161865234375, 0.007432191848754883, 0.007436511993408203, 0.007374623775482178, 0.007344351768493652, 0.007332736015319824, 0.0077480320930480955, 0.007790048122406006, 0.007758848190307617, 0.007732511997222901, 0.0076715202331542965, 0.007694623947143555, 0.007690976142883301, 0.007561471939086914, 0.00768998384475708, 0.007622655868530274, 0.00763702392578125, 0.007704544067382813, 0.007619775772094726, 0.007627488136291504, 0.007588223934173584, 0.007685791969299316, 0.007628096103668213, 0.007588511943817139, 0.007723104000091552, 0.00756547212600708, 0.0075753917694091795, 0.007654528141021729, 0.007746431827545166, 0.007573503971099854, 0.00765337610244751, 0.007681600093841553, 0.007702976226806641, 0.007792992115020752, 0.0076550722122192385, 0.0075671358108520504, 0.007145472049713135, 0.007591616153717041, 0.007471392154693604, 0.007546463966369629, 0.00776204776763916, 0.007546336174011231, 0.007578527927398682, 0.007516096115112305, 0.007742847919464111, 0.007557759761810303, 0.007575712203979492, 0.007589727878570557, 0.00758131217956543, 0.007666111946105957, 0.007649312019348145, 0.007604159832000732, 0.007712128162384033, 0.007674464225769043, 0.007628799915313721, 0.007902527809143066, 0.007701183795928955, 0.007610367774963379, 0.0075400638580322265, 0.007547552108764648, 0.007595200061798096, 0.007553855895996094, 0.007544832229614258, 0.007508992195129394, 0.007615488052368164, 0.007582719802856445, 0.007519231796264648, 0.007679999828338623, 0.007685120105743408, 0.0075807681083679196, 0.007534495830535889, 0.007616096019744873, 0.007614880084991455, 0.007633952140808106, 0.007934207916259766, 0.007688352108001709, 0.007723423957824707, 0.00771008014678955, 0.007686944007873535, 0.007622655868530274, 0.007642496109008789, 0.0077341761589050295, 0.007533408164978027, 0.007670080184936524, 0.00767852783203125, 0.007667391777038574, 0.007692671775817871, 0.007897024154663085, 0.007737343788146973, 0.007661568164825439, 0.007753727912902832, 0.007757120132446289, 0.007752064228057861, 0.007772480010986328, 0.0077123198509216305, 0.007807136058807373, 0.007645247936248779, 0.007706367969512939, 0.007772640228271484, 0.007384128093719482, 0.007958335876464844, 0.007733248233795166, 0.007600128173828125, 0.007657120227813721, 0.0075697598457336425, 0.007553023815155029, 0.007548543930053711, 0.007626976013183594, 0.007640672206878662, 0.007647808074951172, 0.007688191890716553, 0.007634943962097168, 0.007646912097930908, 0.0075935997962951664, 0.007875423908233642, 0.007765727996826172, 0.007641215801239013, 0.007619999885559082, 0.007617119789123535, 0.007637152194976807, 0.007720799922943116, 0.007628799915313721, 0.007556159973144531, 0.007470367908477783, 0.00750867223739624, 0.0075879359245300294, 0.0074494719505310055, 0.0075038719177246095, 0.0075606398582458495, 0.007696959972381592, 0.007636384010314941, 0.007612800121307373, 0.007622879981994629, 0.007516160011291504, 0.007593472003936768, 0.0077257280349731445, 0.007534527778625488, 0.007540639877319336, 0.007579552173614502, 0.0075120959281921385, 0.007510144233703614, 0.007532576084136963, 0.007554975986480713, 0.0078080959320068356, 0.007612800121307373, 0.008017888069152832, 0.007644896030426025, 0.007663519859313965, 0.007660223960876465, 0.00770470380783081, 0.007734528064727783, 
0.007674560070037841, 0.007649087905883789, 0.007751872062683105, 0.007952703952789306, 0.007728288173675537, 0.007602911949157715, 0.007800384044647217, 0.00764137601852417, 0.007674015998840332, 0.007675168037414551, 0.007635680198669434, 0.007263936042785644, 0.007665472030639648, 0.007537600040435791, 0.007520256042480469, 0.007527775764465332, 0.007596767902374267, 0.0075980801582336424, 0.007542719841003418, 0.0075467519760131834, 0.0075697598457336425, 0.007623807907104492, 0.007557536125183105, 0.007768415927886963, 0.0077452797889709475, 0.00788700819015503, 0.007767807960510254, 0.007794976234436035, 0.007662975788116455, 0.007872735977172852, 0.007711071968078614, 0.007700511932373047, 0.007610176086425781, 0.007627071857452393, 0.008511360168457032, 0.008123519897460938, 0.0077872958183288574, 0.007702623844146729, 0.007767551898956298, 0.007713280200958252, 0.007659520149230957, 0.007769728183746338, 0.007707007884979248, 0.007712768077850342, 0.007750815868377685, 0.007644000053405761, 0.007739200115203858, 0.0076146559715271, 0.007684447765350342, 0.007707456111907959, 0.007756735801696777, 0.007643231868743897, 0.007692192077636719, 0.007729055881500244, 0.007644991874694824, 0.007657663822174072, 0.007684095859527588, 0.007915200233459473, 0.007825727939605714, 0.007779551982879639, 0.007796703815460205, 0.007765920162200928, 0.00776694393157959, 0.007789663791656494, 0.007696479797363281, 0.007658559799194336, 0.00769974422454834, 0.00779695987701416, 0.007887296199798584, 0.0077554240226745605, 0.007880864143371582, 0.007691648006439209, 0.007758272171020508, 0.007721151828765869, 0.007311359882354736, 0.0077927041053771975, 0.0077183680534362795, 0.009275872230529785, 0.007577600002288819, 0.007679999828338623, 0.007473440170288086, 0.007547711849212647, 0.007549856185913086, 0.007737343788146973, 0.007618527889251709, 0.007495967864990234, 0.007492991924285888, 0.007510015964508057, 0.00756774377822876, 0.007612095832824707, 0.007604576110839843, 0.007579616069793701, 0.0076943359375, 0.007709983825683594, 0.007590911865234375, 0.007624288082122802, 0.00768012809753418, 0.007673727989196777, 0.007645311832427978, 0.007722911834716797, 0.00784822416305542, 0.007745344161987305, 0.007716479778289795, 0.007690656185150147, 0.007715807914733887, 0.007721983909606934, 0.007763391971588135, 0.007659584045410156, 0.00783196783065796, 0.007849728107452393, 0.00770681619644165, 0.00782147216796875, 0.007712768077850342, 0.00774348783493042, 0.007719103813171387, 0.007738592147827149, 0.007927775859832764, 0.007913248062133789, 0.00788976001739502, 0.007770112037658691, 0.0077844481468200685, 0.007740608215332031, 0.007698368072509765, 0.007888063907623291, 0.0077898879051208495, 0.007786208152770996, 0.007772831916809082, 0.007802271842956543, 0.007796671867370606, 0.0077545919418334965, 0.00796447992324829, 0.007946239948272706, 0.007860223770141601, 0.00798095989227295, 0.007788640022277832, 0.007960576057434082, 0.007788544178009033, 0.007354176044464112, 0.007724256038665772, 0.007687136173248291, 0.007840960025787354, 0.007785344123840332, 0.007859488010406495, 0.0083854398727417, 0.008054304122924804, 0.00798467206954956, 0.00784278392791748, 0.00781283187866211, 0.007743135929107666, 0.007731552124023437, 0.007706624031066894, 0.007673344135284424, 0.0077296638488769534, 0.00790937614440918, 0.00764851188659668, 0.0076620478630065915, 0.0076967039108276365, 0.007645088195800781, 0.0076431999206542965, 0.0075673599243164065, 0.007603487968444824, 0.0076070079803466795, 
0.00832102394104004, 0.007573599815368652, 0.007515583992004395, 0.007451104164123535, 0.007526656150817871, 0.0076632962226867675, 0.007471168041229248, 0.007735328197479248, 0.007697951793670654, 0.007565760135650635, 0.007589888095855713, 0.007564767837524414, 0.007542304039001465, 0.007511295795440674, 0.007476384162902832, 0.007493631839752197, 0.00753110408782959, 0.007539999961853027, 0.007592671871185302, 0.0076299839019775394, 0.007655488014221191, 0.007514944076538086, 0.007773471832275391, 0.007645887851715088, 0.007616511821746826, 0.0076574721336364745, 0.007639039993286132, 0.007607808113098144, 0.007608672142028808, 0.007633056163787842, 0.0076390719413757325, 0.007521344184875488, 0.007500703811645508, 0.007522304058074952, 0.007653535842895508, 0.007536287784576416, 0.00776313591003418, 0.007600416183471679, 0.007120416164398193, 0.0074719681739807125, 0.007658495903015137, 0.00756771183013916, 0.007711552143096924, 0.0075138559341430666, 0.00765337610244751, 0.007486847877502442, 0.007501952171325684, 0.007544672012329102, 0.0074923839569091795, 0.007440512180328369, 0.007478464126586914, 0.0077420158386230465, 0.007514111995697022, 0.007431871891021728, 0.0075021438598632815, 0.007423808097839355, 0.007422143936157226, 0.007518208026885987, 0.0073768959045410155, 0.007433824062347412, 0.00740835189819336, 0.007450592041015625, 0.007468768119812012, 0.007642591953277588, 0.007641568183898926, 0.007594143867492676, 0.007727007865905761, 0.007734975814819336, 0.0077786240577697756, 0.007658783912658691, 0.007535552024841309, 0.007656896114349365, 0.007661503791809082, 0.0075718722343444825, 0.00755951976776123, 0.007831007957458495, 0.007620831966400146, 0.007795872211456299, 0.0076624321937561034, 0.0077270717620849605, 0.007681632041931152, 0.007612095832824707, 0.007612607955932617, 0.008829471588134765, 0.00772492790222168, 0.0078067522048950195, 0.007907968044281006, 0.007771872043609619, 0.007677951812744141, 0.007739200115203858, 0.007714144229888916, 0.007701280117034912, 0.007620672225952149, 0.007763967990875244, 0.007817215919494629, 0.0075980801582336424, 0.0076490240097045895, 0.007932159900665283, 0.00796614408493042, 0.007707200050354004, 0.007611551761627197, 0.0072624959945678715, 0.007654592037200928, 0.007656256198883057, 0.007725056171417236, 0.007747583866119385, 0.007706624031066894, 0.007749631881713868, 0.007673151969909668, 0.007598144054412842, 0.007727744102478028, 0.007634943962097168, 0.007614463806152344, 0.007602240085601807, 0.007657120227813721, 0.007723296165466308, 0.007890944004058837, 0.0076984319686889645, 0.007675871849060058, 0.007692512035369873, 0.007882431983947754, 0.007657599925994873, 0.007618559837341309, 0.007648320198059082, 0.007769023895263672, 0.007682240009307862, 0.007574495792388916, 0.007746399879455567, 0.007666719913482666, 0.0078119997978210445, 0.007708576202392578, 0.007657792091369629, 0.007720799922943116, 0.0077413439750671385, 0.007683199882507324, 0.007664224147796631, 0.007581567764282227, 0.007631392002105713, 0.007964735984802246, 0.0076856322288513185, 0.007712287902832031, 0.007620704174041748, 0.007676095962524414, 0.007620960235595703, 0.007716608047485351, 0.007690144062042236, 0.007766176223754883, 0.007731647968292237, 0.007723008155822754, 0.007727104187011719, 0.007794943809509278, 0.007722752094268799, 0.007727200031280517, 0.007622687816619873, 0.007873536109924317, 0.007807775974273682, 0.007747744083404541, 0.007794079780578614, 0.0077209601402282715, 0.007707456111907959, 
0.007735136032104492, 0.0076269121170043945, 0.007861951828002929, 0.007671103954315185]",tokens/s,130.24434464699763,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.214592,1326.383104,0.0,931.135488,917.648384,s,1,7.24930126953125,7.24930126953125,0.0,7.24930126953125,7.24930126953125,7.24930126953125,7.24930126953125,[7.24930126953125],,kWh,9.300030833340618e-06,1.0187963388527686e-06,3.1194469399947744e-06,1.343827411218816e-05,,MB,1148.239872,1458.50368,0.0,1050.673152,1018.330112,s,10,0.17473472023010256,0.017473472023010257,0.00032826587233633533,0.017637968063354492,0.017802262306213377,0.017810283184051513,0.01781669988632202,"[0.01692780876159668, 0.01697609519958496, 0.017071264266967773, 0.017644447326660158, 0.017800479888916015, 0.017631488800048827, 0.017719167709350586, 0.017500288009643556, 0.01781830406188965, 0.017645376205444336]",tokens/s,14650.780317894565,kWh,5.129729545540996e-07,5.6536604880475513e-08,3.414330216842004e-07,9.109425811187756e-07,tokens/kWh,281027591.97576773,MB,1176.51456,1475.280896,0.0,1067.450368,1033.282048,s,10,10.554966979980469,1.055496697998047,0.019562259450065147,1.0613449096679688,1.0746231079101562,1.076492706298828,1.0779883850097656,"[1.0216134643554688, 1.019472412109375, 1.0551478271484376, 1.0742076416015625, 1.071249755859375, 1.0658709716796875, 1.06161083984375, 1.046352783203125, 1.0783623046875, 1.0610789794921875]",tokens/s,59.68753869101784,kWh,2.952359973044379e-05,3.255993862737637e-06,1.4851884688515072e-05,4.7631478281696486e-05,tokens/kWh,1322654.7290305123,,s,630,10.548605572700493,0.016743818369365872,0.0004883465879551693,0.016839056015014647,0.017199705696105957,0.017319140911102295,0.017867099857330326,"[0.015735648155212403, 0.016029695510864257, 0.0160328311920166, 0.01604243278503418, 0.015974464416503905, 0.016023296356201172, 0.016036544799804688, 0.017674495697021484, 0.016321632385253908, 0.016054943084716798, 0.016043455123901366, 0.016112192153930664, 0.01601740837097168, 0.016089088439941408, 0.016088607788085938, 0.016128032684326173, 0.01617145538330078, 0.016051200866699217, 0.01614463996887207, 0.016037664413452148, 0.016076959609985352, 0.01610016059875488, 0.016183296203613282, 0.01608697509765625, 0.016150592803955078, 0.016082239151000977, 0.01610406494140625, 0.016195648193359374, 0.016128000259399415, 0.016496639251708984, 0.01596726417541504, 0.016001535415649415, 0.016041439056396486, 0.016153600692749022, 0.01612953567504883, 0.016041696548461912, 0.016371679306030274, 0.016505727767944334, 0.016426944732666017, 0.016332799911499024, 0.016385791778564453, 0.016278976440429686, 0.016296768188476564, 0.01621401596069336, 0.016183296203613282, 0.016060415267944335, 0.01600223922729492, 0.015999808311462402, 0.016017055511474608, 0.016078624725341797, 0.016155199050903322, 0.015984095573425294, 
0.01602409553527832, 0.016262208938598633, 0.016014272689819337, 0.01603379249572754, 0.01602681541442871, 0.016895872116088867, 0.0175031681060791, 0.01760665512084961, 0.016152576446533205, 0.016281183242797852, 0.01628758430480957, 0.015768608093261718, 0.015981535911560058, 0.015935487747192383, 0.01598591995239258, 0.01641753578186035, 0.016683008193969725, 0.016441408157348632, 0.016324256896972655, 0.016214303970336914, 0.016297536849975584, 0.016320415496826172, 0.016322816848754883, 0.016174976348876952, 0.016126495361328125, 0.016121248245239257, 0.016017887115478517, 0.016273344039916992, 0.016175167083740234, 0.016039039611816405, 0.01624153518676758, 0.016107519149780272, 0.016018463134765626, 0.01605116844177246, 0.016006816864013673, 0.016021856307983397, 0.017209344863891602, 0.01609884834289551, 0.016044448852539063, 0.01607686424255371, 0.016088096618652344, 0.01603843116760254, 0.01605062484741211, 0.01605788803100586, 0.016019264221191407, 0.016368127822875975, 0.016300191879272462, 0.016652288436889647, 0.016696672439575195, 0.01655695915222168, 0.016360864639282227, 0.01609539222717285, 0.016152767181396483, 0.016205087661743164, 0.015977184295654298, 0.016004831314086913, 0.015994303703308104, 0.016048896789550782, 0.016133567810058595, 0.016111679077148437, 0.016028255462646485, 0.016164735794067384, 0.016054048538208007, 0.016007104873657228, 0.01603830337524414, 0.016138240814208983, 0.016111616134643555, 0.016039743423461914, 0.01643846321105957, 0.016089696884155274, 0.01625904083251953, 0.01607651138305664, 0.016032480239868165, 0.016090208053588868, 0.0157260799407959, 0.015909055709838867, 0.016009536743164063, 0.016273408889770507, 0.016209312438964844, 0.016144256591796875, 0.016095968246459962, 0.0160296630859375, 0.016172607421875, 0.016134431838989258, 0.0161112003326416, 0.016074783325195314, 0.016102079391479493, 0.0160515193939209, 0.016048704147338867, 0.016150527954101563, 0.016072704315185548, 0.016225791931152343, 0.0160317440032959, 0.01606447982788086, 0.016022048950195312, 0.01619055938720703, 0.016560415267944335, 0.016674720764160156, 0.017380191802978517, 0.017051519393920897, 0.01705743980407715, 0.017076095581054686, 0.01695996856689453, 0.017227327346801758, 0.017111488342285156, 0.017641471862792968, 0.01706972885131836, 0.0172445125579834, 0.017063936233520507, 0.01726438331604004, 0.01700057601928711, 0.01697385597229004, 0.017112159729003908, 0.01703424072265625, 0.016903999328613282, 0.016742591857910157, 0.01728102493286133, 0.016887807846069337, 0.01721958351135254, 0.017137664794921875, 0.016859136581420898, 0.01696076774597168, 0.01703785514831543, 0.017029056549072264, 0.017166624069213866, 0.017133567810058595, 0.017176607131958007, 0.017237535476684572, 0.017308095932006835, 0.01717612838745117, 0.017160640716552735, 0.01717625617980957, 0.016970048904418944, 0.01680179214477539, 0.017479679107666016, 0.017108991622924806, 0.01713961601257324, 0.01660412788391113, 0.01707414436340332, 0.01686960029602051, 0.016716960906982423, 0.0169007682800293, 0.016889856338500975, 0.016732160568237304, 0.01656559944152832, 0.01684342384338379, 0.016699392318725585, 0.01659699249267578, 0.016658143997192384, 0.01705603218078613, 0.016842815399169923, 0.01669865608215332, 0.016691583633422852, 0.016787839889526368, 0.01712268829345703, 0.01745952033996582, 0.01720137596130371, 0.017499391555786132, 0.019490848541259764, 0.018039199829101564, 0.017139936447143556, 0.016836511611938478, 0.01686137580871582, 0.016791263580322267, 
0.016924224853515624, 0.01713385581970215, 0.017101247787475585, 0.016881023406982422, 0.017132192611694335, 0.017317855834960937, 0.016922399520874022, 0.01707766342163086, 0.016802623748779298, 0.017004512786865236, 0.01697932815551758, 0.016927391052246093, 0.016970848083496092, 0.017133600234985353, 0.017019392013549805, 0.01694963264465332, 0.01701888084411621, 0.01689616012573242, 0.016933984756469726, 0.017889919281005858, 0.016887935638427734, 0.017308832168579102, 0.017179519653320312, 0.017304832458496094, 0.01675075149536133, 0.017304128646850585, 0.017059743881225584, 0.017016927719116212, 0.017035263061523438, 0.016920576095581053, 0.01691788864135742, 0.017207679748535157, 0.01695155143737793, 0.01704960060119629, 0.017016000747680664, 0.016941343307495117, 0.01683008003234863, 0.016934688568115235, 0.017101343154907227, 0.017596063613891603, 0.017020448684692383, 0.01705068778991699, 0.01714156723022461, 0.017049407958984374, 0.017006175994873047, 0.01715827178955078, 0.01697849655151367, 0.0171560001373291, 0.017129472732543945, 0.017449184417724608, 0.0171428165435791, 0.01698508834838867, 0.017168127059936523, 0.01710470390319824, 0.01690812873840332, 0.017310047149658205, 0.016863231658935548, 0.016689151763916017, 0.016855039596557618, 0.017303552627563477, 0.01717043113708496, 0.017145856857299805, 0.01717180824279785, 0.017076896667480468, 0.016988256454467773, 0.01695120048522949, 0.01707241630554199, 0.016762239456176758, 0.01715439987182617, 0.016772512435913087, 0.016833120346069336, 0.017077312469482422, 0.016853952407836915, 0.01687548828125, 0.016947328567504884, 0.016959327697753906, 0.017223743438720702, 0.017158432006835936, 0.017413856506347657, 0.016990207672119142, 0.016986112594604492, 0.016949247360229493, 0.01661948776245117, 0.01665827178955078, 0.016977407455444335, 0.01674934387207031, 0.016746400833129883, 0.016785408020019533, 0.016855039596557618, 0.01681203269958496, 0.01682841682434082, 0.017004831314086914, 0.017100511550903322, 0.016942975997924804, 0.016630975723266602, 0.01633523178100586, 0.0169451847076416, 0.017103391647338866, 0.01703340721130371, 0.016616800308227538, 0.016629663467407227, 0.016738496780395507, 0.016527103424072265, 0.016677343368530273, 0.016856704711914063, 0.017048383712768556, 0.01727065658569336, 0.016943136215209962, 0.016651872634887696, 0.01661292839050293, 0.01673616027832031, 0.01655072021484375, 0.016924736022949218, 0.01669126319885254, 0.016363519668579102, 0.016007104873657228, 0.016673887252807617, 0.017052223205566406, 0.017199520111083985, 0.016957504272460937, 0.016935935974121095, 0.01691334342956543, 0.016670719146728515, 0.016515071868896485, 0.016594112396240233, 0.016783327102661134, 0.0168723201751709, 0.01683987236022949, 0.016950368881225586, 0.016844480514526368, 0.016918527603149415, 0.01683046340942383, 0.016702911376953126, 0.016877695083618163, 0.01683705520629883, 0.016821535110473632, 0.017025760650634766, 0.01735856056213379, 0.017192960739135742, 0.017168031692504884, 0.016917119979858397, 0.017004032135009766, 0.016648544311523437, 0.01672412872314453, 0.016930240631103516, 0.01694927978515625, 0.01683305549621582, 0.01737107276916504, 0.01671993637084961, 0.016801055908203126, 0.01647177505493164, 0.01691766357421875, 0.016973087310791016, 0.016917055130004882, 0.016871423721313478, 0.01695088005065918, 0.019883712768554686, 0.017387840270996095, 0.017113664627075195, 0.017219423294067383, 0.017301055908203126, 0.016910783767700194, 0.016424800872802733, 0.016643360137939454, 
0.016883392333984375, 0.017431936264038085, 0.017148672103881837, 0.016998176574707032, 0.016732160568237304, 0.01664009666442871, 0.017137664794921875, 0.016626848220825195, 0.016675615310668947, 0.016680288314819335, 0.016858112335205077, 0.01693462371826172, 0.016982015609741212, 0.016744447708129884, 0.016644096374511717, 0.01702911949157715, 0.01699260711669922, 0.016887584686279298, 0.017104448318481444, 0.016875839233398436, 0.016975360870361327, 0.016861919403076173, 0.01681545639038086, 0.01687366485595703, 0.01711568069458008, 0.016803455352783204, 0.016765024185180662, 0.01676255989074707, 0.01688960075378418, 0.016937311172485352, 0.016838239669799804, 0.016856767654418944, 0.016697568893432616, 0.016454368591308593, 0.016539264678955078, 0.01675916862487793, 0.016584447860717774, 0.01634124755859375, 0.016725120544433595, 0.01714441680908203, 0.017104639053344726, 0.016986879348754882, 0.016903968811035158, 0.016805919647216797, 0.01687548828125, 0.016955392837524414, 0.016846975326538085, 0.016885631561279296, 0.01697587203979492, 0.01686083221435547, 0.016890207290649415, 0.016873472213745116, 0.01659699249267578, 0.016670400619506837, 0.016777151107788085, 0.016510847091674805, 0.01657907295227051, 0.0163492488861084, 0.01676691246032715, 0.018147327423095702, 0.01682431983947754, 0.016661792755126952, 0.01697455978393555, 0.016766016006469726, 0.016810815811157228, 0.016449151992797853, 0.016224544525146486, 0.016248128890991212, 0.01618217658996582, 0.016162080764770506, 0.016253664016723634, 0.016476415634155275, 0.01689356803894043, 0.016647359848022462, 0.0163624324798584, 0.01617100715637207, 0.016174240112304686, 0.016190303802490234, 0.016154624938964843, 0.016670719146728515, 0.016717824935913086, 0.016862464904785157, 0.016742719650268554, 0.016726463317871094, 0.016447488784790038, 0.016321760177612304, 0.016083711624145507, 0.01606768035888672, 0.016177951812744142, 0.0162653751373291, 0.016338815689086916, 0.01617897605895996, 0.016385759353637695, 0.01608768081665039, 0.01601068878173828, 0.01624940872192383, 0.016044031143188475, 0.016022880554199218, 0.016165536880493166, 0.016166431427001953, 0.016058847427368163, 0.016106527328491212, 0.015979616165161133, 0.016150400161743163, 0.01598259162902832, 0.01642300796508789, 0.0165579833984375, 0.01741823959350586, 0.01635103988647461, 0.016734399795532227, 0.01706710433959961, 0.017089439392089845, 0.016941247940063478, 0.01781123161315918, 0.020127071380615234, 0.017105855941772462, 0.017173728942871093, 0.016986848831176758, 0.017149728775024416, 0.01705561637878418, 0.017591615676879883, 0.017134687423706055, 0.017103904724121093, 0.0170133113861084, 0.01682044792175293, 0.017222911834716796, 0.017117727279663087, 0.01699430465698242, 0.01697590446472168, 0.01714787292480469, 0.01700399971008301, 0.01722217559814453, 0.01730294418334961, 0.017320192337036133, 0.017471839904785156, 0.017056896209716798, 0.01705459213256836, 0.01696767997741699, 0.01697567939758301, 0.017123199462890624, 0.017099071502685546, 0.017098751068115235, 0.01721139144897461, 0.017139360427856444, 0.01708847999572754, 0.017071935653686525, 0.017068607330322266, 0.017131519317626954, 0.017242271423339842, 0.01707811164855957, 0.017257984161376954, 0.01709926414489746, 0.01710652732849121, 0.01733286476135254, 0.017198879241943358, 0.017238016128540038, 0.01705513572692871, 0.01700271987915039, 0.016953344345092772, 0.017184160232543946, 0.017066976547241212, 0.017039039611816405, 0.017090879440307617, 0.0175916805267334, 
0.016969600677490235, 0.016978015899658205, 0.01715852737426758, 0.01703753662109375, 0.017006784439086913, 0.01703327941894531, 0.01719043159484863, 0.017170400619506837, 0.017434944152832033, 0.017268735885620116, 0.017196928024291992, 0.017165760040283203, 0.017553951263427733, 0.01718492889404297, 0.01679302406311035, 0.016891456604003905, 0.016806848526000978, 0.01701091194152832, 0.017106880187988283, 0.01687558364868164, 0.01675257682800293, 0.016844736099243165, 0.017045408248901366, 0.0165513916015625, 0.016777759552001954, 0.017000064849853516, 0.01718729591369629, 0.0173156795501709, 0.017073568344116212, 0.01687558364868164, 0.016775775909423828, 0.01679545593261719, 0.01692185592651367, 0.017187776565551757, 0.01690118408203125, 0.01694611167907715, 0.016781343460083007, 0.016730112075805666, 0.016586143493652342, 0.016627328872680664, 0.016755264282226564, 0.016714111328125, 0.01661952018737793, 0.01662156867980957, 0.01682784080505371, 0.017351232528686523, 0.017133663177490235, 0.01688675117492676, 0.016792768478393554, 0.01682815933227539, 0.01820240020751953, 0.017045728683471678, 0.016780799865722656, 0.016765439987182617, 0.016857311248779296, 0.016879295349121092, 0.016852575302124022, 0.016908191680908204, 0.016750688552856444, 0.016695327758789062, 0.01670569610595703, 0.01668947219848633, 0.01635327911376953, 0.01613804817199707, 0.0166627197265625, 0.017133567810058595, 0.017024896621704103, 0.016902496337890625, 0.016841535568237306, 0.01692720031738281, 0.01693084716796875, 0.017205024719238283, 0.01684883117675781, 0.016841215133666994, 0.01676313591003418, 0.016742399215698242, 0.01658470344543457, 0.016695295333862305, 0.01664204788208008, 0.016582656860351562, 0.016711679458618164, 0.016514688491821288, 0.01621343994140625, 0.016183935165405273, 0.016951616287231446, 0.017260671615600586]",tokens/s,59.723533661209466,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-7B,Qwen/Qwen-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-66b,facebook/opt-66b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-7b,stabilityai/stablelm-base-alpha-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1121, in __init__ self.embed_out = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 592.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 300.12 MiB is free. Process 194395 has 14.45 GiB memory in use. Of the allocated memory 14.33 GiB is allocated by PyTorch, and 1.52 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-2.7B,EleutherAI/gpt-neo-2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return 
model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-7b,tiiuae/falcon-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-7b-hf,meta-llama/Llama-2-7b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.388288,13880.918016,0.0,13478.395904,13476.849152,s,1,7.7051328125,7.7051328125,0.0,7.7051328125,7.7051328125,7.7051328125,7.7051328125,[7.7051328125],,kWh,8.676565100032955e-06,9.494122711540021e-07,4.813892739999326e-06,1.4439870111186283e-05,,MB,1214.7712,14113.701888,0.0,13700.694016,13671.637504,s,10,12.242638549804687,1.2242638549804687,0.005052518134177237,1.2261983032226564,1.2286045776367187,1.2293965270996094,1.2300300866699219,"[1.212276611328125, 1.2208096923828125, 1.22063720703125, 1.222546875, 1.2258533935546876, 1.226543212890625, 1.2278807373046876, 1.2284285888671875, 1.2301884765625, 1.2274737548828125]",tokens/s,209.10525043972984,kWh,3.5707227094583985e-05,3.938024676978379e-06,2.3627435568599613e-05,6.327268734016199e-05,tokens/kWh,4045979.564985308,MB,1262.911488,14115.79904,0.0,13702.791168,13671.640064,s,10,37.7516875,3.77516875,0.0007961397691178017,3.77528173828125,3.77609013671875,3.776096948242188,3.776102397460938,"[3.773620849609375, 3.774213623046875, 3.774939453125, 3.775175048828125, 3.776088623046875, 3.7746103515625, 3.775486328125, 3.77606103515625, 3.775388427734375, 3.776103759765625]",tokens/s,16.687995735289977,kWh,0.00011027927078291893,1.216346727562795e-05,7.340491983500105e-05,0.0001958476578935479,tokens/kWh,321678.59793474455,,s,630,37.74766536712648,0.059916929154168984,0.0002804739375384271,0.059898752212524414,0.06021040840148926,0.06027617588043213,0.06102690738677978,"[0.06122908782958984, 0.05975830459594727, 0.05956576156616211, 0.05949292755126953, 0.059477344512939456, 0.05962736129760742, 0.05951724624633789, 0.059664894104003906, 0.059576190948486325, 0.05958054351806641, 0.059807743072509766, 0.05965619277954102, 0.05973763275146484, 0.05959932708740234, 0.05960025787353516, 0.05970185470581055, 0.05982825469970703, 0.05987523269653321, 0.059799457550048826, 0.05973215866088867, 0.05963561630249024, 0.05968505477905273, 0.059652000427246096, 0.05970943832397461, 
0.05977849578857422, 0.05984934234619141, 0.059929790496826174, 0.05981801605224609, 0.05983715057373047, 0.05991161727905273, 0.05982265472412109, 0.05989990234375, 0.059870208740234375, 0.06015663909912109, 0.05991452789306641, 0.06007324981689453, 0.06008310317993164, 0.059968734741210936, 0.05971974563598633, 0.05977142333984375, 0.059779041290283205, 0.05986617660522461, 0.059870174407958984, 0.05991424179077148, 0.05991219329833984, 0.060058719635009764, 0.05996867370605469, 0.06003276824951172, 0.060034175872802735, 0.06009487915039063, 0.060000225067138674, 0.06015139389038086, 0.0601732177734375, 0.06013270568847656, 0.06002880096435547, 0.059983840942382814, 0.06003523254394531, 0.060267265319824216, 0.06012713623046875, 0.060229633331298826, 0.06012268829345703, 0.06026079940795898, 0.060233055114746095, 0.06106959915161133, 0.0596440658569336, 0.059481601715087894, 0.059431262969970707, 0.05948422241210938, 0.059617183685302735, 0.05956108856201172, 0.05958544158935547, 0.05949849700927735, 0.05979471969604492, 0.059644641876220705, 0.05969510269165039, 0.059690303802490234, 0.05981184005737305, 0.059724414825439456, 0.05991398239135742, 0.059959617614746094, 0.0600002555847168, 0.05984431838989258, 0.05982646560668945, 0.05957593536376953, 0.05960515213012695, 0.05959088134765625, 0.05960704040527344, 0.05960195159912109, 0.05979235076904297, 0.05969094467163086, 0.059838401794433595, 0.05977836990356445, 0.05985260772705078, 0.05986406326293945, 0.0598590087890625, 0.059951038360595704, 0.0600590705871582, 0.06003993606567383, 0.060069534301757814, 0.059932510375976564, 0.05992070388793945, 0.05977017593383789, 0.059754432678222655, 0.059840320587158206, 0.05991929626464844, 0.06010835266113281, 0.05999987030029297, 0.0600010871887207, 0.06012313461303711, 0.06010620880126953, 0.060107295989990234, 0.060229633331298826, 0.06028611373901367, 0.06024483108520508, 0.06036684799194336, 0.06021017456054688, 0.060197887420654295, 0.060163841247558594, 0.060006656646728516, 0.05999375915527344, 0.06018396759033203, 0.0599947509765625, 0.059993953704833985, 0.06000076675415039, 0.06015350341796875, 0.06016159820556641, 0.06138265609741211, 0.0599035530090332, 0.05954742431640625, 0.05949507141113281, 0.0594507827758789, 0.05965884780883789, 0.059535358428955076, 0.059563232421875, 0.059450145721435546, 0.05962137603759766, 0.059486209869384764, 0.05952716827392578, 0.059619327545166016, 0.0597391357421875, 0.05970175933837891, 0.059740577697753906, 0.059851070404052735, 0.05993247985839844, 0.059788734436035156, 0.059720222473144534, 0.059651966094970706, 0.05971267318725586, 0.05975270462036133, 0.059669441223144534, 0.05972150421142578, 0.05982822418212891, 0.0599101448059082, 0.059803489685058595, 0.05983865737915039, 0.06003084945678711, 0.059854656219482424, 0.0599752311706543, 0.059978015899658205, 0.06019635009765625, 0.05996761703491211, 0.05989868927001953, 0.059797119140625, 0.05984096145629883, 0.05992031860351563, 0.059842559814453126, 0.05988556671142578, 0.059889663696289064, 0.059850753784179686, 0.059837631225585934, 0.06003385543823242, 0.05996063995361328, 0.05989779281616211, 0.06002969741821289, 0.060157630920410154, 0.060279102325439454, 0.06023088073730469, 0.060222240447998045, 0.06013337707519531, 0.06016204833984375, 0.06001676940917969, 0.06012051010131836, 0.060116928100585935, 0.06092031860351563, 0.060101760864257815, 0.06020780944824219, 0.06024415969848633, 0.060155487060546874, 0.06015631866455078, 0.06099548721313477, 0.05967545700073242, 
0.059512832641601565, 0.05949849700927735, 0.05945718383789062, 0.05953571319580078, 0.05953740692138672, 0.05946739196777344, 0.05945177459716797, 0.059609153747558596, 0.05953529739379883, 0.05981990432739258, 0.05974848175048828, 0.06033401489257813, 0.05987129592895508, 0.05985424041748047, 0.06010121536254883, 0.060170238494873046, 0.05991017532348633, 0.059850719451904295, 0.059696384429931644, 0.05980153656005859, 0.05976147079467774, 0.059840511322021485, 0.059772926330566405, 0.05977206420898438, 0.059657215118408206, 0.059735904693603514, 0.05961222457885742, 0.05988240051269531, 0.05993270492553711, 0.05981603240966797, 0.05997353744506836, 0.06004121780395508, 0.059957248687744144, 0.05996134567260742, 0.05986713409423828, 0.05989580917358398, 0.059770687103271485, 0.05983865737915039, 0.05976678466796875, 0.05993881607055664, 0.05990326309204102, 0.06006201553344727, 0.0598798713684082, 0.06006496047973633, 0.060010398864746094, 0.060181537628173826, 0.060148735046386716, 0.060364990234375, 0.060140190124511717, 0.060144927978515624, 0.060109249114990236, 0.06040195083618164, 0.06013443374633789, 0.06005014419555664, 0.05995135879516601, 0.06013542556762695, 0.06011084747314453, 0.060217342376708984, 0.06012313461303711, 0.06031148910522461, 0.060133438110351566, 0.06101132965087891, 0.05972681427001953, 0.05943654251098633, 0.05944076919555664, 0.059431808471679684, 0.05950259017944336, 0.059701248168945314, 0.05961558532714844, 0.05952067184448242, 0.05966985702514648, 0.059550369262695316, 0.05975244903564453, 0.05969820785522461, 0.05979852676391602, 0.05977084732055664, 0.059856895446777345, 0.060055072784423826, 0.06008835220336914, 0.05998179244995117, 0.05984713745117187, 0.059571487426757816, 0.05967536163330078, 0.059625247955322265, 0.059627742767333985, 0.05970534515380859, 0.05977088165283203, 0.0598169937133789, 0.059762657165527346, 0.05974662399291992, 0.059833023071289064, 0.05997158432006836, 0.059837696075439456, 0.059927295684814454, 0.060530017852783204, 0.06001935958862305, 0.06004326248168945, 0.059934688568115235, 0.05990374374389648, 0.059799808502197266, 0.05982137680053711, 0.059897983551025394, 0.060007007598876956, 0.05987958526611328, 0.060015518188476565, 0.059896926879882816, 0.06007791900634766, 0.06009036636352539, 0.06026399993896484, 0.060117057800292965, 0.06024639892578125, 0.06014156723022461, 0.06038297653198242, 0.06021350479125977, 0.06023187255859375, 0.06008607864379883, 0.059996158599853515, 0.06013132858276367, 0.0610296630859375, 0.06008086395263672, 0.06016425704956055, 0.06003081512451172, 0.06011904144287109, 0.060133182525634765, 0.061208576202392576, 0.05984460830688477, 0.05956403350830078, 0.05949193572998047, 0.05955215835571289, 0.059644031524658206, 0.059543422698974606, 0.05954953765869141, 0.05956828689575195, 0.05987120056152344, 0.05962716674804688, 0.05966886520385742, 0.059582366943359374, 0.05975664138793945, 0.05973811340332031, 0.05982374572753906, 0.059808128356933596, 0.05978060913085938, 0.05967103958129883, 0.05966614532470703, 0.05961091232299805, 0.05974256134033203, 0.05965596771240234, 0.05972825622558594, 0.05982003021240234, 0.059794750213623044, 0.059799774169921875, 0.05974883270263672, 0.05986678314208985, 0.05978771209716797, 0.059953086853027346, 0.060123104095458985, 0.0600491828918457, 0.060104927062988284, 0.06006579208374024, 0.05996073532104492, 0.05985667037963867, 0.059906879425048826, 0.05993267059326172, 0.059906047821044923, 0.05994496154785156, 0.06004121780395508, 0.059930271148681644, 
0.059955551147460935, 0.05988687896728516, 0.06000044631958008, 0.059902496337890625, 0.059972671508789065, 0.059996768951416014, 0.060221790313720706, 0.06014374542236328, 0.06024521636962891, 0.06002463912963867, 0.06033187103271485, 0.05999497604370117, 0.06005161666870117, 0.06008367919921875, 0.060227134704589846, 0.06013542556762695, 0.06016508865356445, 0.06009980773925781, 0.06027548980712891, 0.060247039794921874, 0.061302719116210935, 0.05975983810424805, 0.05958467102050781, 0.059603649139404295, 0.059510784149169924, 0.05961033630371094, 0.059611934661865235, 0.05953887939453125, 0.05948579025268555, 0.059582847595214844, 0.059533920288085934, 0.05965311813354492, 0.059714561462402345, 0.05980313491821289, 0.059700897216796875, 0.059741024017333985, 0.05990579223632812, 0.05992473602294922, 0.05973382568359375, 0.059736255645751954, 0.05971148681640625, 0.05987942504882812, 0.05982361602783203, 0.0597283821105957, 0.059757793426513675, 0.059789215087890625, 0.05984960174560547, 0.05980339050292969, 0.060028575897216795, 0.059931232452392576, 0.05975878524780273, 0.0598169937133789, 0.05980976104736328, 0.060047969818115235, 0.05987919998168945, 0.05992287826538086, 0.05978112030029297, 0.060228801727294924, 0.059830528259277344, 0.0598469123840332, 0.05978889465332031, 0.05976063919067383, 0.059947521209716796, 0.05990627288818359, 0.059991455078125, 0.060276737213134764, 0.06012998580932617, 0.060037025451660155, 0.06011014556884765, 0.060170944213867185, 0.06018182373046875, 0.06020140838623047, 0.060088577270507815, 0.060212512969970704, 0.0601192626953125, 0.06010428619384765, 0.0602646713256836, 0.06054991912841797, 0.060241695404052734, 0.060170368194580076, 0.06011398315429688, 0.060236160278320315, 0.06019059371948242, 0.061020160675048826, 0.059815937042236325, 0.059531105041503905, 0.05948329544067383, 0.05937811279296875, 0.059524799346923826, 0.05950249481201172, 0.059675617218017576, 0.05964595031738281, 0.059848705291748044, 0.05973974227905274, 0.05982044982910156, 0.05971148681640625, 0.059824127197265625, 0.05975203323364258, 0.05997200012207031, 0.06001663970947266, 0.06005254364013672, 0.05983440017700195, 0.05971036911010742, 0.05958041763305664, 0.059731040954589844, 0.0596409912109375, 0.05972675323486328, 0.059808609008789065, 0.05976063919067383, 0.05976473617553711, 0.059832321166992185, 0.059979774475097655, 0.05986099243164063, 0.05992243194580078, 0.06004121780395508, 0.06011904144287109, 0.06010675048828125, 0.06004326248168945, 0.06012911987304687, 0.06001475143432617, 0.059901569366455076, 0.05985670471191406, 0.05983699035644531, 0.059807743072509766, 0.05990118408203125, 0.05995750427246094, 0.060029441833496094, 0.060032382965087894, 0.060102817535400394, 0.059960929870605466, 0.060093311309814455, 0.0602184944152832, 0.060493824005126956, 0.06019571304321289, 0.06016819381713867, 0.06009001541137695, 0.06013167953491211, 0.060061695098876954, 0.06013337707519531, 0.06005680084228516, 0.060189472198486325, 0.05996729660034179, 0.06008966445922852, 0.060097694396972656, 0.060296928405761716, 0.06010841751098633, 0.0610186882019043, 0.059599231719970704, 0.05940163040161133, 0.059574878692626954, 0.059557823181152346, 0.059503681182861326, 0.05953023910522461, 0.05969689559936524, 0.059574527740478514, 0.059706687927246094, 0.059727615356445315, 0.05971561431884766, 0.05965414428710938, 0.05974691009521484, 0.059728191375732424, 0.059815937042236325, 0.05984572982788086, 0.0599185905456543, 0.059627647399902346, 0.05975913619995117, 
0.05968297576904297, 0.05973996734619141, 0.05967001724243164, 0.059737918853759765, 0.05973075103759766, 0.05981174468994141, 0.05992879867553711, 0.0598546257019043, 0.05992572784423828, 0.05988227081298828, 0.06002687835693359, 0.06002435302734375, 0.060014591217041016, 0.06019939041137695, 0.06001776123046875, 0.05998470306396484, 0.059952831268310545, 0.05982044982910156, 0.059875328063964846, 0.05975353622436524, 0.060295486450195314, 0.05978995132446289, 0.05986918258666992, 0.05988169479370117, 0.059898815155029296, 0.06012115097045898, 0.060068641662597656, 0.0600780143737793, 0.06017030334472656, 0.06040281677246094, 0.06013833618164063, 0.06007401657104492, 0.06001049423217773, 0.06015385437011719, 0.06017638397216797, 0.06015590286254883, 0.06006803131103516, 0.060413761138916014, 0.06005759811401367, 0.06024806213378906, 0.06006784057617188, 0.06026031875610351, 0.06022966384887695, 0.061293087005615234, 0.06036070251464844, 0.0595599365234375, 0.05948806381225586, 0.05944294357299805, 0.05966089630126953, 0.05954099273681641, 0.059711841583251955, 0.05971113586425781, 0.059711841583251955, 0.05955740737915039, 0.05952764892578125, 0.05947596740722656, 0.059757984161376954, 0.059738143920898434, 0.059684864044189455, 0.059974079132080076, 0.059967456817626955, 0.05977500915527344, 0.05978230285644531, 0.05974512100219727, 0.05974630355834961, 0.059695297241210935, 0.05971756744384766, 0.059840511322021485, 0.059977760314941404, 0.05979878234863281, 0.05982255935668945, 0.05983046340942383, 0.060076095581054687, 0.05995536041259766, 0.05991622543334961, 0.059905952453613284, 0.06013747024536133, 0.05993267059326172, 0.059990016937255856, 0.059891712188720705, 0.059993343353271486, 0.06000921630859375, 0.059908096313476565, 0.05991433715820312, 0.06000614547729492, 0.059869342803955075, 0.059944992065429685, 0.05997974395751953, 0.060083198547363284, 0.05998080062866211, 0.05996054458618164, 0.060033470153808596, 0.06019107055664062, 0.06013337707519531, 0.060092575073242185, 0.06006163024902344, 0.06008127975463867, 0.060068641662597656, 0.060104705810546874, 0.06032320022583008, 0.06021971130371094, 0.060247776031494144, 0.06013337707519531, 0.06012374496459961, 0.06030745697021484, 0.06024736022949219]",tokens/s,16.68977389390158,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-rw-1b,tiiuae/falcon-rw-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, 
in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-4B,Qwen/Qwen1.5-4B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,837.615616,9637.39648,0.0,9242.148864,8603.568128,s,1,7.7355849609375,7.7355849609375,0.0,7.7355849609375,7.7355849609375,7.7355849609375,7.7355849609375,[7.7355849609375],,kWh,1.466184626665381e-05,1.4242632171994635e-06,7.069727877992427e-06,2.31558373618457e-05,,MB,1139.339264,9886.957568,0.0,9481.224192,8972.090368,s,10,6.982478088378905,0.6982478088378905,0.0036517305287179706,0.6994198913574219,0.7011476623535157,0.7023103729248047,0.7032405413818359,"[0.69003857421875, 0.697864501953125, 0.6987088012695313, 0.6942640380859375, 0.6964507446289062, 0.700400634765625, 0.7002574462890625, 0.7034730834960937, 0.7001309814453125, 0.7008892822265625]",tokens/s,366.6320133908713,kWh,2.0463702589164413e-05,2.256788519096522e-06,1.354058490653324e-05,3.6261076014794174e-05,tokens/kWh,7059911.843089114,MB,1161.076736,9891.151872,0.0,9485.418496,8972.092928,s,10,24.507166992187496,2.45071669921875,0.0028596785383194555,2.45123291015625,2.453633666992187,2.4545537719726562,2.455289855957031,"[2.450625244140625, 2.447038330078125, 2.445943115234375, 2.45255517578125, 2.448777099609375, 2.44886376953125, 2.451840576171875, 2.45342919921875, 2.45262060546875, 2.455473876953125]",tokens/s,25.706765706572046,kWh,7.17809645554217e-05,7.917776365660735e-06,4.777901970466671e-05,0.00012747776062574915,tokens/kWh,494203.84928910236,,s,630,24.50396546554564,0.03889518327864389,0.0005780801186598636,0.03882415962219238,0.039298535919189456,0.03955597248077392,0.0418481328201294,"[0.04240982437133789, 0.03919449615478516, 0.038448894500732425, 0.03843920135498047, 0.038400318145751955, 0.03821561431884766, 0.038188255310058594, 0.03821200180053711, 0.038166912078857425, 0.03850604629516602, 0.03846115112304688, 0.0384453125, 0.038309761047363285, 0.03864358520507812, 0.03840892791748047, 0.03843600082397461, 0.03828412628173828, 0.03838771057128906, 0.03832371139526367, 0.038511104583740234, 0.041850879669189454, 0.03854441452026367, 0.038468257904052734, 0.03840646362304687, 0.03894681549072266, 0.039301185607910155, 0.03922323226928711, 0.03886297607421875, 0.03875212860107422, 0.03869465637207031, 0.03862963104248047, 0.03875859069824219, 0.038529983520507814, 0.038953857421875, 0.03902822494506836, 0.03879542541503906, 0.03858003234863281, 0.03874256134033203, 0.03874611282348633, 0.03896115112304688, 0.03889126586914062, 0.03896275329589844, 0.03880422210693359, 0.03891299057006836, 0.03892937469482422, 0.03910041427612305, 0.038809600830078124, 0.038768638610839845, 0.039041023254394534, 0.03917004776000976, 0.03905331039428711, 0.039137279510498044, 0.03910041427612305, 0.039375873565673826, 0.03939430236816406, 0.03921820831298828, 0.03953142547607422, 
0.03900831985473633, 0.03985203170776367, 0.038983680725097655, 0.03911398315429687, 0.03894553756713867, 0.039024063110351566, 0.04228028869628906, 0.039467681884765626, 0.03859199905395508, 0.03863798522949219, 0.038174110412597655, 0.03832048034667969, 0.0382081298828125, 0.0384983024597168, 0.038449153900146485, 0.03842067337036133, 0.03831788635253906, 0.03850239944458008, 0.03843699264526367, 0.038504318237304686, 0.03838137435913086, 0.038328033447265625, 0.03826736068725586, 0.03842639923095703, 0.04023945617675781, 0.03832819366455078, 0.03829945755004883, 0.03852921676635742, 0.03851059341430664, 0.03851676940917969, 0.03917820739746094, 0.039392799377441404, 0.039156192779541014, 0.038991870880126955, 0.03890176010131836, 0.03886489486694336, 0.03877814483642578, 0.03855228805541992, 0.03853094482421875, 0.038678657531738284, 0.038649406433105465, 0.03873427200317383, 0.03861420822143555, 0.03869164657592773, 0.03888947296142578, 0.03896934509277344, 0.038778881072998046, 0.03892950439453125, 0.03887401580810547, 0.03883974456787109, 0.038812095642089844, 0.03884431838989258, 0.0386992301940918, 0.03865190505981445, 0.03917004776000976, 0.03892428970336914, 0.039000064849853515, 0.03933388900756836, 0.03938508987426758, 0.03971072006225586, 0.038909217834472654, 0.03890454483032227, 0.038979167938232424, 0.03910083389282227, 0.03894236755371094, 0.03895449447631836, 0.03881260681152344, 0.038983585357666016, 0.03899955368041992, 0.04168294525146484, 0.03928044891357422, 0.03848211288452148, 0.038397823333740234, 0.03837145614624023, 0.03829900741577148, 0.03815078353881836, 0.03844230270385742, 0.038287487030029294, 0.038359264373779296, 0.03845119857788086, 0.038693248748779295, 0.03854131317138672, 0.03895817565917969, 0.03849305725097656, 0.038330368041992184, 0.03844707107543945, 0.03850841522216797, 0.03833414459228516, 0.03849264144897461, 0.03854950332641602, 0.03890892791748047, 0.03855683135986328, 0.03847971343994141, 0.03858432006835937, 0.03910438537597656, 0.03901401519775391, 0.03887363052368164, 0.0387806396484375, 0.038785057067871095, 0.03871120071411133, 0.038760353088378906, 0.03867075347900391, 0.03887011337280273, 0.038924320220947266, 0.03873891067504883, 0.03874332809448242, 0.03887776184082031, 0.03867654418945313, 0.038662143707275394, 0.038950912475585936, 0.03869900894165039, 0.03893155288696289, 0.039447456359863284, 0.03983321762084961, 0.038766975402832034, 0.03877478408813476, 0.038715328216552734, 0.0390423698425293, 0.03913324737548828, 0.03901830291748047, 0.03903084945678711, 0.038886207580566406, 0.03893068695068359, 0.038843711853027346, 0.03899951934814453, 0.03906835174560547, 0.039080223083496096, 0.03912908935546875, 0.03902873611450195, 0.03894428634643555, 0.0389964485168457, 0.03914547348022461, 0.04183116912841797, 0.03911676788330078, 0.03839340972900391, 0.03831145477294922, 0.03812035369873047, 0.03829350280761719, 0.03823782348632813, 0.03829983901977539, 0.03849849700927734, 0.03841212844848633, 0.038434814453125, 0.03860617446899414, 0.03827590560913086, 0.04026163101196289, 0.03870719909667969, 0.03842876815795898, 0.03836710357666016, 0.038593727111816405, 0.03867939376831055, 0.03879731369018555, 0.03861836624145508, 0.03873276901245117, 0.0387968635559082, 0.03864303970336914, 0.03889849472045898, 0.03930492782592773, 0.039372638702392576, 0.03893503952026367, 0.038752254486083985, 0.04029439926147461, 0.038621185302734375, 0.0385986557006836, 0.038757633209228516, 0.03874819183349609, 0.03879600143432617, 
0.04150476837158203, 0.038416385650634766, 0.0385269775390625, 0.038637569427490234, 0.038823486328125, 0.03871334457397461, 0.03867078399658203, 0.039019519805908204, 0.038816959381103515, 0.03877225494384766, 0.038856990814208986, 0.03897919845581055, 0.03913356781005859, 0.039144927978515626, 0.039119392395019534, 0.03903692626953125, 0.03902246475219726, 0.03912511825561524, 0.03929449462890625, 0.03948191833496094, 0.039147422790527346, 0.03908758544921875, 0.03906000137329101, 0.03908403015136719, 0.03907139205932617, 0.039184703826904296, 0.03899955368041992, 0.03895286560058594, 0.04184140777587891, 0.039497695922851565, 0.03907993698120117, 0.03835811233520508, 0.03825551986694336, 0.03832831954956055, 0.038371326446533204, 0.03828700637817383, 0.038516670227050784, 0.03856835174560547, 0.03844255828857422, 0.03854380798339844, 0.03834864044189453, 0.03848195266723633, 0.03841443252563476, 0.03829558563232422, 0.03884425735473633, 0.03888336181640625, 0.03840134429931641, 0.03855238342285156, 0.03867776107788086, 0.038615806579589844, 0.03846758270263672, 0.03863935852050781, 0.039032161712646486, 0.039371681213378903, 0.039139328002929685, 0.03901440048217773, 0.03904092788696289, 0.03867043304443359, 0.038752254486083985, 0.03890585708618164, 0.03881369781494141, 0.038803455352783206, 0.03864371109008789, 0.0388455696105957, 0.038817790985107424, 0.038806400299072265, 0.03878092956542969, 0.03868832015991211, 0.03870550537109375, 0.03929916763305664, 0.03864780807495117, 0.038788223266601564, 0.038824832916259766, 0.03894268798828125, 0.039077919006347654, 0.03896105575561523, 0.039172191619873044, 0.03911884689331055, 0.03901235198974609, 0.03905535888671875, 0.03905535888671875, 0.03911475372314453, 0.039180286407470705, 0.03903078460693359, 0.03909632110595703, 0.03928387069702149, 0.039023456573486326, 0.038905406951904295, 0.03894931030273437, 0.0391657600402832, 0.03923747253417969, 0.04187583923339844, 0.039359935760498045, 0.03847574234008789, 0.038427169799804685, 0.038367584228515626, 0.03836928176879883, 0.038255615234375, 0.038691841125488284, 0.03841999816894531, 0.03863619232177734, 0.03843462371826172, 0.03851878356933594, 0.03831193542480469, 0.03847568130493164, 0.038506591796875, 0.038338207244873045, 0.03842287826538086, 0.038561790466308594, 0.038413665771484376, 0.03881337738037109, 0.038652671813964846, 0.03866236877441406, 0.0385167350769043, 0.03869081497192383, 0.038834175109863284, 0.038940673828125, 0.0388485107421875, 0.03907583999633789, 0.03894883346557617, 0.039006240844726564, 0.03888483047485351, 0.038725536346435545, 0.038574718475341795, 0.03904512023925781, 0.03900201416015625, 0.03889564895629883, 0.038723777770996094, 0.03876236724853516, 0.03863347244262695, 0.03871091079711914, 0.03869529724121094, 0.039021568298339845, 0.03878806304931641, 0.038819103240966796, 0.038808319091796876, 0.039018497467041016, 0.03906355285644531, 0.03907174301147461, 0.039257377624511716, 0.039254753112792966, 0.039005470275878903, 0.03901103973388672, 0.03900572967529297, 0.03913679885864258, 0.039078208923339845, 0.03920550537109375, 0.03904716873168945, 0.03918048095703125, 0.03927366256713867, 0.03907648086547852, 0.03912089538574219, 0.039569408416748046, 0.039122943878173826, 0.041544288635253904, 0.03916595077514649, 0.03852313613891602, 0.03836883163452148, 0.03838969421386719, 0.03837974548339844, 0.038141632080078126, 0.0385043830871582, 0.0383944320678711, 0.03843609619140625, 0.03854217529296875, 0.03851590347290039, 0.03836521530151367, 
0.03840252685546875, 0.03826921463012695, 0.03835644912719727, 0.03848988723754883, 0.03864451217651367, 0.03867824172973633, 0.038508033752441405, 0.03879919815063477, 0.03901740646362305, 0.03867232131958008, 0.03867388916015625, 0.039014209747314454, 0.03933795166015625, 0.03914956665039063, 0.04085228729248047, 0.03866009521484375, 0.038813793182373046, 0.03863951873779297, 0.038596607208251955, 0.03867427062988281, 0.03875446319580078, 0.03879683303833008, 0.03904710388183594, 0.038707744598388674, 0.038742015838623044, 0.03867443084716797, 0.03867567825317383, 0.03883087921142578, 0.03879116821289062, 0.03914547348022461, 0.03884236907958984, 0.038793216705322264, 0.03902054214477539, 0.03889273452758789, 0.038832416534423826, 0.03905795288085938, 0.0396308479309082, 0.042305057525634765, 0.039413951873779295, 0.03916009521484375, 0.039174144744873046, 0.0390709114074707, 0.039008544921875, 0.039019039154052734, 0.03905875015258789, 0.038918846130371096, 0.03890924835205078, 0.038892032623291016, 0.03895856094360352, 0.03892707061767578, 0.042289119720458984, 0.039257984161376956, 0.038289535522460935, 0.03848396682739258, 0.038345951080322266, 0.03889641571044922, 0.038098846435546875, 0.03801094436645508, 0.0381495361328125, 0.038375518798828126, 0.03826742553710937, 0.03844095993041992, 0.03830579376220703, 0.03846553421020508, 0.038539134979248046, 0.038324352264404296, 0.03925196838378906, 0.03841024017333984, 0.03835299301147461, 0.038743968963623046, 0.0384983024597168, 0.03860070419311523, 0.03845487976074219, 0.038650272369384765, 0.04048691177368164, 0.03898323059082031, 0.03893088150024414, 0.03907993698120117, 0.039239646911621094, 0.03926339340209961, 0.038836544036865234, 0.038591041564941406, 0.03861913681030273, 0.038733631134033206, 0.03861318588256836, 0.03873382568359375, 0.03885670471191406, 0.038760448455810545, 0.038916095733642575, 0.03890995025634766, 0.038874336242675785, 0.03903116989135742, 0.0387977294921875, 0.03885055923461914, 0.03893990325927734, 0.039088897705078125, 0.03906889724731445, 0.03907254409790039, 0.039448673248291016, 0.03914947128295899, 0.03933139038085937, 0.03974803161621094, 0.0392806396484375, 0.039298465728759766, 0.039199329376220705, 0.03995340728759766, 0.039610721588134765, 0.039182815551757816, 0.03916204833984375, 0.03927040100097656, 0.03917571258544922, 0.03916233444213867, 0.03933763122558594, 0.04176278305053711, 0.03926428985595703, 0.03845248031616211, 0.038432960510253904, 0.03846611022949219, 0.03868057632446289, 0.03832831954956055, 0.038388832092285156, 0.03837948989868164, 0.03873174285888672, 0.03843376159667969, 0.038569984436035154, 0.03841203308105469, 0.03857209777832031, 0.03845465469360351, 0.03844384002685547, 0.03842832183837891, 0.03875875091552734, 0.038645759582519534, 0.03867647933959961, 0.03896115112304688, 0.038943870544433594, 0.03871587371826172, 0.03870966339111328, 0.03891404724121094, 0.03907174301147461, 0.03901808166503906, 0.039010719299316404, 0.038828033447265625, 0.03888127899169922, 0.03886016082763672, 0.03882611083984375, 0.03859711837768555, 0.038950912475585936, 0.039024639129638675, 0.03890998458862305, 0.038760257720947267, 0.03876422500610351, 0.038902240753173827, 0.03875743865966797, 0.03901126480102539, 0.03870719909667969, 0.03909222412109375, 0.03882150268554688, 0.03886262512207031, 0.03905187225341797, 0.03906150436401367, 0.03932070541381836, 0.03965574264526367, 0.039745471954345704, 0.039712928771972654, 0.039465152740478515, 0.039112991333007815, 0.03900774383544922, 
0.03898121643066406, 0.03892496109008789, 0.03925404739379883, 0.03916003036499023, 0.0391736946105957, 0.039389537811279296, 0.038991966247558595, 0.039061054229736325, 0.03904739379882813, 0.042189983367919924, 0.03953955078125, 0.03873708724975586, 0.03855974578857422, 0.03835481643676758, 0.03852134323120117, 0.038801151275634764, 0.038230270385742185, 0.03829542541503906, 0.03843260955810547, 0.038470497131347654, 0.038594017028808596, 0.03839215850830078, 0.038856769561767576, 0.038499679565429684, 0.03836937713623047, 0.03883875274658203, 0.03846358489990234, 0.03857408142089844, 0.03860595321655273, 0.038614974975585935, 0.03986928176879883, 0.0385081901550293, 0.03874246215820312, 0.03907353591918945, 0.03920111846923828, 0.03945257568359375, 0.039479263305664064, 0.03925747299194336, 0.03923830413818359, 0.03870505523681641, 0.038638847351074215, 0.03867324829101562, 0.038727294921875, 0.03866457748413086, 0.03866419219970703, 0.0389939193725586, 0.03876051330566406, 0.0386682243347168, 0.03870719909667969, 0.03889152145385742, 0.04039475250244141, 0.0388853759765625, 0.03886284637451172, 0.038776641845703126, 0.03885689544677735, 0.038950912475585936, 0.03937497711181641, 0.03889753723144531, 0.03926015853881836, 0.03914080047607422, 0.03937068939208985, 0.0393939208984375, 0.03947315216064453, 0.039103488922119144, 0.039136257171630856, 0.03924991989135742, 0.03964313507080078, 0.03922534561157227, 0.03917619323730469, 0.038957054138183594, 0.03916799926757813, 0.038960289001464844]",tokens/s,25.710124383166697,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,deci,Deci/DeciLM-7B,Deci/DeciLM-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: DeciLMForCausalLM does not support an attention implementation 
through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-3b-4e1t,stabilityai/stablelm-3b-4e1t,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,883.621888,6232.604672,0.0,5830.08256,5627.341824,s,1,7.68834326171875,7.68834326171875,0.0,7.68834326171875,7.68834326171875,7.68834326171875,7.68834326171875,[7.68834326171875],,kWh,6.926628508373748e-06,7.566438413463331e-07,2.0375016300117066e-06,9.720773979731788e-06,,MB,1239.670784,6293.42208,0.0,5880.414208,5763.868672,s,10,4.775058685302735,0.4775058685302735,0.002625333812755116,0.47774868774414064,0.4801435119628906,0.48022887573242184,0.48029716674804684,"[0.4707603454589844, 0.4758587341308594, 0.4773282165527344, 0.47699267578125, 0.47793374633789065, 0.4801245422363281, 0.47756362915039063, 0.4803142395019531, 0.47861386108398435, 0.47956869506835936]",tokens/s,536.119065484888,kWh,1.393879980555539e-05,1.5371889149086738e-06,9.214756049047556e-06,2.4690744769511622e-05,tokens/kWh,10368257.514698843,MB,1264.922624,6377.30816,0.0,5964.300288,5763.871232,s,10,17.788362182617185,1.7788362182617186,0.002390893148237242,1.778939697265625,1.7818632324218748,1.7820497802734374,1.7821990185546874,"[1.77534765625, 1.7801082763671876, 1.7756986083984374, 1.7765712890625, 1.7777958984375, 1.7809029541015624, 1.78182177734375, 1.7778101806640625, 1.7800692138671874, 1.782236328125]",tokens/s,35.41641403139615,kWh,5.204342882610793e-05,5.7403901259463685e-06,3.4590778995351046e-05,9.237459794740534e-05,tokens/kWh,682005.6747188211,,s,630,17.785190011978138,0.028230460336473258,0.0003424180153548108,0.028232992172241213,0.02852157440185547,0.02863807716369629,0.029817421493530276,"[0.029540191650390624, 0.02848806381225586, 0.028142879486083985, 0.028068384170532226, 0.02801686477661133, 0.027856159210205077, 0.027971712112426758, 0.02800111961364746, 0.027998207092285156, 0.028047359466552735, 0.028014591217041016, 0.028040992736816407, 0.028033119201660156, 0.028149120330810545, 0.028207872390747072, 0.02790809631347656, 0.027661951065063476, 0.027900287628173828, 0.027963392257690428, 0.027878751754760744, 0.02780182456970215, 0.02777734375, 0.027993247985839843, 0.02822243118286133, 0.028235776901245117, 0.027835391998291017, 0.027830656051635743, 0.02813510322570801, 0.0280196475982666, 0.028200960159301756, 0.028035072326660155, 0.02775654411315918, 0.027938816070556642, 0.028399456024169923, 0.028165632247924805, 0.028223712921142577, 0.028260223388671873, 0.02849577522277832, 
0.028279455184936523, 0.02833203125, 0.02824153518676758, 0.028289312362670897, 0.028310688018798828, 0.028238176345825195, 0.028187231063842775, 0.028440576553344726, 0.028260351181030274, 0.028391424179077147, 0.028307167053222657, 0.028109088897705078, 0.028239616394042967, 0.02851456069946289, 0.02823776054382324, 0.028391487121582033, 0.02841708755493164, 0.028404672622680663, 0.028372575759887695, 0.028367263793945312, 0.028370943069458008, 0.028129152297973633, 0.02826588821411133, 0.028480224609375, 0.028280832290649413, 0.03003225517272949, 0.028643327713012694, 0.028213247299194336, 0.027805055618286133, 0.027527807235717773, 0.0277258243560791, 0.02768076705932617, 0.027789312362670897, 0.027869184494018553, 0.02772108840942383, 0.027728511810302735, 0.02795315170288086, 0.028014591217041016, 0.027881471633911133, 0.028854272842407228, 0.027662336349487306, 0.0304289608001709, 0.028092639923095703, 0.027715167999267577, 0.02804572868347168, 0.028140928268432615, 0.028307519912719726, 0.028129663467407227, 0.02779257583618164, 0.027673471450805665, 0.028042688369750976, 0.02826892852783203, 0.02833030319213867, 0.027848352432250978, 0.02775004768371582, 0.027937215805053713, 0.02826470375061035, 0.028286975860595705, 0.02813507270812988, 0.028211551666259764, 0.02857574462890625, 0.02837299156188965, 0.02841116714477539, 0.0283143367767334, 0.02837811279296875, 0.02826723289489746, 0.02826678466796875, 0.028352415084838867, 0.028368511199951173, 0.028352991104125976, 0.028520288467407225, 0.028422304153442383, 0.02842745590209961, 0.028392000198364256, 0.02872755241394043, 0.0284051513671875, 0.02826684761047363, 0.02824012756347656, 0.02857164764404297, 0.028452863693237306, 0.028317695617675782, 0.028429759979248046, 0.028414304733276368, 0.028367071151733397, 0.028442623138427735, 0.028349760055541993, 0.028514591217041016, 0.02836911964416504, 0.029598751068115235, 0.028383392333984375, 0.02857436752319336, 0.027811552047729494, 0.02782793617248535, 0.027922367095947264, 0.027912992477416992, 0.027674623489379883, 0.027824127197265625, 0.0279564151763916, 0.02807072067260742, 0.028048799514770507, 0.028125152587890626, 0.028097087860107423, 0.02804128074645996, 0.028061344146728517, 0.027950496673583985, 0.02790291213989258, 0.02791391944885254, 0.028193056106567384, 0.028147743225097655, 0.02812063980102539, 0.02788774490356445, 0.02778758430480957, 0.028019712448120116, 0.028256320953369142, 0.028228544235229493, 0.027954463958740235, 0.027725568771362306, 0.027925472259521484, 0.028225120544433595, 0.028244224548339844, 0.028001920700073242, 0.028016895294189454, 0.02791769599914551, 0.02793328094482422, 0.02774790382385254, 0.02820582389831543, 0.02839756774902344, 0.028196319580078125, 0.028341888427734375, 0.028311904907226564, 0.028076128005981447, 0.02831184005737305, 0.028358848571777343, 0.028223487854003908, 0.028197919845581055, 0.028478431701660156, 0.028317695617675782, 0.02835456085205078, 0.028276735305786133, 0.028358272552490234, 0.02885875129699707, 0.028482656478881836, 0.028195743560791017, 0.02835206413269043, 0.028462751388549805, 0.028463455200195314, 0.028412351608276366, 0.028552831649780272, 0.02848192024230957, 0.02844611167907715, 0.028237503051757814, 0.029987520217895507, 0.028606464385986328, 0.02816819190979004, 0.027963104248046874, 0.027745695114135743, 0.02763862419128418, 0.027778400421142577, 0.02782022476196289, 0.02766899108886719, 0.02770489692687988, 0.027809728622436525, 0.02780620765686035, 0.02786265563964844, 0.02828326416015625, 
0.027924320220947266, 0.0278570556640625, 0.02800230407714844, 0.028104703903198244, 0.028209152221679686, 0.028108800888061523, 0.02779257583618164, 0.027929407119750976, 0.02793824005126953, 0.02820307159423828, 0.028219680786132812, 0.02801180839538574, 0.027724479675292967, 0.027922496795654297, 0.028121280670166015, 0.02830335998535156, 0.0281060791015625, 0.02781660842895508, 0.027717632293701173, 0.028065088272094727, 0.02849043273925781, 0.028669952392578125, 0.028461055755615236, 0.02850543975830078, 0.028318368911743164, 0.028340192794799806, 0.028383264541625975, 0.028338176727294922, 0.028337919235229492, 0.028118656158447265, 0.028119680404663085, 0.02835807991027832, 0.0285230712890625, 0.028283903121948242, 0.028169216156005858, 0.02853887939453125, 0.028294527053833007, 0.028506656646728516, 0.028401760101318358, 0.02839756774902344, 0.028387327194213868, 0.02838688087463379, 0.028383647918701172, 0.02851024055480957, 0.028335168838500978, 0.02852140808105469, 0.02834636878967285, 0.028479488372802734, 0.02844038391113281, 0.029844064712524414, 0.028533023834228517, 0.028013568878173828, 0.027953920364379884, 0.027992319107055665, 0.027929983139038085, 0.027789312362670897, 0.027699167251586915, 0.027793983459472656, 0.027962751388549804, 0.027986368179321288, 0.02777731132507324, 0.027728927612304687, 0.02779225540161133, 0.02797782325744629, 0.028014207839965822, 0.02787366485595703, 0.027870304107666017, 0.027691104888916015, 0.027897823333740236, 0.0282325439453125, 0.028178335189819336, 0.028133472442626952, 0.02796134376525879, 0.02772700881958008, 0.027953216552734375, 0.0281812801361084, 0.028299167633056642, 0.028127328872680664, 0.028037120819091797, 0.02800230407714844, 0.02804649543762207, 0.028068704605102537, 0.02821228790283203, 0.028412864685058593, 0.028815359115600587, 0.028642847061157228, 0.028568031311035157, 0.028438528060913085, 0.028372512817382813, 0.028482015609741212, 0.028338176727294922, 0.02828678321838379, 0.028559104919433594, 0.028382688522338866, 0.028410720825195312, 0.028201087951660157, 0.028281919479370116, 0.028215360641479493, 0.028132223129272462, 0.028286975860595705, 0.02853430366516113, 0.028219871520996094, 0.028452096939086916, 0.0284003849029541, 0.02854297637939453, 0.028321247100830078, 0.02841859245300293, 0.028423839569091797, 0.028468767166137696, 0.028552000045776366, 0.028576927185058595, 0.02844758415222168, 0.030043903350830077, 0.028701663970947266, 0.02819071960449219, 0.028085567474365233, 0.027802207946777343, 0.02798396873474121, 0.028005376815795898, 0.028079103469848633, 0.028104703903198244, 0.02812031936645508, 0.028031679153442384, 0.02783568000793457, 0.02789206314086914, 0.028055999755859377, 0.028100608825683594, 0.028208351135253905, 0.028095264434814454, 0.02778112030029297, 0.02775196838378906, 0.027996639251708984, 0.02811903953552246, 0.028227584838867188, 0.028121088027954103, 0.02809347152709961, 0.027780063629150392, 0.02793846321105957, 0.028156255722045897, 0.02816204833984375, 0.02801852798461914, 0.02798543930053711, 0.027787872314453125, 0.02796134376525879, 0.02826857566833496, 0.028649471282958985, 0.028463104248046874, 0.02834432029724121, 0.028297216415405273, 0.02837299156188965, 0.028286975860595705, 0.028436479568481447, 0.02814508819580078, 0.028328512191772463, 0.02841526412963867, 0.02827916717529297, 0.028303712844848634, 0.028266496658325195, 0.028133184432983398, 0.028314815521240235, 0.029752191543579103, 0.02884931182861328, 0.02840060806274414, 0.028364479064941408, 
0.028326208114624024, 0.028325887680053712, 0.02833612823486328, 0.02836409568786621, 0.028387519836425783, 0.02846976089477539, 0.02846623992919922, 0.028445632934570312, 0.02853068733215332, 0.02854911994934082, 0.028483135223388672, 0.0295546875, 0.028497760772705077, 0.02807823944091797, 0.027678592681884766, 0.027649215698242188, 0.028341184616088866, 0.027848447799682617, 0.02811110305786133, 0.02805299186706543, 0.028133888244628907, 0.028106752395629882, 0.02799612808227539, 0.027926399230957032, 0.02790006446838379, 0.028016639709472657, 0.028078079223632812, 0.028108640670776366, 0.028097888946533204, 0.02800217628479004, 0.02786992073059082, 0.027880895614624025, 0.0281232967376709, 0.028283519744873045, 0.028078079223632812, 0.027785215377807617, 0.027797504425048827, 0.02806096076965332, 0.02887116813659668, 0.028425472259521484, 0.028267072677612304, 0.028247711181640624, 0.028282720565795897, 0.02826857566833496, 0.028437376022338867, 0.02870681571960449, 0.028611967086791992, 0.02899827194213867, 0.028139551162719725, 0.028133344650268555, 0.028434431076049805, 0.028262399673461915, 0.028550559997558594, 0.028301408767700195, 0.028338687896728516, 0.02814975929260254, 0.028243072509765627, 0.028405887603759766, 0.028414176940917968, 0.028340768814086915, 0.028475488662719727, 0.028411808013916014, 0.028471296310424804, 0.02831295967102051, 0.02842803192138672, 0.028418624877929687, 0.028547391891479493, 0.02849782371520996, 0.028518495559692384, 0.02855878448486328, 0.028509759902954103, 0.028277759552001954, 0.028486848831176758, 0.028668735504150392, 0.029609983444213867, 0.028530815124511718, 0.028220672607421875, 0.027871456146240235, 0.027654336929321288, 0.027715808868408204, 0.02804297637939453, 0.027828512191772462, 0.02770534324645996, 0.02797097587585449, 0.02803772735595703, 0.02814361572265625, 0.02812224006652832, 0.028070783615112303, 0.02788761520385742, 0.027850143432617186, 0.027927135467529295, 0.028211200714111328, 0.02823574447631836, 0.02798585510253906, 0.027770975112915038, 0.027926528930664062, 0.028280832290649413, 0.028258207321166993, 0.02794054412841797, 0.028002431869506836, 0.02781737518310547, 0.027900447845458986, 0.028040864944458007, 0.028313343048095702, 0.028134143829345704, 0.027930624008178712, 0.02794691276550293, 0.02806812858581543, 0.0280731201171875, 0.028203712463378907, 0.028489631652832033, 0.028720735549926758, 0.02853340721130371, 0.028454912185668944, 0.028448768615722656, 0.02838256072998047, 0.02837775993347168, 0.02854431915283203, 0.02823344039916992, 0.028406463623046874, 0.028479776382446288, 0.02835251235961914, 0.028291072845458985, 0.028252159118652344, 0.028358015060424804, 0.02845350456237793, 0.028272640228271483, 0.028463104248046874, 0.028231168746948244, 0.028406272888183592, 0.028497919082641602, 0.02857734489440918, 0.028438976287841797, 0.028478784561157225, 0.02827712059020996, 0.02843270492553711, 0.028407039642333983, 0.03000115203857422, 0.02866556739807129, 0.02804150390625, 0.028225536346435546, 0.02800230407714844, 0.0280798397064209, 0.02805583953857422, 0.02808121681213379, 0.027933120727539062, 0.027961856842041017, 0.028032384872436523, 0.028068191528320314, 0.028027168273925783, 0.028055551528930665, 0.02807347106933594, 0.027986431121826173, 0.027831647872924806, 0.02796816062927246, 0.02813337516784668, 0.028200960159301756, 0.02804470443725586, 0.027763296127319335, 0.02794291114807129, 0.028163167953491212, 0.028175264358520507, 0.02814076805114746, 0.027912992477416992, 0.027850751876831056, 
0.027981792449951172, 0.02796463966369629, 0.028252992630004883, 0.02825823974609375, 0.02806790351867676, 0.028471296310424804, 0.0286363525390625, 0.028631359100341796, 0.02826905632019043, 0.02833612823486328, 0.028222848892211914, 0.028310144424438476, 0.028481536865234375, 0.028266496658325195, 0.02820207977294922, 0.02831439971923828, 0.028102144241333008, 0.028225887298583986, 0.028336416244506835, 0.028432384490966797, 0.02838528060913086, 0.028395519256591797, 0.02830335998535156, 0.02842624092102051, 0.02816204833984375, 0.028444223403930664, 0.028404159545898436, 0.028464992523193358, 0.028422304153442383, 0.028535839080810546, 0.028592607498168946, 0.028559871673583984, 0.028475391387939454, 0.028483455657958984, 0.028491455078125, 0.02993142318725586, 0.02875811195373535, 0.028368160247802733, 0.02813759994506836, 0.028168800354003907, 0.027992063522338868, 0.027875328063964845, 0.028055488586425783, 0.02804278373718262, 0.02810518455505371, 0.028000320434570312, 0.027936767578125, 0.027947008132934572, 0.028121088027954103, 0.028090368270874022, 0.0279552001953125, 0.027983680725097656, 0.0279205436706543, 0.028053152084350587, 0.028156288146972658, 0.02825212860107422, 0.027815967559814452, 0.02769830322265625, 0.02812607955932617, 0.028246015548706056, 0.028318784713745118, 0.028121248245239257, 0.028238624572753907, 0.028416000366210937, 0.02823891258239746, 0.028369855880737305, 0.02828428840637207, 0.02812112045288086, 0.02851260757446289, 0.028639488220214844, 0.02876006317138672, 0.02847123146057129, 0.02826358413696289, 0.028258623123168944, 0.028297151565551758, 0.028144287109375, 0.02829516792297363, 0.028435968399047853, 0.02829158401489258, 0.028293119430541993, 0.028674047470092775, 0.028493824005126952, 0.02814771270751953, 0.028215295791625978, 0.02855936050415039, 0.028258304595947265, 0.02840575981140137, 0.02842624092102051, 0.02860851287841797, 0.028252159118652344, 0.02840166473388672, 0.028481536865234375, 0.028433984756469726, 0.028338176727294922, 0.028463552474975586, 0.028430143356323243, 0.028483264923095702, 0.02834009552001953]",tokens/s,35.422730911263876,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-13b,huggyllama/llama-13b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File 
""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 353, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 154183 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-2.7b,facebook/opt-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,phi,microsoft/phi-1_5,microsoft/phi-1_5,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,883.924992,3447.586816,0.0,3045.064704,2842.846208,s,1,7.73144775390625,7.73144775390625,0.0,7.73144775390625,7.73144775390625,7.73144775390625,7.73144775390625,[7.73144775390625],,kWh,6.263748837500316e-06,6.834448405335389e-07,1.8066681120032513e-06,8.753861790037106e-06,,MB,1202.87232,3621.650432,0.0,3208.64256,2982.452736,s,10,2.4020885467529296,0.24020885467529296,0.002600932736818267,0.24056247711181641,0.2429522277832031,0.24345884857177735,0.24386414520263672,"[0.24396546936035157, 0.2396962890625, 0.2410211181640625, 0.24008387756347657, 0.24154832458496095, 0.24033509826660157, 0.23770352172851564, 0.24078985595703126, 0.2341053466796875, 0.24283964538574218]",tokens/s,1065.7392307458983,kWh,7.284845664633845e-06,8.033842092674123e-07,4.8159659123901994e-06,1.2904195786291458e-05,tokens/kWh,19838508.671106573,MB,1227.792384,3621.650432,0.0,3208.64256,2982.455296,s,10,13.264886230468749,1.3264886230468749,0.0044929704543753834,1.327206298828125,1.330827099609375,1.3327566284179686,1.3343002514648437,"[1.320555908203125, 1.325497314453125, 1.3204974365234374, 1.326908447265625, 1.3207633056640624, 1.327504150390625, 1.3294420166015626, 1.3303983154296875, 1.3346861572265625, 1.3286331787109376]",tokens/s,47.49381103268892,kWh,3.8403281438285374e-05,4.234728751027129e-06,2.3797501422810448e-05,6.643551161212294e-05,tokens/kWh,948288.0235470928,,s,630,13.260606334686283,0.021048581483629016,0.00030605883327593074,0.02099033546447754,0.02128183937072754,0.02151933603286743,0.0221596604347229,"[0.021428672790527344, 0.020948095321655272, 0.020900735855102538, 0.02086502456665039, 0.020993600845336913, 0.020769216537475585, 0.020797088623046876, 0.020914527893066408, 0.020715456008911132, 0.020594751358032227, 0.020529151916503906, 0.021251552581787108, 0.020648479461669922, 0.020596736907958983, 0.020867071151733398, 0.020813823699951172, 0.020914176940917968, 0.020827808380126954, 0.020820575714111327, 0.02065996742248535, 0.020876640319824218, 0.020629535675048827, 0.020691583633422852, 0.020809728622436522, 0.020848320007324218, 0.020739423751831056, 0.020875295639038085, 0.020730367660522463, 0.02084227180480957, 0.020736671447753905, 0.02085446357727051, 0.021577184677124023, 0.022198720932006834, 0.021151968002319336, 0.02091846466064453, 0.020827264785766603, 0.0209967041015625, 0.02099001693725586, 0.021065568923950194, 0.020799871444702148, 0.021073919296264648, 0.023138303756713868, 0.02206719970703125, 0.02085683250427246, 0.02085683250427246, 0.02092982482910156, 0.02072256088256836, 0.02070512008666992, 0.02087740707397461, 0.02096073532104492, 0.020888288497924803, 0.02162803268432617, 0.021076704025268556, 0.021056640625, 
0.020869888305664063, 0.020829696655273438, 0.02075494384765625, 0.020701183319091796, 0.020689983367919922, 0.020893951416015626, 0.020760608673095704, 0.02088003158569336, 0.02095699119567871, 0.02113484764099121, 0.020781728744506837, 0.02084659194946289, 0.020819839477539064, 0.02081964874267578, 0.02083875274658203, 0.020806976318359375, 0.020747039794921877, 0.02099318313598633, 0.02084124755859375, 0.021254207611083983, 0.021780479431152345, 0.020963327407836914, 0.020869152069091797, 0.020729408264160157, 0.02066281509399414, 0.020801248550415038, 0.02073206329345703, 0.02099628829956055, 0.020907840728759765, 0.020756479263305663, 0.02072972869873047, 0.0210883846282959, 0.02074998474121094, 0.020895519256591798, 0.02159228706359863, 0.021778783798217773, 0.021061632156372072, 0.021093727111816406, 0.021298847198486327, 0.0209553279876709, 0.020902399063110352, 0.021520128250122072, 0.021124799728393553, 0.021050239562988283, 0.0210513916015625, 0.020982847213745118, 0.02094927978515625, 0.02091263961791992, 0.021093984603881837, 0.021109312057495118, 0.02127052879333496, 0.021310560226440428, 0.02216566467285156, 0.021707551956176758, 0.021016544342041015, 0.020965375900268556, 0.0209039363861084, 0.021038848876953124, 0.020893951416015626, 0.020959232330322267, 0.020936704635620116, 0.021004287719726563, 0.021118976593017577, 0.020927871704101562, 0.020848735809326172, 0.021169952392578125, 0.02090006446838379, 0.02095158386230469, 0.02087641525268555, 0.020888511657714843, 0.02096886444091797, 0.021213727951049803, 0.021253952026367186, 0.02089251136779785, 0.0213156795501709, 0.020985855102539062, 0.021121023178100586, 0.021127168655395507, 0.02092406463623047, 0.02086537551879883, 0.02117344093322754, 0.020980480194091797, 0.02099318313598633, 0.020886240005493165, 0.02094304084777832, 0.02100223922729492, 0.020951040267944337, 0.020944896697998046, 0.020940095901489257, 0.02103366470336914, 0.02091609573364258, 0.02099622344970703, 0.020891359329223633, 0.020848928451538087, 0.02087936019897461, 0.020908031463623047, 0.020985855102539062, 0.02089779281616211, 0.020867071151733398, 0.020760511398315428, 0.020840127944946288, 0.021055871963500977, 0.02068070411682129, 0.02077241516113281, 0.021209535598754884, 0.020737056732177735, 0.021041727066040038, 0.0207956485748291, 0.020922527313232422, 0.02084659194946289, 0.020908031463623047, 0.02081177520751953, 0.020832256317138673, 0.020682752609252928, 0.02081203269958496, 0.02107366371154785, 0.020698720932006837, 0.020871583938598632, 0.02085878372192383, 0.020967519760131836, 0.020812896728515624, 0.021334943771362306, 0.02114691162109375, 0.020836383819580077, 0.02085958480834961, 0.02130534362792969, 0.020963327407836914, 0.021046432495117187, 0.021003231048583985, 0.020957056045532226, 0.02108415985107422, 0.020875263214111327, 0.02099830436706543, 0.02098361587524414, 0.021130720138549806, 0.021518367767333985, 0.021101696014404297, 0.021332832336425783, 0.02121507263183594, 0.021059743881225584, 0.02149740791320801, 0.02108051109313965, 0.021149599075317382, 0.021190656661987304, 0.02109859275817871, 0.021011903762817384, 0.02100896072387695, 0.02107596778869629, 0.02116377639770508, 0.021026464462280275, 0.021480031967163086, 0.02106947135925293, 0.020969823837280275, 0.020964799880981447, 0.020907808303833007, 0.021302047729492186, 0.020975616455078124, 0.020979328155517576, 0.021256479263305664, 0.021015871047973634, 0.020991872787475586, 0.02112784004211426, 0.021055744171142577, 0.02097871971130371, 
0.02101910400390625, 0.020963775634765626, 0.02102176094055176, 0.020994720458984376, 0.020875616073608397, 0.020805376052856445, 0.02107548713684082, 0.02116864013671875, 0.020993919372558595, 0.020952896118164064, 0.021037023544311525, 0.021031808853149415, 0.020954816818237305, 0.02087116813659668, 0.020942848205566408, 0.020983808517456053, 0.020989952087402345, 0.02083020782470703, 0.020806720733642578, 0.020841407775878906, 0.02123161506652832, 0.021149696350097655, 0.020979711532592774, 0.021403648376464843, 0.021151744842529296, 0.02083635139465332, 0.021106687545776368, 0.020969472885131835, 0.02101785659790039, 0.020929279327392577, 0.02102662467956543, 0.02094816017150879, 0.02107084846496582, 0.020896991729736327, 0.021448703765869142, 0.021249664306640624, 0.021164031982421876, 0.02099404716491699, 0.020886207580566408, 0.020909215927124022, 0.020970176696777344, 0.0211495361328125, 0.02087116813659668, 0.02088492774963379, 0.0209290885925293, 0.02105753517150879, 0.020977727890014647, 0.020866527557373046, 0.02104911994934082, 0.021084863662719725, 0.021021919250488283, 0.021043584823608397, 0.021086624145507812, 0.021122655868530273, 0.020928543090820314, 0.02096780776977539, 0.020977664947509765, 0.02087731170654297, 0.020981376647949218, 0.021059808731079103, 0.02129724884033203, 0.021026079177856444, 0.021080863952636718, 0.020975616455078124, 0.020920223236083984, 0.02090608024597168, 0.020975616455078124, 0.020849695205688478, 0.020813888549804687, 0.020796319961547852, 0.020951040267944337, 0.021024320602416994, 0.020925088882446288, 0.020900991439819334, 0.021000864028930664, 0.02099404716491699, 0.02093075180053711, 0.020813631057739257, 0.020841728210449217, 0.02088217544555664, 0.021223424911499023, 0.02100217628479004, 0.02088694381713867, 0.02081875228881836, 0.020938432693481446, 0.020776960372924806, 0.021135520935058594, 0.02084249687194824, 0.020783103942871094, 0.020867040634155273, 0.020888832092285155, 0.02090473556518555, 0.020731903076171874, 0.020794687271118165, 0.020699487686157227, 0.020829727172851562, 0.02077984046936035, 0.02145734405517578, 0.020888799667358397, 0.020970272064208983, 0.020920032501220702, 0.020988224029541015, 0.02096940803527832, 0.020846559524536134, 0.020860992431640624, 0.02366422462463379, 0.021318048477172852, 0.02109235191345215, 0.0212541446685791, 0.020967424392700194, 0.020977216720581053, 0.021024703979492188, 0.021030656814575194, 0.020906623840332032, 0.02096976089477539, 0.021040576934814453, 0.021445024490356446, 0.021596160888671875, 0.020942848205566408, 0.02099404716491699, 0.021090303421020508, 0.022069183349609375, 0.02092560005187988, 0.020983039855957033, 0.020846111297607422, 0.020907648086547853, 0.020933120727539063, 0.021004287719726563, 0.020841535568237306, 0.020858816146850586, 0.020810752868652343, 0.020926271438598633, 0.020994239807128907, 0.020901056289672853, 0.020853567123413085, 0.020993951797485352, 0.02089743995666504, 0.020922815322875977, 0.021093536376953125, 0.020814687728881835, 0.02084454345703125, 0.020957023620605468, 0.021018016815185548, 0.02108415985107422, 0.02189548873901367, 0.0208920955657959, 0.021064992904663085, 0.02096201515197754, 0.02104729652404785, 0.020952896118164064, 0.021167808532714844, 0.020941312789916993, 0.020836288452148438, 0.020838464736938477, 0.021094400405883788, 0.02083020782470703, 0.020807136535644533, 0.020836383819580077, 0.021017087936401366, 0.021209087371826172, 0.021313472747802733, 0.020875328063964842, 0.021049280166625977, 
0.020914176940917968, 0.021198720932006837, 0.020977792739868165, 0.020953088760375976, 0.020991327285766602, 0.02103772735595703, 0.02107913589477539, 0.020939584732055663, 0.021000288009643556, 0.02096732711791992, 0.020934207916259766, 0.02089561653137207, 0.020851360321044923, 0.0209039363861084, 0.0210402889251709, 0.021041311264038087, 0.021281471252441408, 0.02147327995300293, 0.020967424392700194, 0.02108595275878906, 0.021081920623779296, 0.021305503845214843, 0.021028383255004883, 0.02121763229370117, 0.021131519317626954, 0.02126278305053711, 0.021251071929931642, 0.020920991897583008, 0.021128480911254882, 0.02107472038269043, 0.021122207641601564, 0.021188575744628905, 0.021119935989379883, 0.02095097541809082, 0.020997631072998048, 0.02100275230407715, 0.020977664947509765, 0.021351936340332032, 0.021328384399414063, 0.021037120819091797, 0.020895679473876952, 0.021032960891723632, 0.021164031982421876, 0.021089599609375, 0.020980415344238282, 0.02101420783996582, 0.020969791412353514, 0.02109769630432129, 0.020904415130615233, 0.021087711334228515, 0.02093935966491699, 0.02087055969238281, 0.02124460792541504, 0.02111859130859375, 0.02176639938354492, 0.02091651153564453, 0.020903295516967773, 0.021604608535766602, 0.021988927841186525, 0.021154624938964844, 0.021571584701538086, 0.02123161506652832, 0.02109644889831543, 0.021018592834472657, 0.021067935943603514, 0.021139232635498047, 0.02108425521850586, 0.021171775817871094, 0.021041439056396483, 0.02098806381225586, 0.020963071823120117, 0.020904159545898436, 0.021049184799194334, 0.021262208938598634, 0.02092201614379883, 0.020973472595214843, 0.020863744735717775, 0.02108380889892578, 0.020912479400634766, 0.02103910446166992, 0.02105548858642578, 0.021168127059936523, 0.02093657684326172, 0.020934783935546875, 0.020979040145874022, 0.0209715518951416, 0.020988544464111327, 0.020948991775512696, 0.02086502456665039, 0.020918272018432618, 0.020805471420288085, 0.020926143646240233, 0.02099660873413086, 0.020888799667358397, 0.02096828842163086, 0.02107382392883301, 0.020899263381958008, 0.020818016052246095, 0.020988224029541015, 0.021037216186523437, 0.020989343643188475, 0.021061920166015626, 0.021208799362182618, 0.020937311172485353, 0.020983327865600587, 0.021119455337524413, 0.021071775436401367, 0.020963424682617186, 0.02123081588745117, 0.021224159240722656, 0.02113667106628418, 0.021111583709716795, 0.0210513916015625, 0.021759199142456054, 0.022086496353149413, 0.022144960403442382, 0.021259679794311523, 0.021164287567138673, 0.021057888031005858, 0.021129215240478515, 0.021053440093994142, 0.021393407821655275, 0.022261152267456053, 0.021737472534179687, 0.021319679260253906, 0.021298208236694337, 0.021209983825683593, 0.021188032150268554, 0.021285152435302733, 0.021064064025878907, 0.02123366355895996, 0.021075231552124023, 0.02107804870605469, 0.021152448654174805, 0.020972927093505858, 0.02100288009643555, 0.021180448532104493, 0.021175872802734374, 0.02119875144958496, 0.021233856201171877, 0.022177600860595705, 0.02113942337036133, 0.020965696334838867, 0.020990175247192384, 0.02105708885192871, 0.0209168643951416, 0.021065183639526367, 0.021174400329589844, 0.021272512435913087, 0.021301536560058593, 0.020950815200805665, 0.021393632888793944, 0.021086208343505858, 0.020991584777832032, 0.021036895751953125, 0.020981792449951173, 0.020916767120361328, 0.0210347843170166, 0.02115135955810547, 0.021328479766845702, 0.02122876739501953, 0.02171779251098633, 0.024671648025512697, 0.021182144165039062, 
0.021191583633422852, 0.020883295059204103, 0.02101433563232422, 0.020814367294311523, 0.021189983367919923, 0.02097148895263672, 0.021019136428833008, 0.021133312225341795, 0.020770816802978515, 0.020981760025024415, 0.020824064254760744, 0.020998144149780275, 0.02095676803588867, 0.020986112594604492, 0.021550559997558595, 0.020878015518188478, 0.02110054397583008, 0.020880640029907225, 0.020902271270751952, 0.020978048324584962, 0.021167711257934572, 0.020882080078125, 0.021611839294433593, 0.021256832122802733, 0.0212677116394043, 0.021037439346313476, 0.020820415496826172, 0.020823520660400392, 0.020990495681762696, 0.02164531135559082, 0.020905344009399415, 0.020992000579833983, 0.020834943771362305, 0.02112246322631836, 0.020892255783081053, 0.020996095657348633, 0.020939775466918945, 0.020912384033203123, 0.0209290885925293, 0.020967199325561525, 0.020957599639892577, 0.021112831115722656, 0.020915552139282225, 0.0209005126953125, 0.020996095657348633, 0.021053440093994142, 0.021005983352661132, 0.021491167068481445, 0.021042047500610353, 0.021157440185546876, 0.021051839828491212, 0.020965248107910155, 0.02105523109436035, 0.021113216400146486, 0.020989952087402345, 0.02106777572631836, 0.021560991287231445, 0.0211046085357666, 0.02101910400390625, 0.02099190330505371, 0.021152896881103514, 0.021127647399902343, 0.02102854347229004, 0.021237823486328126, 0.02109507179260254, 0.021238912582397462, 0.02113225555419922, 0.020982719421386718, 0.021070304870605468, 0.021037567138671876, 0.021635168075561522, 0.0211167049407959, 0.02111027145385742, 0.021074079513549806, 0.021162464141845704, 0.020926464080810548, 0.02104092788696289, 0.021063615798950195, 0.021135168075561525, 0.021372480392456053, 0.021124000549316405, 0.021121023178100586, 0.020838016510009764, 0.020730239868164062, 0.021146656036376953]",tokens/s,47.50913978586972,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-2b,google/recurrentgemma-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking 
backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gptj,EleutherAI/gpt-j-6b,EleutherAI/gpt-j-6b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTJForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-1_6b,stabilityai/stablelm-2-1_6b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,894.160896,3900.571648,0.0,3498.049536,3295.335424,s,1,7.7834404296875,7.7834404296875,0.0,7.7834404296875,7.7834404296875,7.7834404296875,7.7834404296875,[7.7834404296875],,kWh,5.925260191656889e-06,6.462645629215508e-07,1.913334863989924e-06,8.484859618568363e-06,,MB,1219.084288,4043.177984,0.0,3630.170112,3408.337408,s,10,2.6105945129394534,0.26105945129394537,0.002107157380846957,0.26122642517089845,0.26356578674316405,0.2639120895385742,0.26418913177490233,"[0.2589665832519531, 0.26098898315429686, 0.26425839233398435, 0.2603616943359375, 0.2614638671875, 0.25783480834960937, 0.26246090698242186, 0.25815374755859377, 0.26348883056640626, 0.26261669921875]",tokens/s,980.6195436753271,kWh,7.762054856469371e-06,8.557410310896022e-07,5.127452932368384e-06,1.3745248819927359e-05,tokens/kWh,18624617.375340674,MB,1244.004352,4045.275136,0.0,3632.267264,3408.339968,s,10,13.141498291015626,1.3141498291015625,0.004314069138410937,1.3140379028320313,1.3207787963867188,1.3213346496582032,1.3217793322753906,"[1.314895751953125, 1.3156776123046876, 1.31343115234375, 1.307330810546875, 1.3114093017578126, 1.312108642578125, 1.3218905029296875, 1.3206552734375, 1.30945458984375, 
1.3146446533203124]",tokens/s,47.93973914151848,kWh,3.800914795186759e-05,4.192197263589238e-06,2.5073349006031698e-05,6.727469422148853e-05,tokens/kWh,936459.1058947819,,s,630,13.13879184341432,0.020855225148276677,0.0002478703466817706,0.02082054328918457,0.021040982818603515,0.021197203540802,0.021754835529327395,"[0.02152739143371582, 0.02083020782470703, 0.02081996726989746, 0.020754432678222655, 0.021004287719726563, 0.020752031326293944, 0.0208789119720459, 0.020760351181030274, 0.02086537551879883, 0.020925088882446288, 0.020743200302124024, 0.020757471084594727, 0.021120607376098634, 0.021057344436645507, 0.02104083251953125, 0.020969663619995117, 0.020843231201171875, 0.02090991973876953, 0.02074012756347656, 0.020821216583251954, 0.02086390495300293, 0.020926464080810548, 0.020959232330322267, 0.02080732727050781, 0.020936288833618165, 0.020996864318847657, 0.020867071151733398, 0.020969472885131835, 0.02087321662902832, 0.020706655502319336, 0.02088003158569336, 0.020785152435302736, 0.02089289665222168, 0.02075644874572754, 0.02077574348449707, 0.02080518341064453, 0.020769216537475585, 0.020676095962524413, 0.02083072090148926, 0.02102272033691406, 0.020815872192382814, 0.020694047927856445, 0.020732255935668947, 0.020781055450439453, 0.020668191909790037, 0.02061574363708496, 0.020737823486328126, 0.02072831916809082, 0.02094601631164551, 0.020707263946533203, 0.021578720092773437, 0.020924415588378906, 0.021950143814086914, 0.020836223602294923, 0.020871648788452147, 0.020641759872436525, 0.02063974380493164, 0.020864767074584963, 0.02100771141052246, 0.02074006462097168, 0.020580863952636717, 0.020517311096191405, 0.0208319034576416, 0.02143507194519043, 0.02080726432800293, 0.020767135620117186, 0.02062745666503906, 0.020813407897949218, 0.02095350456237793, 0.020932607650756836, 0.020899839401245117, 0.02087116813659668, 0.02084659194946289, 0.020995616912841797, 0.02080953598022461, 0.020879520416259765, 0.02097532844543457, 0.020990688323974608, 0.020824127197265625, 0.020796735763549803, 0.020797567367553713, 0.020824640274047852, 0.020791296005249024, 0.0207891845703125, 0.02097110366821289, 0.02084022331237793, 0.021291711807250976, 0.02210406494140625, 0.021204000473022462, 0.02098684883117676, 0.021127168655395507, 0.021028863906860353, 0.020789247512817383, 0.020682336807250977, 0.020875680923461915, 0.020821279525756835, 0.020835039138793945, 0.02075142478942871, 0.020818880081176758, 0.02096233558654785, 0.021054431915283202, 0.020864864349365235, 0.02075164794921875, 0.020679071426391603, 0.02086960029602051, 0.02083011245727539, 0.02067875289916992, 0.020690240859985352, 0.021141279220581056, 0.020970399856567384, 0.020924415588378906, 0.020766464233398438, 0.020850240707397463, 0.020693695068359375, 0.02068012809753418, 0.020782880783081055, 0.020773664474487304, 0.020709375381469726, 0.020759872436523438, 0.020713632583618163, 0.02080201530456543, 0.020850751876831056, 0.02088755226135254, 0.02082204818725586, 0.020641759872436525, 0.02070262336730957, 0.02123478317260742, 0.020628351211547852, 0.020714784622192384, 0.021002912521362306, 0.020819232940673827, 0.020740896224975585, 0.020764671325683593, 0.020684799194335936, 0.02072150421142578, 0.02077507209777832, 0.020977664947509765, 0.020699136734008788, 0.02072159957885742, 0.0207455997467041, 0.020986400604248046, 0.02075823974609375, 0.020933055877685548, 0.021004287719726563, 0.020989696502685548, 0.020854047775268555, 0.020827104568481445, 0.02084454345703125, 0.020784383773803712, 
0.020730112075805666, 0.02076518440246582, 0.020752384185791017, 0.020891647338867187, 0.020840448379516603, 0.020780416488647462, 0.020832895278930664, 0.020809440612792968, 0.020986143112182616, 0.02112870407104492, 0.021078336715698243, 0.02099628829956055, 0.020867071151733398, 0.02089276885986328, 0.020861856460571288, 0.02106368064880371, 0.020776384353637694, 0.02059116744995117, 0.02083430480957031, 0.020959232330322267, 0.02104729652404785, 0.021045248031616212, 0.02088960075378418, 0.02091206359863281, 0.02088934326171875, 0.020614784240722658, 0.02079132843017578, 0.0209432315826416, 0.02077519989013672, 0.020793344497680662, 0.02086627197265625, 0.02077516746520996, 0.020699615478515624, 0.020607040405273436, 0.020788543701171874, 0.02087321662902832, 0.020675104141235353, 0.020934816360473632, 0.020744192123413087, 0.020840448379516603, 0.021341407775878906, 0.02100918388366699, 0.02104729652404785, 0.020938751220703124, 0.020770816802978515, 0.02088755226135254, 0.020899839401245117, 0.020719104766845704, 0.020644351959228514, 0.020686847686767578, 0.02071334457397461, 0.020723840713500977, 0.020731903076171874, 0.02069875144958496, 0.02056435203552246, 0.0206376953125, 0.020688543319702147, 0.020672319412231445, 0.020619327545166016, 0.020711904525756837, 0.020602432250976563, 0.020595136642456054, 0.020580320358276366, 0.020920352935791017, 0.02062335968017578, 0.02069708824157715, 0.020561920166015626, 0.020715520858764647, 0.02074825668334961, 0.020746271133422853, 0.020985183715820314, 0.02091667175292969, 0.020776735305786134, 0.02066067123413086, 0.020740095138549804, 0.02089574432373047, 0.020698400497436525, 0.02115043258666992, 0.020776960372924806, 0.02073788833618164, 0.020721824645996093, 0.020639328002929686, 0.020617631912231444, 0.020749343872070312, 0.02071446418762207, 0.02072902488708496, 0.020724544525146483, 0.02050662422180176, 0.02061516761779785, 0.02062678337097168, 0.020961599349975588, 0.02085103988647461, 0.020797439575195312, 0.02079897689819336, 0.02058041572570801, 0.020533088684082032, 0.02077142333984375, 0.020772863388061523, 0.020656032562255858, 0.020794912338256834, 0.020663871765136718, 0.020728832244873048, 0.020674560546875, 0.0214432315826416, 0.020753984451293946, 0.02093734359741211, 0.02085273551940918, 0.020625503540039062, 0.020671680450439454, 0.020637760162353514, 0.020702943801879883, 0.020517824172973632, 0.020406272888183592, 0.02062131118774414, 0.020633472442626952, 0.020557952880859376, 0.020576255798339844, 0.020570240020751952, 0.020600479125976564, 0.02056835174560547, 0.020625343322753908, 0.020725376129150392, 0.0206114559173584, 0.02098371124267578, 0.020750431060791014, 0.02066431999206543, 0.02085273551940918, 0.02075823974609375, 0.02098588752746582, 0.020660192489624023, 0.020846208572387694, 0.020607648849487306, 0.020502527236938475, 0.020537343978881836, 0.020570112228393556, 0.02060697555541992, 0.020636831283569336, 0.0207860164642334, 0.02061644744873047, 0.020645727157592775, 0.021133472442626953, 0.02355891227722168, 0.020766719818115235, 0.020844480514526368, 0.020801599502563477, 0.020936704635620116, 0.020824064254760744, 0.021236896514892578, 0.020951904296875, 0.020759647369384765, 0.020878240585327147, 0.020745920181274413, 0.02068921661376953, 0.020684799194335936, 0.02071347236633301, 0.021009439468383788, 0.020705663681030273, 0.020918880462646484, 0.02086297607421875, 0.020850624084472656, 0.02087660789489746, 0.020939168930053712, 0.020913631439208984, 0.020843391418457032, 
0.020951040267944337, 0.021073408126831054, 0.021313983917236327, 0.020612384796142576, 0.020466400146484376, 0.02079539108276367, 0.020550912857055664, 0.020574975967407226, 0.020563968658447264, 0.020630687713623048, 0.0205832633972168, 0.02074972724914551, 0.020885663986206053, 0.020871007919311523, 0.020687328338623048, 0.020811519622802734, 0.020619647979736328, 0.020690944671630858, 0.020781055450439453, 0.020746240615844725, 0.020584096908569337, 0.02082441520690918, 0.020867071151733398, 0.020746015548706056, 0.020783103942871094, 0.020793567657470702, 0.021079744338989258, 0.02075270462036133, 0.020913951873779296, 0.020930784225463867, 0.020907840728759765, 0.020913408279418944, 0.020857791900634765, 0.020957183837890626, 0.020739839553833007, 0.020737312316894532, 0.02072060775756836, 0.020700927734375, 0.020658687591552736, 0.020778751373291014, 0.02089574432373047, 0.020784191131591797, 0.021006912231445313, 0.02078553581237793, 0.020813087463378906, 0.021063711166381834, 0.02094691276550293, 0.020836671829223632, 0.020973983764648436, 0.02083564758300781, 0.020910783767700194, 0.020977664947509765, 0.02082111930847168, 0.02087615966796875, 0.020813407897949218, 0.020939168930053712, 0.020914176940917968, 0.02086275291442871, 0.020902111053466798, 0.021147167205810547, 0.02088598442077637, 0.020930080413818358, 0.02091663932800293, 0.020925535202026366, 0.020876256942749024, 0.021313888549804687, 0.020952415466308594, 0.02087615966796875, 0.020754175186157228, 0.02085091209411621, 0.020873247146606447, 0.021042335510253907, 0.020948831558227538, 0.021246400833129883, 0.021717119216918945, 0.020935104370117186, 0.020882816314697267, 0.020822559356689453, 0.020768863677978516, 0.02083635139465332, 0.02089507293701172, 0.021246623992919923, 0.02286787223815918, 0.02082204818725586, 0.020903999328613282, 0.021062944412231447, 0.0207674560546875, 0.020771968841552736, 0.020779104232788087, 0.020766687393188477, 0.02082489585876465, 0.02082387161254883, 0.020893888473510744, 0.020928287506103517, 0.020934879302978517, 0.02142207908630371, 0.020891807556152345, 0.02080342483520508, 0.021106687545776368, 0.02085091209411621, 0.021115776062011718, 0.02099702453613281, 0.021000192642211913, 0.02102604866027832, 0.02085744094848633, 0.021137567520141603, 0.021154943466186522, 0.020976512908935548, 0.021007455825805665, 0.020872095108032226, 0.02105753517150879, 0.020942848205566408, 0.020887296676635744, 0.020693248748779296, 0.02087120056152344, 0.02094076728820801, 0.020797439575195312, 0.020809568405151368, 0.020782751083374025, 0.020922367095947265, 0.0210130558013916, 0.020817855834960937, 0.021006336212158205, 0.020908031463623047, 0.020960287094116212, 0.02111996841430664, 0.020829599380493165, 0.020912031173706054, 0.021600288391113283, 0.02191971206665039, 0.021456287384033202, 0.021770240783691407, 0.02111097526550293, 0.021241632461547852, 0.020988544464111327, 0.021008384704589843, 0.020919967651367187, 0.021356895446777345, 0.020965375900268556, 0.020729280471801757, 0.020835968017578126, 0.020811935424804688, 0.02118889617919922, 0.020863487243652345, 0.020934303283691405, 0.020996448516845703, 0.020969472885131835, 0.021405536651611327, 0.02273414421081543, 0.021066816329956054, 0.02100815963745117, 0.020942848205566408, 0.020991455078125, 0.020970016479492187, 0.02083635139465332, 0.020831232070922853, 0.02090291213989258, 0.020817920684814452, 0.020759584426879883, 0.020771808624267578, 0.02082611274719238, 0.02075667190551758, 0.02075628852844238, 0.020858272552490235, 
0.020736608505249023, 0.020540800094604492, 0.02074678421020508, 0.02089174461364746, 0.020746015548706056, 0.020922880172729492, 0.020827295303344727, 0.020828704833984375, 0.020836383819580077, 0.02080486488342285, 0.020765472412109375, 0.020688863754272462, 0.021034080505371092, 0.02079158401489258, 0.020816255569458007, 0.02078950309753418, 0.02075391960144043, 0.020984319686889647, 0.02088140869140625, 0.020764671325683593, 0.02074575996398926, 0.02067673683166504, 0.02085513687133789, 0.020858688354492186, 0.020785600662231445, 0.02073574447631836, 0.020670175552368164, 0.02139139175415039, 0.020947391510009766, 0.020768192291259764, 0.020771392822265623, 0.02089776039123535, 0.020916255950927734, 0.02087936019897461, 0.020727807998657227, 0.020737503051757814, 0.02073382377624512, 0.02083452796936035, 0.02078704071044922, 0.0207093448638916, 0.020867071151733398, 0.020867712020874025, 0.020809728622436522, 0.020725759506225586, 0.020802688598632813, 0.020982080459594727, 0.020740671157836912, 0.02090943908691406, 0.02097011184692383, 0.020996095657348633, 0.02081292724609375, 0.020831104278564452, 0.020787200927734374, 0.020774784088134764, 0.02085286331176758, 0.020772159576416014, 0.02122003173828125, 0.020804800033569337, 0.02076483154296875, 0.02067020797729492, 0.02075331115722656, 0.020796607971191407, 0.02075116729736328, 0.020916383743286134, 0.02055561637878418, 0.02064588737487793, 0.020674240112304686, 0.02075270462036133, 0.020722976684570314, 0.02074697685241699, 0.020624576568603517, 0.020710208892822265, 0.020677663803100585, 0.02070185661315918, 0.02075270462036133, 0.020575551986694335, 0.02070547294616699, 0.020707263946533203, 0.02069766426086426, 0.020838367462158204, 0.020699167251586915, 0.020715520858764647, 0.020570112228393556, 0.020619104385375977, 0.020750112533569336, 0.02081830406188965, 0.02067251205444336, 0.020695039749145508, 0.0206376953125, 0.02064588737487793, 0.02149580764770508, 0.020746240615844725, 0.020969472885131835, 0.021106687545776368, 0.020751808166503908, 0.020789152145385743, 0.02081654357910156, 0.020957183837890626, 0.02094607925415039, 0.02080240058898926, 0.0208035831451416, 0.020874399185180664, 0.020980575561523437, 0.020791296005249024, 0.020794879913330077, 0.020845056533813477, 0.020812896728515624, 0.02086390495300293, 0.020647104263305665, 0.020904767990112306, 0.02066227149963379, 0.0207805118560791, 0.021006879806518556, 0.020579423904418945, 0.02113372802734375, 0.02072831916809082, 0.021011999130249023, 0.02086140823364258, 0.02071347236633301, 0.020753856658935546, 0.020791872024536133, 0.02086092758178711, 0.020684799194335936, 0.020932512283325197, 0.02084230422973633, 0.02091811180114746, 0.021336767196655275, 0.02071731185913086, 0.020737503051757814, 0.020836639404296874, 0.02085638427734375, 0.020887584686279298, 0.020914079666137696, 0.020986623764038086, 0.02082611274719238, 0.02088332748413086, 0.020985408782958983, 0.020768640518188476, 0.02106172752380371, 0.02139507293701172, 0.02092540740966797, 0.020783103942871094, 0.020882463455200194, 0.020716320037841796, 0.020547775268554686, 0.020594688415527345, 0.02072096061706543, 0.02095337677001953, 0.020541696548461913, 0.020846431732177734, 0.021004255294799806, 0.02103536033630371, 0.02068889617919922]",tokens/s,47.94961420412345,, 
bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-3B-v1,togethercomputer/RedPajama-INCITE-Base-3B-v1,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,888.983552,6266.159104,0.0,5863.636992,5744.700416,s,1,7.45699462890625,7.45699462890625,0.0,7.45699462890625,7.45699462890625,7.45699462890625,7.45699462890625,[7.45699462890625],,kWh,7.113759712501632e-06,7.638891259396855e-07,3.997225420021566e-06,1.1874874258462884e-05,,MB,1223.028736,6473.777152,0.0,6060.76928,6020.358144,s,10,4.848350708007812,0.48483507080078125,0.0022711373757931534,0.4853239288330078,0.48672204895019533,0.48673991546630857,0.4867542086791992,"[0.47902398681640623, 0.4825215148925781, 0.4853169250488281, 0.4847822265625, 0.4864744567871094, 0.48621688842773436, 0.4853309326171875, 0.48675778198242187, 0.48671807861328126, 0.4852079162597656]",tokens/s,528.0146083021094,kWh,1.410516786944653e-05,1.5555334130803751e-06,9.386184757619373e-06,2.504688604014628e-05,tokens/kWh,10220831.427494485,MB,1248.038912,6557.663232,0.0,6144.65536,6125.443072,s,10,18.058535888671873,1.8058535888671874,0.002565872375605868,1.8050317993164062,1.8087569702148436,1.8093953308105468,1.8099060192871095,"[1.8086151123046874, 1.81003369140625, 1.8023450927734375, 1.8039454345703125, 1.803730712890625, 1.8084869384765625, 1.8079364013671875, 1.80337890625, 1.80415625, 1.8059073486328125]",tokens/s,34.886549157908156,kWh,5.281517603180065e-05,5.825553445461517e-06,3.488962844078286e-05,9.353035791804504e-05,tokens/kWh,673578.0916737544,,s,630,18.05559309577944,0.02865967158060226,0.0002857781261842765,0.02862380790710449,0.028880179595947266,0.02894734869003296,0.029941834468841553,"[0.02987414360046387, 0.029070335388183592, 0.02874880027770996, 0.02859212875366211, 0.028434047698974608, 0.028285472869873048, 0.0285467529296875, 0.028294591903686522, 0.02846998405456543, 0.028456960678100586, 0.028485631942749022, 0.028190303802490234, 0.02842399978637695, 0.0285130558013916, 0.028560768127441405, 0.028376895904541014, 0.028442655563354492, 0.0283789119720459, 0.02846598434448242, 0.028456960678100586, 0.028422143936157225, 0.028300416946411132, 0.028474111557006836, 0.028401760101318358, 0.028577951431274413, 0.02968780708312988, 0.028512128829956053, 0.028636287689208985, 0.02877241516113281, 0.028635711669921876, 0.028574047088623048, 0.02864134407043457, 0.028640703201293947, 0.02883542442321777, 0.028651519775390624, 0.02869331169128418, 0.028765567779541014, 0.028710943222045898, 0.02885036849975586, 0.028879264831542968, 0.0288636474609375, 0.028842527389526366, 0.028782304763793946, 0.028803680419921877, 0.028729343414306642, 0.028857791900634765, 0.028658016204833985, 0.028661247253417968, 0.028562143325805665, 0.028680192947387696, 0.028794464111328126, 0.02883216094970703, 0.028850175857543944, 0.028841983795166014, 0.02886003112792969, 0.02894476890563965, 0.02891366386413574, 0.028942176818847656, 0.028847999572753906, 0.02874172782897949, 0.028864704132080077, 
0.030020927429199217, 0.02866758346557617, 0.02977289581298828, 0.028886783599853517, 0.02853696060180664, 0.028480960845947267, 0.028389984130859375, 0.0284685115814209, 0.02846735954284668, 0.02843257522583008, 0.028346752166748045, 0.02845897674560547, 0.028415519714355467, 0.02848409652709961, 0.02833785629272461, 0.02847065544128418, 0.028543935775756837, 0.028572959899902342, 0.02859663963317871, 0.030325056076049805, 0.028355871200561523, 0.028483455657958984, 0.02865398406982422, 0.028565023422241213, 0.02852521514892578, 0.02859443283081055, 0.028598272323608398, 0.02848588752746582, 0.02857088088989258, 0.02881510353088379, 0.02862575912475586, 0.02852444839477539, 0.028692480087280273, 0.028694528579711914, 0.028901376724243165, 0.028800960540771484, 0.028844032287597656, 0.02878870391845703, 0.028576927185058595, 0.02852960014343262, 0.028763519287109376, 0.02903718376159668, 0.028933631896972657, 0.028776960372924806, 0.028655616760253907, 0.02872319984436035, 0.028831872940063476, 0.028790271759033204, 0.028823808670043947, 0.028659679412841796, 0.0285383358001709, 0.02858464050292969, 0.028620800018310546, 0.02876825523376465, 0.028654720306396483, 0.031232959747314454, 0.02856924819946289, 0.028687776565551756, 0.028786880493164062, 0.028775136947631837, 0.028733087539672852, 0.028698944091796876, 0.028708864212036132, 0.028880928039550783, 0.028853792190551758, 0.029952831268310547, 0.028876768112182618, 0.02850614356994629, 0.028387327194213868, 0.028104703903198244, 0.028362207412719727, 0.028475551605224608, 0.028211135864257813, 0.028406208038330077, 0.02836275291442871, 0.02849718475341797, 0.02842083168029785, 0.028501024246215822, 0.02842748832702637, 0.02849260711669922, 0.028357568740844726, 0.028366847991943358, 0.02830303955078125, 0.02840608024597168, 0.028598272323608398, 0.02854297637939453, 0.028428287506103517, 0.028474912643432618, 0.028434528350830077, 0.028607967376708985, 0.028595104217529296, 0.028434431076049805, 0.028554943084716795, 0.028508480072021485, 0.028491775512695314, 0.02856332778930664, 0.028538015365600584, 0.028781536102294922, 0.028831743240356447, 0.028862560272216797, 0.028833791732788085, 0.028731424331665038, 0.028678016662597658, 0.028548608779907225, 0.02843814468383789, 0.02853772735595703, 0.028590080261230468, 0.028708864212036132, 0.028719104766845704, 0.028708736419677736, 0.02864041519165039, 0.02859107208251953, 0.028700416564941406, 0.028700927734375, 0.028702592849731444, 0.028588159561157227, 0.02854812812805176, 0.0286278076171875, 0.028879072189331053, 0.028674047470092775, 0.028644447326660157, 0.028596927642822265, 0.028758144378662108, 0.028873920440673828, 0.02890015983581543, 0.02878463935852051, 0.028891008377075197, 0.02880240058898926, 0.029970943450927736, 0.028947967529296875, 0.028471807479858398, 0.0283504638671875, 0.028444671630859376, 0.028444671630859376, 0.028326976776123048, 0.028419200897216796, 0.02826630401611328, 0.028475391387939454, 0.02839257621765137, 0.02839139175415039, 0.028298080444335936, 0.02847350311279297, 0.028539840698242187, 0.02847433662414551, 0.028262527465820312, 0.02848912048339844, 0.028463584899902344, 0.028579296112060545, 0.02843497657775879, 0.028485727310180665, 0.028484832763671874, 0.028518239974975587, 0.028553951263427736, 0.028493440628051758, 0.028631679534912108, 0.028617919921875, 0.028564064025878907, 0.0285696964263916, 0.028571231842041016, 0.02855936050415039, 0.028867040634155273, 0.028927936553955078, 0.028833791732788085, 0.028823551177978517, 
0.02861408042907715, 0.02873401641845703, 0.028821407318115236, 0.028805215835571288, 0.02861849594116211, 0.028535104751586913, 0.028591840744018555, 0.0287728328704834, 0.028638975143432617, 0.028601823806762697, 0.028629535675048827, 0.028633087158203126, 0.028672000885009766, 0.02865376091003418, 0.028595840454101563, 0.028739776611328125, 0.028726335525512695, 0.02885478401184082, 0.028692607879638673, 0.028706592559814455, 0.028719327926635743, 0.02876652717590332, 0.028762111663818358, 0.02879404830932617, 0.028775136947631837, 0.02893008041381836, 0.028855392456054688, 0.029849855422973633, 0.028813823699951172, 0.02853887939453125, 0.028420095443725587, 0.02839756774902344, 0.02841209602355957, 0.02831955146789551, 0.028438112258911134, 0.028196704864501952, 0.028445056915283203, 0.028439807891845702, 0.028416959762573243, 0.02845804786682129, 0.028375455856323242, 0.028506080627441407, 0.028436704635620116, 0.028356191635131835, 0.028563615798950195, 0.0284751033782959, 0.028502464294433594, 0.028382816314697266, 0.02851315116882324, 0.02842620849609375, 0.028516639709472658, 0.028542688369750976, 0.028606367111206055, 0.028715103149414063, 0.02859017562866211, 0.028554464340209963, 0.028596927642822265, 0.028590080261230468, 0.028610559463500978, 0.0287457275390625, 0.02876825523376465, 0.02898124885559082, 0.028891136169433593, 0.028703039169311523, 0.028714336395263673, 0.028801088333129884, 0.028690080642700195, 0.028661439895629883, 0.028656576156616213, 0.028575296401977538, 0.028801279067993166, 0.028663360595703125, 0.02872755241394043, 0.02866009521484375, 0.028602367401123048, 0.028862239837646485, 0.028657888412475584, 0.028721151351928712, 0.028717056274414062, 0.028733152389526367, 0.028676160812377928, 0.02868000030517578, 0.028674463272094726, 0.028656864166259767, 0.02879155158996582, 0.028667488098144532, 0.028663423538208006, 0.02871993637084961, 0.028717056274414062, 0.028864383697509766, 0.029855743408203125, 0.028882944107055664, 0.028519935607910156, 0.02844316864013672, 0.028362592697143554, 0.028358879089355468, 0.028454656600952147, 0.028381343841552734, 0.02855686378479004, 0.028451135635375976, 0.028341407775878905, 0.02921500778198242, 0.028365503311157225, 0.02837708854675293, 0.028488895416259766, 0.028380992889404297, 0.028500608444213867, 0.02869081687927246, 0.02822515106201172, 0.028512639999389647, 0.028558944702148436, 0.028579872131347658, 0.02847545623779297, 0.02859859275817871, 0.028631040573120117, 0.02860851287841797, 0.028618240356445314, 0.028604127883911132, 0.028693344116210936, 0.028577728271484373, 0.02855526351928711, 0.029050880432128907, 0.028661760330200195, 0.02904473686218262, 0.02907481575012207, 0.0288221435546875, 0.028721151351928712, 0.028646720886230468, 0.02876192092895508, 0.028691328048706055, 0.0287457275390625, 0.028688383102416993, 0.028757055282592772, 0.028617376327514647, 0.028663936614990233, 0.02892742347717285, 0.028834272384643554, 0.028905696868896484, 0.02886777687072754, 0.02879782485961914, 0.028805152893066406, 0.028790719985961916, 0.028851551055908205, 0.028920480728149414, 0.028884767532348633, 0.028889312744140624, 0.02874575996398926, 0.02880508804321289, 0.02867420768737793, 0.028949920654296874, 0.02887539291381836, 0.02891142463684082, 0.028938047409057616, 0.030078111648559572, 0.029108064651489258, 0.028747648239135743, 0.028458656311035155, 0.02830998420715332, 0.028434431076049805, 0.028499967575073244, 0.02851020812988281, 0.028401344299316407, 0.028383552551269533, 0.028344287872314453, 
0.02840287971496582, 0.028539743423461914, 0.02854297637939453, 0.028399072647094726, 0.028455455780029296, 0.028299264907836914, 0.028563135147094725, 0.028447040557861326, 0.028452863693237306, 0.02835638427734375, 0.028545087814331054, 0.028500288009643555, 0.028548959732055665, 0.028497535705566405, 0.028372703552246095, 0.028503871917724608, 0.0284334716796875, 0.02972854423522949, 0.028649471282958985, 0.028625919342041017, 0.028572032928466797, 0.02867635154724121, 0.028759807586669923, 0.028924320220947267, 0.028904960632324218, 0.028704639434814452, 0.02874847984313965, 0.028594335556030272, 0.02858540725708008, 0.02987065505981445, 0.028573759078979494, 0.028995519638061525, 0.028628992080688476, 0.02867417526245117, 0.028659584045410157, 0.028669631958007813, 0.028651039123535157, 0.028601184844970703, 0.02889638328552246, 0.02877471923828125, 0.028948064804077148, 0.02886319923400879, 0.028911808013916015, 0.028929855346679686, 0.028782783508300783, 0.028721151351928712, 0.028729215621948242, 0.028843584060668944, 0.028777023315429688, 0.02885830307006836, 0.028833856582641603, 0.02885193634033203, 0.029914911270141602, 0.028858463287353517, 0.028428415298461913, 0.028253440856933595, 0.02812191963195801, 0.028429983139038086, 0.028589855194091796, 0.02848307228088379, 0.028328704833984374, 0.02843065643310547, 0.028422208786010743, 0.028477312088012696, 0.028485631942749022, 0.028427616119384765, 0.028324512481689452, 0.02841804885864258, 0.028424192428588867, 0.0285199031829834, 0.02840630340576172, 0.02852556800842285, 0.028431360244750976, 0.02855116844177246, 0.02847260856628418, 0.02843516731262207, 0.028576959609985353, 0.028576576232910156, 0.028720991134643555, 0.02857744026184082, 0.02851481628417969, 0.028624767303466796, 0.02859555244445801, 0.028613664627075194, 0.028745471954345705, 0.028789024353027343, 0.02903971290588379, 0.028959360122680664, 0.02862281608581543, 0.028608543395996094, 0.028703935623168947, 0.028656576156616213, 0.02859596824645996, 0.028610368728637696, 0.028555551528930665, 0.02868841552734375, 0.028622848510742187, 0.028614656448364258, 0.028741247177124025, 0.028651264190673827, 0.028631679534912108, 0.028681472778320314, 0.028619455337524413, 0.028704832077026367, 0.028803071975708007, 0.02873958396911621, 0.028590080261230468, 0.028594175338745118, 0.028725248336791992, 0.028901376724243165, 0.028819040298461916, 0.028770719528198242, 0.02881100845336914, 0.02874393653869629, 0.02879078483581543, 0.030087167739868165, 0.02875596809387207, 0.028536832809448243, 0.02843654441833496, 0.028278528213500978, 0.02837222480773926, 0.02824083137512207, 0.028430335998535155, 0.02857574462890625, 0.02850204849243164, 0.028327903747558593, 0.028450815200805665, 0.02845244789123535, 0.02850624084472656, 0.028393760681152343, 0.028374847412109376, 0.028272607803344726, 0.028507551193237304, 0.02849260711669922, 0.028534591674804686, 0.028379327774047853, 0.028535104751586913, 0.028437280654907228, 0.02861292839050293, 0.028574047088623048, 0.028454463958740236, 0.028524415969848633, 0.028492639541625977, 0.02859004783630371, 0.02854911994934082, 0.02854707145690918, 0.02882054328918457, 0.02868115234375, 0.02897305679321289, 0.02881295967102051, 0.02885446357727051, 0.028887199401855468, 0.028782655715942383, 0.028677183151245116, 0.028631839752197265, 0.02861270332336426, 0.02855449676513672, 0.028698879241943358, 0.028618240356445314, 0.02858857536315918, 0.028611040115356444, 0.02859555244445801, 0.02874844741821289, 0.02864454460144043, 
0.0286625919342041, 0.028716127395629884, 0.028682336807250977, 0.028793664932250978, 0.028733440399169922, 0.028701984405517578, 0.02866236877441406, 0.02864905548095703, 0.028926496505737306, 0.028854272842407228, 0.028831520080566407, 0.0289256649017334, 0.028868768692016603, 0.028834144592285157, 0.02981372833251953, 0.028880096435546874, 0.02854764747619629, 0.028487808227539064, 0.028364063262939453, 0.02843280029296875, 0.028485952377319337, 0.028450815200805665, 0.0283275203704834, 0.028455263137817384, 0.028462560653686523, 0.02851443290710449, 0.028350944519042968, 0.02850201606750488, 0.028467199325561524, 0.028504064559936523, 0.02838105583190918, 0.02852672004699707, 0.028486656188964843, 0.02851036834716797, 0.028545888900756836, 0.02848307228088379, 0.028508384704589843, 0.028483200073242187, 0.028461727142333984, 0.028645376205444335, 0.028556768417358398, 0.02872153663635254, 0.02894659233093262, 0.028778656005859375, 0.02875119972229004, 0.028641471862792967, 0.028651584625244142, 0.02875008010864258, 0.02867203140258789, 0.02882294464111328, 0.02879955291748047, 0.028827648162841796, 0.028724639892578126, 0.028795488357543947, 0.028632383346557617, 0.029149343490600586, 0.028494367599487303, 0.02858736038208008, 0.028535455703735352, 0.028692480087280273, 0.02874982452392578, 0.028657663345336915, 0.028649248123168946, 0.028778560638427736, 0.02878879928588867, 0.028850271224975587, 0.02876767921447754, 0.02877907180786133, 0.028806943893432618, 0.028696640014648438, 0.028729503631591796, 0.02874982452392578, 0.028723295211791993, 0.028752832412719725, 0.02874880027770996, 0.028878816604614256, 0.028891136169433593]",tokens/s,34.89223514608697,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-8B,meta-llama/Meta-Llama-3-8B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", 
line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1112, in __init__ self.lm_head = nn.Linear(config.hidden_size, config.vocab_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1002.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 532.12 MiB is free. Process 170593 has 14.22 GiB memory in use. Of the allocated memory 13.98 GiB is allocated by PyTorch, and 129.48 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-14B,Qwen/Qwen2-beta-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 270, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 85165 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 905.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-32B,Qwen/Qwen1.5-32B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 268.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 164.12 MiB is free. Process 73225 has 14.58 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 1.51 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-125m,EleutherAI/gpt-neo-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-564M,facebook/xglm-564M,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through 
torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-14B,Qwen/Qwen-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-7b,huggyllama/llama-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.859328,13880.918016,0.0,13478.395904,13476.849152,s,1,7.33473486328125,7.33473486328125,0.0,7.33473486328125,7.33473486328125,7.33473486328125,7.33473486328125,[7.33473486328125],,kWh,8.746261008328322e-06,9.571777526443453e-07,4.518059169997257e-06,1.4221497930969926e-05,,MB,1208.328192,14113.701888,0.0,13700.694016,13671.637504,s,10,12.465203002929687,1.2465203002929688,0.004124346161829083,1.2461522216796874,1.2513182739257813,1.2517716247558595,1.2521343054199219,"[1.237404296875, 1.2436973876953126, 1.2450819091796874, 1.24515478515625, 1.2462760009765625, 1.2460284423828125, 1.247148193359375, 1.2522249755859376, 1.250969482421875, 1.251217529296875]",tokens/s,205.3717054907429,kWh,3.6381039097082826e-05,4.012331093909132e-06,2.4127130412799484e-05,6.452050060379144e-05,tokens/kWh,3967731.149081577,MB,1256.46848,14115.79904,0.0,13702.791168,13671.640064,s,10,37.689654296875005,3.7689654296875,0.00349155729303844,3.7675452880859375,3.7741384033203125,3.7743107788085934,3.7744486791992187,"[3.764998046875, 3.765468994140625, 3.767377685546875, 3.766267822265625, 3.767712890625, 3.766569091796875, 3.774483154296875, 3.769244873046875, 3.773431640625, 3.77410009765625]",tokens/s,16.71546241941083,kWh,0.00011014184260041703,1.2149533402413405e-05,7.336978091800062e-05,0.00019566115692083106,tokens/kWh,321985.216644156,,s,630,37.68689885330206,0.059820474370320634,0.00030086199764592673,0.059817808151245114,0.06011069374084473,0.06018684043884277,0.06111489501953125,"[0.061041374206542966, 0.05948416137695312, 0.059227489471435545, 0.05929846572875976, 0.0592828483581543, 0.0594417610168457, 0.05934908676147461, 0.05932380676269531, 0.05948627090454101, 0.05961580657958984, 0.059596641540527344, 0.05973948669433594, 0.05961299133300781, 0.059664798736572267, 0.059533790588378904, 0.05953875350952149, 0.059653984069824216, 0.059706207275390624, 0.05947903823852539, 0.0594442253112793, 0.059445343017578124, 0.059547550201416014, 0.05954870223999023, 0.05948720169067383, 0.059617279052734375, 0.05974425506591797, 0.059662303924560546, 0.05970537567138672, 0.05969276809692383, 0.05979900741577148, 0.059718463897705076, 0.059754528045654294, 0.0598263053894043, 0.059899742126464844, 0.0598364143371582, 0.05976473617553711, 0.05969036865234375, 0.059873184204101565, 0.0597061767578125, 0.05978307342529297, 0.05970934295654297, 0.05990409469604492, 0.059822078704833984, 0.059848705291748044, 0.059957248687744144, 0.05998387145996094, 0.05989548873901367, 0.05991872024536133, 0.05991740798950195, 0.05999494552612305, 0.05989174270629883, 0.05999411010742187, 0.059840511322021485, 0.05994291305541992, 0.059875328063964846, 0.059890846252441406, 0.060174625396728514, 0.0600945930480957, 
0.06011654281616211, 0.06011379241943359, 0.0600656623840332, 0.06011068725585938, 0.060068126678466796, 0.06112598419189453, 0.059519550323486325, 0.05923209762573242, 0.05927088165283203, 0.05934710311889648, 0.05955807876586914, 0.05942911911010742, 0.05938143920898437, 0.059477886199951174, 0.059560447692871096, 0.059514976501464846, 0.05965619277954102, 0.05957804870605469, 0.05981430435180664, 0.059688545227050784, 0.05977734375, 0.059690559387207034, 0.059761089324951173, 0.05954969787597656, 0.05957827377319336, 0.05942486572265625, 0.05950892639160156, 0.05950444793701172, 0.059578369140625, 0.0596049919128418, 0.05972147369384766, 0.05957807922363281, 0.05969359970092773, 0.05964495849609375, 0.05982716751098633, 0.05978281784057617, 0.05986953735351563, 0.05986099243164063, 0.05987644958496094, 0.05980662536621094, 0.059830398559570314, 0.05967264175415039, 0.05990323257446289, 0.05978374481201172, 0.05976652908325195, 0.059783424377441406, 0.060071937561035155, 0.059908096313476565, 0.059824127197265625, 0.059735424041748045, 0.05977561569213867, 0.059813663482666014, 0.05988288116455078, 0.05989788818359375, 0.059988479614257816, 0.05992591857910156, 0.060015518188476565, 0.05985609436035156, 0.05994575881958008, 0.05986646270751953, 0.05992515182495117, 0.05992879867553711, 0.06002256011962891, 0.060055328369140626, 0.060215007781982424, 0.059963905334472656, 0.06005526351928711, 0.06002511978149414, 0.061005313873291014, 0.05955039978027344, 0.059289344787597655, 0.059351295471191404, 0.05920486450195313, 0.05940505599975586, 0.05937263870239258, 0.0593554573059082, 0.05944915390014648, 0.05953567886352539, 0.0595972785949707, 0.05954150390625, 0.059516960144042966, 0.059604961395263674, 0.05957632064819336, 0.059690784454345704, 0.05981721496582031, 0.05989884948730469, 0.05976063919067383, 0.059748382568359376, 0.05963983917236328, 0.05970899200439453, 0.05954207992553711, 0.059563838958740234, 0.059584510803222655, 0.05964396667480469, 0.05952710342407227, 0.059792896270751954, 0.05967923355102539, 0.05978316879272461, 0.05979750442504883, 0.05976681518554688, 0.05978470230102539, 0.05989785766601562, 0.059815711975097656, 0.05983916854858398, 0.05997568130493164, 0.05996035385131836, 0.059855838775634766, 0.05981561660766602, 0.05989961624145508, 0.059996768951416014, 0.05982316970825195, 0.05978412628173828, 0.06032179260253906, 0.0599920654296875, 0.0599101448059082, 0.059888992309570316, 0.05998659133911133, 0.060044734954833985, 0.06007251358032226, 0.060050559997558595, 0.05996540832519531, 0.060032161712646484, 0.05994041442871094, 0.05986323165893555, 0.06002483367919922, 0.06004956817626953, 0.06022332763671875, 0.060063838958740234, 0.05991414260864258, 0.05997568130493164, 0.06003683090209961, 0.06143532943725586, 0.059587230682373045, 0.059348926544189454, 0.05938079833984375, 0.059482398986816405, 0.05958623886108398, 0.059409374237060546, 0.059379711151123046, 0.0596234245300293, 0.059650047302246094, 0.05941657638549805, 0.059404289245605466, 0.059394046783447264, 0.05960217666625976, 0.059515647888183594, 0.059684864044189455, 0.05988880157470703, 0.05987728118896484, 0.05964048004150391, 0.05959724807739258, 0.059493377685546876, 0.05957923126220703, 0.05950848007202148, 0.059590240478515626, 0.059654335021972656, 0.05978774261474609, 0.059611137390136716, 0.05975449752807617, 0.0596580810546875, 0.05983833694458008, 0.059856895446777345, 0.05983273696899414, 0.05979673767089844, 0.059990367889404296, 0.05978134536743164, 0.05978937530517578, 
0.059666431427001954, 0.059957248687744144, 0.05970534515380859, 0.05966201782226563, 0.059705665588378906, 0.05985686492919922, 0.05968694305419922, 0.05987523269653321, 0.05978489685058594, 0.05988803100585938, 0.059864959716796874, 0.059825920104980466, 0.05994643020629883, 0.060064704895019534, 0.06003302383422852, 0.060007713317871095, 0.05993667221069336, 0.06006425476074219, 0.05995756912231445, 0.05987942504882812, 0.05990195083618164, 0.06006719970703125, 0.05995280075073242, 0.06008127975463867, 0.06007513427734375, 0.06012979125976563, 0.06002521514892578, 0.061087745666503906, 0.059542625427246094, 0.05928236770629883, 0.05933580780029297, 0.05933961486816406, 0.059445247650146485, 0.059322368621826174, 0.059418174743652345, 0.05943545532226562, 0.059504638671875, 0.0594944953918457, 0.05961308670043945, 0.059584510803222655, 0.059697345733642576, 0.05970415878295898, 0.0599582405090332, 0.05974246215820313, 0.05971327972412109, 0.05958860778808594, 0.05952511978149414, 0.05951692962646484, 0.05961523056030273, 0.059545215606689454, 0.05950604629516602, 0.05996243286132812, 0.059684574127197264, 0.059601119995117184, 0.059678943634033206, 0.05969488143920899, 0.059822078704833984, 0.05974220657348633, 0.059827457427978514, 0.05998873519897461, 0.06003302383422852, 0.059873279571533204, 0.05985263824462891, 0.06004751968383789, 0.05991424179077148, 0.05989542388916016, 0.05983679962158203, 0.05978313446044922, 0.05991756820678711, 0.059723678588867186, 0.059730464935302735, 0.05990544128417969, 0.059929534912109374, 0.059850753784179686, 0.059870655059814454, 0.05994355010986328, 0.060093536376953124, 0.06007484817504883, 0.06006547164916992, 0.05990848159790039, 0.059950977325439456, 0.05986928176879883, 0.06021731185913086, 0.060001953125, 0.06025046539306641, 0.06009241485595703, 0.06007580947875977, 0.059995712280273436, 0.06010723114013672, 0.06004140853881836, 0.061257728576660155, 0.05964169692993164, 0.059248897552490236, 0.05925878524780273, 0.05935244750976563, 0.059453697204589845, 0.059401790618896486, 0.05936825561523437, 0.05952102279663086, 0.05955987167358399, 0.0594106559753418, 0.05962940979003906, 0.05952511978149414, 0.0596267204284668, 0.05964060974121094, 0.05974390411376953, 0.05989984130859375, 0.059867198944091794, 0.05975484848022461, 0.05966166305541992, 0.05956582260131836, 0.05967350387573242, 0.059671775817871094, 0.05949705505371094, 0.059494144439697264, 0.05964025497436523, 0.0596049919128418, 0.05965619277954102, 0.05981388854980469, 0.059815231323242186, 0.05984860610961914, 0.05981996917724609, 0.05990620803833008, 0.05996108627319336, 0.0597534065246582, 0.0598359375, 0.05979132843017578, 0.059904510498046876, 0.05978112030029297, 0.05978112030029297, 0.05977920150756836, 0.05989497756958008, 0.05985948944091797, 0.059878623962402344, 0.059810752868652346, 0.05994496154785156, 0.059875328063964846, 0.05996748733520508, 0.060152896881103514, 0.06013558578491211, 0.059853599548339846, 0.059863105773925784, 0.05983020782470703, 0.05993881607055664, 0.05984281539916992, 0.059929439544677734, 0.05989494323730469, 0.06003830337524414, 0.05990256118774414, 0.06004028701782226, 0.05997628784179688, 0.060150081634521485, 0.06008422470092773, 0.061479072570800784, 0.05981840133666992, 0.059478145599365234, 0.05956390380859375, 0.05943427276611328, 0.05960163116455078, 0.05953744125366211, 0.05954966354370117, 0.05944729614257813, 0.05950668716430664, 0.05948579025268555, 0.05959280014038086, 0.05970758438110352, 0.05976486587524414, 
0.05969641494750977, 0.059815711975097656, 0.05998041534423828, 0.05990387344360352, 0.05965167999267578, 0.05962838363647461, 0.059616447448730465, 0.05979385757446289, 0.05969715118408203, 0.05967910385131836, 0.05983027267456055, 0.05982537460327148, 0.0598249282836914, 0.05975449752807617, 0.05977702331542969, 0.05997260665893555, 0.059894783020019535, 0.060073726654052736, 0.060088577270507815, 0.05996300888061523, 0.05974668884277344, 0.059829662322998044, 0.059644447326660154, 0.05989494323730469, 0.05985782241821289, 0.059830528259277344, 0.05978291320800781, 0.05986304092407226, 0.05973606491088867, 0.06005702209472656, 0.06044303894042969, 0.0601313591003418, 0.060020862579345705, 0.060047359466552735, 0.060211200714111325, 0.060450817108154295, 0.06014771270751953, 0.060290081024169925, 0.06001353454589844, 0.06009030532836914, 0.06023920059204101, 0.06018735885620117, 0.060028926849365234, 0.060252159118652344, 0.06008182525634766, 0.06006182479858398, 0.06029129409790039, 0.06012895965576172, 0.060348129272460936, 0.06145040130615234, 0.05973769760131836, 0.05935753631591797, 0.059502113342285154, 0.059368320465087894, 0.05949020767211914, 0.059594753265380856, 0.0595148811340332, 0.059404289245605466, 0.05949017715454102, 0.05939532852172852, 0.059493247985839846, 0.05963270568847656, 0.05966249465942383, 0.05957244873046875, 0.059744033813476566, 0.0598836784362793, 0.05990873718261719, 0.05972115325927734, 0.05959075164794922, 0.05951724624633789, 0.05973980712890625, 0.059798015594482425, 0.05969305419921875, 0.05971343994140625, 0.059684959411621094, 0.05969715118408203, 0.05972361755371094, 0.059846817016601564, 0.05993024063110351, 0.059945343017578125, 0.05997081756591797, 0.05985766220092773, 0.0598851203918457, 0.059728321075439454, 0.059713249206542966, 0.059797088623046876, 0.05976339340209961, 0.05972991943359375, 0.05975244903564453, 0.05997798538208008, 0.059919105529785154, 0.05998284912109375, 0.059915904998779294, 0.05980403137207031, 0.059892768859863284, 0.05987427139282227, 0.06001049423217773, 0.060071456909179685, 0.06011542510986328, 0.05998096084594726, 0.06004822540283203, 0.059947135925292966, 0.0600021743774414, 0.05988556671142578, 0.05996656036376953, 0.05995999908447266, 0.060143424987792966, 0.060002815246582034, 0.06011075210571289, 0.06007593536376953, 0.06019900894165039, 0.060114559173583985, 0.061517822265625, 0.05991526412963867, 0.05934592056274414, 0.059350143432617186, 0.059245441436767576, 0.059440513610839844, 0.059426464080810544, 0.05943190383911133, 0.05945923233032226, 0.059529441833496094, 0.05951702499389649, 0.05963779067993164, 0.059594753265380856, 0.059749534606933594, 0.0597053108215332, 0.05972876739501953, 0.0599384651184082, 0.06000387191772461, 0.05972665786743164, 0.05966233444213867, 0.05957020950317383, 0.05965388870239258, 0.059576446533203126, 0.05968905639648438, 0.05963289642333985, 0.059670654296875, 0.05959686279296875, 0.059654720306396486, 0.05969004821777344, 0.059805694580078124, 0.05979142379760742, 0.05983321762084961, 0.06001996612548828, 0.060033790588378905, 0.059854881286621094, 0.0600370864868164, 0.059834369659423826, 0.06001152038574219, 0.059808448791503904, 0.05987753677368164, 0.059988128662109376, 0.060065887451171876, 0.0601794548034668, 0.06000428771972656, 0.060670944213867185, 0.06003836822509766, 0.06001334381103516, 0.06015999984741211, 0.06018048095703125, 0.0603873291015625, 0.06014976119995117, 0.06015488052368164, 0.06001971054077149, 0.060104705810546874, 0.06018620681762695, 
0.060080352783203124, 0.06015404891967773, 0.060184574127197264, 0.060032958984375, 0.06016825485229492, 0.060165855407714845, 0.06020915222167969, 0.0602578239440918, 0.06144985580444336, 0.0599315185546875, 0.06007580947875977, 0.05966780853271485, 0.05939904022216797, 0.05953279876708984, 0.059582847595214844, 0.059445247650146485, 0.05945561599731446, 0.05954764938354492, 0.05971775817871094, 0.0597125129699707, 0.059673473358154296, 0.059789310455322264, 0.05972991943359375, 0.05977251052856445, 0.059986335754394535, 0.05994694519042969, 0.05980271911621094, 0.059749343872070315, 0.05958041763305664, 0.05972172927856445, 0.05963145446777344, 0.0596841926574707, 0.059663169860839846, 0.059764801025390626, 0.059779041290283205, 0.059934688568115235, 0.05985599899291992, 0.05984156799316406, 0.05983216094970703, 0.059837825775146486, 0.05995174407958984, 0.06003497695922851, 0.05995119857788086, 0.0601099853515625, 0.059893695831298825, 0.059848670959472654, 0.05974233627319336, 0.059734848022460936, 0.05986431884765625, 0.05989868927001953, 0.05990188980102539, 0.06007350540161133, 0.05992700958251953, 0.06009369659423828, 0.060257022857666015, 0.060217342376708984, 0.06008214569091797, 0.06016175842285156, 0.060159809112548826, 0.06027065658569336, 0.06011743927001953, 0.060127391815185546, 0.05991193771362305, 0.06001264190673828, 0.06003926467895508, 0.060113822937011716, 0.060025856018066405, 0.06011904144287109, 0.05997903823852539, 0.06000918579101563, 0.06011222457885742]",tokens/s,16.716684555349172,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-180B,tiiuae/falcon-180B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in 
load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 1248, in __init__ self.transformer = FalconModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in __init__ self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 973, in <listcomp> self.h = nn.ModuleList([FalconDecoderLayer(config, layer_idx=i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 721, in __init__ self.self_attention = FALCON_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/falcon/modeling_falcon.py"", line 366, in __init__ self.query_key_value = FalconLinear(self.hidden_size, qkv_out_dim, bias=config.bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 450.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 424.12 MiB is free. Process 204110 has 14.32 GiB memory in use. Of the allocated memory 14.20 GiB is allocated by PyTorch, and 6.16 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neo,EleutherAI/gpt-neo-1.3B,EleutherAI/gpt-neo-1.3B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: GPTNeoForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,mistralai/Mistral-7B-v0.1,mistralai/Mistral-7B-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,869.834752,15021.768704,0.0,14619.246592,14483.4816,s,1,7.7386083984375,7.7386083984375,0.0,7.7386083984375,7.7386083984375,7.7386083984375,7.7386083984375,[7.7386083984375],,kWh,8.99147824169025e-06,9.842090478667755e-07,4.486670256001468e-06,1.4462357545558493e-05,,MB,1229.791232,15143.40352,0.0,14730.395648,14577.604608,s,10,13.176637329101561,1.3176637329101561,0.002744133154183554,1.3188666381835938,1.319790185546875,1.3199842041015626,1.3201394189453124,"[1.31066748046875, 1.3193587646484375, 1.32017822265625, 1.3191107177734376, 1.3190604248046875, 1.3197470703125, 1.3183209228515624, 1.31595947265625, 1.3155614013671875, 1.3186728515625]",tokens/s,194.28325573976707,kWh,3.835122724125161e-05,4.229670417937807e-06,2.543702034960016e-05,6.801791800878957e-05,tokens/kWh,3763714.143189719,MB,1278.263296,15158.083584,0.0,14745.075712,14577.607168,s,10,40.75353002929688,4.0753530029296865,0.001621132795300387,4.0749208984375,4.07738701171875,4.07786953125,4.078255546875,"[4.07527392578125, 4.074148681640625, 4.07456787109375, 4.075410888671875, 4.0743857421875, 4.073119140625, 4.07835205078125, 4.07727978515625, 4.0738330078125, 4.077158935546875]",tokens/s,15.458783559291824,kWh,0.0001190065864225009,1.3127354474380228e-05,7.904247990059982e-05,0.00021117642079748094,tokens/kWh,298328.7611471418,,s,630,40.74971666717533,0.06468208994789729,0.0002576383893258692,0.0646855354309082,0.06498955154418945,0.06506813354492187,0.06546254241943358,"[0.06556438446044922, 0.06451638031005859, 0.06424172973632812, 0.06430508422851562, 0.06432672119140626, 0.06431427001953124, 0.064204833984375, 0.06438642883300781, 0.06439794921875, 0.06447103881835937, 0.06447245025634765, 0.06444239807128906, 0.06437302398681641, 0.06438323211669922, 0.06500313568115235, 0.06475772857666015, 0.0645938262939453, 0.06455558776855469, 0.0642720947265625, 0.06459161376953125, 0.06451046752929687, 0.0645263671875, 0.06439116668701172, 0.06449065399169922, 0.0647279052734375, 0.06483270263671875, 0.06471558380126953, 0.06475126647949218, 0.0646884765625, 0.06472499084472656, 0.06475177764892578, 0.06482927703857422, 0.06469427490234375, 0.0645055389404297, 0.0646568603515625, 0.06472994995117187, 0.06470182037353515, 0.064582275390625, 0.06451964569091796, 0.06491190338134765, 0.06481510162353515, 0.06481919860839844, 0.06463488006591797, 0.06464717102050781, 0.06477005004882813, 0.06489087677001953, 0.0647240982055664, 0.06481584167480468, 0.064827392578125, 0.06493730926513672, 0.06475990295410156, 0.06498172760009766, 0.06456707000732421, 0.06483785247802734, 0.06475981140136719, 0.06510797119140625, 0.06493094635009766, 0.06477641296386719, 
0.06493840026855469, 0.06512665557861329, 0.06498099517822266, 0.06486418914794922, 0.06495442962646485, 0.06547737884521485, 0.0644361572265625, 0.06423353576660157, 0.06428813171386719, 0.06417062377929687, 0.06427442932128906, 0.06414054107666016, 0.06420146942138671, 0.06422704315185547, 0.06436409759521484, 0.06463302612304687, 0.06461698913574218, 0.06436399841308593, 0.06458000183105468, 0.06471231842041016, 0.06479209899902344, 0.06461264038085937, 0.06448403167724609, 0.06440067291259766, 0.06465404510498046, 0.06448941040039062, 0.06443218994140625, 0.0643276824951172, 0.06437888336181641, 0.06441926574707031, 0.06461481475830078, 0.06465497589111328, 0.06475945281982422, 0.06451907348632813, 0.06501766204833985, 0.06497090911865235, 0.06486835479736328, 0.06461628723144532, 0.06465961456298829, 0.0645406723022461, 0.06470861053466796, 0.06445260620117188, 0.0644947509765625, 0.06453948974609375, 0.06478150177001953, 0.06465004730224609, 0.06475379180908203, 0.06477948760986328, 0.06495721435546875, 0.0646943359375, 0.06490681457519532, 0.06482710266113281, 0.06475244903564453, 0.06480435180664063, 0.06493596649169922, 0.06489107513427735, 0.06498918151855469, 0.06461571502685547, 0.06489513397216796, 0.06477078247070313, 0.0649332504272461, 0.06469904327392578, 0.06488006591796874, 0.06498060607910157, 0.06522499084472656, 0.06493593597412109, 0.0651061782836914, 0.06488921356201172, 0.06551702117919922, 0.06442809295654296, 0.06435676574707032, 0.06436438751220704, 0.06420713806152344, 0.06438329315185547, 0.0643165740966797, 0.06430499267578126, 0.0642768325805664, 0.06441641235351563, 0.0643048324584961, 0.0646843490600586, 0.06446089935302735, 0.06437673950195312, 0.06465878295898438, 0.06477849578857423, 0.06476636505126954, 0.06454886627197266, 0.06427033233642578, 0.06446694183349609, 0.06437996673583984, 0.06449862670898437, 0.06439730834960937, 0.06441574096679688, 0.06440755462646484, 0.06470451354980469, 0.06478028869628906, 0.0648392333984375, 0.06444281768798828, 0.06493996429443359, 0.06501177978515625, 0.06472207641601563, 0.0646090850830078, 0.06462640380859375, 0.0645450210571289, 0.06476134490966796, 0.06465388488769531, 0.06450176239013672, 0.06456320190429687, 0.06467305755615234, 0.06472918701171874, 0.06487840270996094, 0.06480057525634765, 0.06499027252197266, 0.06482733154296876, 0.06489907073974609, 0.06475132751464843, 0.06481743621826172, 0.06467292785644531, 0.06484419250488281, 0.06478070068359375, 0.06486428833007812, 0.06453862762451172, 0.06472294616699219, 0.06488448333740235, 0.0648563232421875, 0.06467948913574219, 0.06491324615478515, 0.06516796875, 0.06515302276611327, 0.0650035171508789, 0.06503424072265625, 0.06501580810546875, 0.06551958465576171, 0.06444985961914063, 0.06419446563720703, 0.06435311889648437, 0.06428578948974609, 0.06435107421875, 0.0642760009765625, 0.06425039672851562, 0.06427974700927734, 0.06443289947509766, 0.0645406723022461, 0.06463692474365235, 0.06469631958007813, 0.06444000244140625, 0.06470038604736328, 0.06475196838378906, 0.06463488006591797, 0.06467378997802735, 0.06447103881835937, 0.06455705261230468, 0.0644853744506836, 0.06480413055419922, 0.06448982238769531, 0.06454937744140625, 0.06450176239013672, 0.06467990112304688, 0.06476764678955078, 0.06474982452392578, 0.06446080017089843, 0.06488451385498047, 0.06479689788818359, 0.0647741470336914, 0.06460575866699218, 0.06457183837890625, 0.06447923278808594, 0.0647557144165039, 0.06470150756835938, 0.06468112182617188, 0.06447491455078125, 
0.0644214096069336, 0.0646434555053711, 0.06487165069580078, 0.06474227142333984, 0.06473299407958985, 0.0647518081665039, 0.06497280120849609, 0.06480095672607422, 0.06485763549804688, 0.06471504211425781, 0.06487245178222656, 0.06467756652832031, 0.06504070281982421, 0.06483558654785156, 0.06493548583984375, 0.06489344024658203, 0.064910400390625, 0.06485286712646485, 0.06483558654785156, 0.06489702606201173, 0.06510361480712891, 0.06500787353515625, 0.0650662384033203, 0.06488054656982421, 0.06549062347412109, 0.06440386962890625, 0.06418163299560548, 0.06435190582275391, 0.06431843566894531, 0.0643376922607422, 0.06438524627685546, 0.06438070678710937, 0.06431731414794922, 0.06442797088623047, 0.064427490234375, 0.06464710235595703, 0.06443520355224609, 0.06440345764160156, 0.06461849975585937, 0.06495641326904297, 0.06455827331542968, 0.06461318206787109, 0.06430899047851563, 0.06453478240966797, 0.06442393493652344, 0.0645406723022461, 0.06441145324707032, 0.06437257385253906, 0.06467823791503906, 0.06463385772705078, 0.06484684753417969, 0.06472406768798829, 0.06452649688720703, 0.0647760009765625, 0.06478329467773437, 0.06473942565917969, 0.06457065582275391, 0.06466214752197266, 0.06460800170898437, 0.06476825714111328, 0.06457548522949219, 0.06460163116455078, 0.06448585510253907, 0.06461456298828125, 0.06462448120117187, 0.06478166198730469, 0.06468675231933593, 0.064753662109375, 0.064716796875, 0.06502809906005859, 0.06501171112060547, 0.06477593231201172, 0.0648911361694336, 0.06490930938720703, 0.06477823638916015, 0.06497814178466797, 0.06457218933105469, 0.06480095672607422, 0.06487785339355469, 0.06495696258544922, 0.06476390075683594, 0.0648826904296875, 0.06487859344482422, 0.06506700897216797, 0.0649175033569336, 0.06505677032470703, 0.06486630249023438, 0.06540083312988282, 0.06444236755371094, 0.06419251251220703, 0.06426435089111328, 0.06419033813476563, 0.06433379364013672, 0.06416393280029296, 0.06423331451416016, 0.06423763275146484, 0.06461542510986328, 0.06452851104736328, 0.06456204986572266, 0.06454886627197266, 0.06465331268310547, 0.06460771179199219, 0.06472067260742187, 0.06463155364990235, 0.064542236328125, 0.06433430480957031, 0.06463488006591797, 0.06446694183349609, 0.06443212890625, 0.06434758758544921, 0.06437126159667969, 0.06457062530517578, 0.06468479919433594, 0.0647741470336914, 0.06495549011230468, 0.06449654388427735, 0.0647518081665039, 0.06472275543212891, 0.0649912338256836, 0.06460006713867188, 0.06472022247314453, 0.06451631927490234, 0.06470211029052735, 0.06444246673583984, 0.06440415954589844, 0.06462464141845703, 0.06452153778076172, 0.06437709045410156, 0.06454521942138672, 0.06469535827636719, 0.06480563354492187, 0.06479408264160157, 0.06491849517822265, 0.06491110229492188, 0.06471644592285156, 0.06474076843261718, 0.0649389419555664, 0.06469017791748047, 0.064857666015625, 0.06462828826904297, 0.0646719970703125, 0.06477490997314453, 0.06501686096191406, 0.06470652770996094, 0.06490809631347656, 0.06502598571777343, 0.06512678527832032, 0.0649130859375, 0.06504454040527344, 0.06500556945800781, 0.0654694366455078, 0.06453440093994141, 0.06420889282226562, 0.06436249542236328, 0.0643028793334961, 0.06438236999511719, 0.06422406768798829, 0.06418022155761718, 0.06438201904296875, 0.0643675537109375, 0.06441574096679688, 0.06482125091552735, 0.06452838134765625, 0.06449356842041015, 0.06455490875244141, 0.06463225555419921, 0.0646869125366211, 0.06453641510009765, 0.06440067291259766, 0.06463155364990235, 0.06450163269042969, 
0.06450595092773438, 0.06443827056884766, 0.06453008270263672, 0.06450415802001953, 0.06465638732910156, 0.06469939422607422, 0.06477823638916015, 0.06461440277099609, 0.06478185272216797, 0.06496473693847657, 0.06489116668701173, 0.06468819427490234, 0.06523632049560547, 0.06503081512451171, 0.06481919860839844, 0.06465535736083984, 0.06472704315185547, 0.06470217895507813, 0.06504271697998047, 0.06477619171142578, 0.06483148956298829, 0.06464326477050782, 0.06485485076904297, 0.06483209228515625, 0.06494454193115234, 0.06495654296875, 0.06488050842285156, 0.06482540893554688, 0.06505574035644532, 0.06482982635498047, 0.06507968139648437, 0.06475955200195313, 0.06490496063232422, 0.06476665496826171, 0.06496460723876953, 0.06491484832763672, 0.06502825927734375, 0.06495049285888672, 0.06506489562988281, 0.06497110748291016, 0.06509152221679687, 0.06512025451660156, 0.06548892974853515, 0.06444601440429687, 0.06424867248535156, 0.064372802734375, 0.06407961273193359, 0.06431136322021484, 0.0643544921875, 0.06434566497802735, 0.06431180572509766, 0.06448047637939452, 0.06461068725585938, 0.06457174682617188, 0.06449356842041015, 0.06460415649414063, 0.06461990356445313, 0.06486067199707031, 0.06459510040283203, 0.06447209930419921, 0.06435628509521485, 0.06462258911132812, 0.06447510528564453, 0.06458096313476562, 0.0645577621459961, 0.06444790649414063, 0.06460681915283203, 0.06472035217285156, 0.06470444488525391, 0.0648095703125, 0.06459129333496094, 0.064795166015625, 0.06495235443115234, 0.06493539428710937, 0.06460675048828125, 0.06471807861328124, 0.06468431854248047, 0.06488111877441406, 0.06460006713867188, 0.0645665283203125, 0.06468275451660156, 0.06478825378417968, 0.06481517028808594, 0.06486441802978515, 0.06461961364746094, 0.0651539535522461, 0.06478041839599609, 0.06495836639404297, 0.06483145904541016, 0.0648224334716797, 0.06468627166748046, 0.06495094299316406, 0.06487789154052734, 0.0648477783203125, 0.06468083190917968, 0.06498400115966797, 0.06474658966064453, 0.06496678161621093, 0.06480057525634765, 0.06517654418945312, 0.06500624084472656, 0.06525885009765625, 0.06498947143554687, 0.06508614349365234, 0.06505471801757813, 0.06544566345214843, 0.06442211151123046, 0.06417926025390625, 0.06428358459472656, 0.06433586883544921, 0.06436835479736328, 0.06444258880615235, 0.06426834869384766, 0.06401638031005859, 0.06460620880126954, 0.06451567840576172, 0.06454518127441407, 0.06428876495361328, 0.06431539154052734, 0.06467382049560547, 0.06474253082275391, 0.06453129577636718, 0.06452633666992187, 0.06429837036132813, 0.06452086639404297, 0.06444438171386718, 0.0646123504638672, 0.0644485092163086, 0.06440525054931641, 0.06464537811279297, 0.06467584228515624, 0.06459302520751953, 0.06481190490722656, 0.0644339828491211, 0.06483100891113282, 0.06490589141845703, 0.06469596862792969, 0.06438124847412109, 0.064563232421875, 0.06449049377441406, 0.06461746978759765, 0.06457548522949219, 0.06465299224853516, 0.06444064331054687, 0.06471206665039063, 0.06489289855957031, 0.06464374542236329, 0.06460211181640625, 0.064901123046875, 0.06496256256103515, 0.06503388977050781, 0.06486582183837891, 0.06484406280517578, 0.06470275115966796, 0.06486450958251953, 0.0647042236328125, 0.06492995452880859, 0.06465913391113282, 0.06473993682861329, 0.06477603149414063, 0.0651153564453125, 0.06478479766845703, 0.0649832992553711, 0.06497500610351563, 0.06534281921386718, 0.06491961669921875, 0.06495021057128907, 0.06500825500488282, 0.0654010238647461, 0.06436646270751953, 
0.06428185272216796, 0.06436275482177735, 0.06417062377929687, 0.06424486541748047, 0.06431423950195313, 0.06426783752441406, 0.06432608032226562, 0.06438297271728516, 0.06451200103759766, 0.06462258911132812, 0.06444825744628906, 0.06457529449462891, 0.06485964965820312, 0.06500764465332032, 0.06469667053222657, 0.06456380462646484, 0.0644912338256836, 0.06464259338378907, 0.0645183334350586, 0.06450572967529297, 0.06454544067382813, 0.06462258911132812, 0.06434547424316406, 0.0646170883178711, 0.06470652770996094, 0.06471683502197266, 0.06471270751953125, 0.06477945709228515, 0.06476403045654297, 0.06488339233398438, 0.06481919860839844, 0.06482704162597656, 0.0646824951171875, 0.06475350189208984, 0.06459334564208985, 0.06461299133300781, 0.06467577362060548, 0.06464835357666016, 0.06460707092285156, 0.06481715393066406, 0.0646123504638672, 0.06495027160644531, 0.06473728179931641, 0.06506905364990234, 0.06494207763671875, 0.06502758026123047, 0.06483814239501953, 0.06479574584960937, 0.06492995452880859, 0.06523062133789062, 0.06476659393310547, 0.06487798309326172, 0.06484678649902344, 0.06495225524902344, 0.06488694763183593, 0.0649618911743164, 0.06483939361572266, 0.06510089874267579, 0.0649459228515625, 0.06503628540039062, 0.065115234375]",tokens/s,15.4602301936366,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x7B-v0.1,mistralai/Mixtral-8x7B-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained 
return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 757, in __init__ self.block_sparse_moe = MixtralSparseMoeBlock(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in __init__ self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 704, in self.experts = nn.ModuleList([MixtralBlockSparseTop2MLP(config) for _ in range(self.num_experts)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 672, in __init__ self.w3 = nn.Linear(self.hidden_dim, self.ffn_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 112.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 100.12 MiB is free. Process 180426 has 14.64 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 49.54 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen,Qwen/Qwen-72B,Qwen/Qwen-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 553, in from_pretrained model_class = get_class_from_dynamic_module( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 540, in get_class_from_dynamic_module final_module = get_cached_module_file( File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 365, in get_cached_module_file modules_needed = check_imports(resolved_module_file) File ""/usr/local/lib/python3.10/dist-packages/transformers/dynamic_module_utils.py"", line 197, in check_imports raise ImportError( ImportError: This modeling file requires the following packages that were not found in your environment: transformers_stream_generator. 
Run `pip install transformers_stream_generator` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-30b,facebook/opt-30b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2_moe,Qwen/Qwen1.5-MoE-A2.7B,Qwen/Qwen1.5-MoE-A2.7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1262, in __init__ self.model = Qwen2MoeModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in __init__ [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 1030, in [Qwen2MoeDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 797, in __init__ self.self_attn = QWEN2MOE_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2_moe/modeling_qwen2_moe.py"", line 402, in __init__ 
self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 20.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 12.12 MiB is free. Process 82151 has 14.73 GiB memory in use. Of the allocated memory 12.27 GiB is allocated by PyTorch, and 2.34 GiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.3b,EleutherAI/pythia-1.3b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.177728,3354.329088,0.0,2959.081472,2942.567424,s,1,7.613796875,7.613796875,0.0,7.613796875,7.613796875,7.613796875,7.613796875,[7.613796875],,kWh,1.060672593750193e-05,1.1526767979209956e-06,4.308059002006148e-06,1.6067461737429074e-05,,MB,1100.869632,3547.267072,0.0,3141.533696,3105.830912,s,10,2.598595458984375,0.2598595458984375,0.0010426096901951518,0.25929754638671876,0.2613603698730469,0.2614039855957031,0.2614388781738281,"[0.2589762268066406, 0.25927383422851563, 0.2591113586425781, 0.2612887878417969, 0.2588882141113281, 0.2593212585449219, 0.2588128356933594, 0.26144760131835937, 0.2601246643066406, 0.2613506774902344]",tokens/s,985.1475692951993,kWh,7.642937999786426e-06,8.428794512388161e-07,5.078194945743549e-06,1.3564012396768791e-05,tokens/kWh,18873471.397075996,MB,1122.267136,3589.210112,0.0,3183.476736,3163.057152,s,10,11.634897216796876,1.1634897216796873,0.01150969353260174,1.1660227661132812,1.177038916015625,1.1774024536132812,1.1776932836914062,"[1.1777659912109375, 1.1690953369140624, 1.1769581298828125, 1.1456146240234375, 1.141520751953125, 1.158644775390625, 1.1683118896484375, 1.163733642578125, 1.1710692138671874, 1.162182861328125]",tokens/s,54.147448684848904,kWh,3.4141401808964265e-05,3.763897673904306e-06,2.2511104903456173e-05,6.041640438632475e-05,tokens/kWh,1042763.1475245495,,s,630,11.63173612594604,0.018463073215787374,0.0004140495528841139,0.018481151580810547,0.01874390411376953,0.018877248573303224,0.020176229000091556,"[0.01915817642211914, 0.018645503997802734, 0.01864534378051758, 0.018683904647827147, 0.01863065528869629, 0.01859174346923828, 0.018743263244628907, 0.01859177589416504, 0.01863680076599121, 0.01860323143005371, 0.01851798439025879, 0.018459455490112305, 0.018545984268188476, 0.018516672134399413, 0.018518016815185546, 0.018530303955078126, 
0.018343936920166014, 0.018617599487304688, 0.01888128089904785, 0.019720191955566405, 0.018764991760253907, 0.018629440307617186, 0.018517120361328125, 0.01843903923034668, 0.018597888946533202, 0.018564512252807617, 0.01841551971435547, 0.021264991760253905, 0.01870857620239258, 0.018677759170532226, 0.0184597110748291, 0.018441152572631837, 0.01844432067871094, 0.018483072280883788, 0.018518047332763674, 0.01846665573120117, 0.01838243293762207, 0.018802623748779296, 0.018848447799682616, 0.018628639221191408, 0.018695423126220703, 0.01856175994873047, 0.018448352813720703, 0.018704032897949217, 0.018551168441772462, 0.018622175216674804, 0.01847324752807617, 0.018397184371948243, 0.018577407836914063, 0.018581504821777343, 0.018597440719604494, 0.018526655197143554, 0.01868720054626465, 0.018505727767944336, 0.01845737648010254, 0.01865100860595703, 0.018757888793945313, 0.018592767715454102, 0.018566015243530274, 0.018669567108154296, 0.0188272647857666, 0.02052412796020508, 0.018760608673095702, 0.019173887252807616, 0.01856515121459961, 0.018584800720214845, 0.018772735595703124, 0.018481151580810547, 0.01867923164367676, 0.018670143127441405, 0.018517183303833007, 0.01865609550476074, 0.018343711853027345, 0.018409664154052735, 0.018737152099609376, 0.018492992401123048, 0.01833011245727539, 0.018454368591308595, 0.018607616424560547, 0.01874390411376953, 0.0188723201751709, 0.01862860870361328, 0.018597728729248048, 0.0183338565826416, 0.018350080490112306, 0.018353471755981444, 0.01835897636413574, 0.018587648391723634, 0.018322528839111327, 0.018250656127929688, 0.018601984024047852, 0.018689056396484376, 0.018640863418579103, 0.01863987159729004, 0.01855824089050293, 0.01849212837219238, 0.01848518371582031, 0.018372671127319336, 0.018610176086425782, 0.018583200454711915, 0.018686304092407225, 0.018759359359741212, 0.018594079971313477, 0.01859177589416504, 0.018491071701049806, 0.01811030387878418, 0.018466272354125977, 0.018508800506591795, 0.018609312057495116, 0.018506591796875, 0.018573312759399413, 0.018521600723266602, 0.01877452850341797, 0.018448383331298827, 0.018290176391601562, 0.01842243194580078, 0.019279712677001952, 0.01880825614929199, 0.018588224411010743, 0.018382623672485353, 0.018372352600097657, 0.018700767517089843, 0.018443391799926757, 0.018590591430664063, 0.018300928115844727, 0.018391040802001952, 0.01912214469909668, 0.018615968704223634, 0.018544288635253908, 0.01843846321105957, 0.01849081611633301, 0.01859270477294922, 0.0184770565032959, 0.018661376953125, 0.018464767456054687, 0.01859286308288574, 0.018313215255737304, 0.018231935501098633, 0.018313280105590822, 0.01870038414001465, 0.01880281639099121, 0.01875712013244629, 0.01857155227661133, 0.0184117431640625, 0.01826201629638672, 0.018356224060058594, 0.0184586238861084, 0.018534400939941405, 0.01839308738708496, 0.018525279998779298, 0.018576383590698242, 0.018540096282958985, 0.018446783065795898, 0.01880054473876953, 0.018499839782714845, 0.01836796760559082, 0.018321695327758788, 0.018495487213134765, 0.018573312759399413, 0.018684064865112305, 0.01864687919616699, 0.018534400939941405, 0.018464767456054687, 0.018461952209472655, 0.018471424102783202, 0.01841177558898926, 0.018421728134155272, 0.01835775947570801, 0.018788671493530272, 0.0192589111328125, 0.018747392654418944, 0.018756288528442383, 0.018497535705566406, 0.01860416030883789, 0.018743167877197264, 0.018602272033691407, 0.018791807174682616, 0.01875299263000488, 0.020423295974731446, 0.02287379264831543, 
0.019982271194458008, 0.018472896575927735, 0.018618335723876955, 0.018572000503540038, 0.018523679733276368, 0.018452863693237304, 0.01832969665527344, 0.018447391510009764, 0.018688735961914064, 0.019151359558105468, 0.018565120697021483, 0.0186711368560791, 0.018683488845825196, 0.018404224395751952, 0.01847200012207031, 0.018675872802734375, 0.01859436798095703, 0.01855072021484375, 0.018579296112060547, 0.018532800674438476, 0.018745344161987306, 0.01865228843688965, 0.018468767166137694, 0.018283296585083007, 0.017977535247802736, 0.0180296630859375, 0.017764415740966797, 0.017879583358764647, 0.018177759170532225, 0.017926752090454103, 0.017930240631103517, 0.018165760040283203, 0.01825939178466797, 0.018391616821289064, 0.018549888610839844, 0.01827110481262207, 0.018323295593261717, 0.018049184799194335, 0.018135040283203126, 0.01815872001647949, 0.017953664779663085, 0.018306911468505858, 0.018048864364624023, 0.0178670711517334, 0.017969152450561524, 0.017928064346313475, 0.017834112167358397, 0.017920000076293945, 0.017885183334350584, 0.017870847702026366, 0.018112512588500978, 0.017946624755859376, 0.017923967361450195, 0.01800614356994629, 0.017890687942504882, 0.01804729652404785, 0.017918399810791016, 0.018187551498413085, 0.01793903923034668, 0.018024511337280273, 0.017971136093139647, 0.01816927909851074, 0.018047552108764648, 0.017889280319213868, 0.018025983810424806, 0.01786684799194336, 0.01809244728088379, 0.018081792831420897, 0.018470943450927733, 0.018034656524658202, 0.01807155227661133, 0.01803878402709961, 0.019248159408569335, 0.018264415740966797, 0.018201120376586916, 0.018126880645751953, 0.0180644474029541, 0.017988544464111328, 0.018008064270019532, 0.017983104705810545, 0.01795929527282715, 0.01816307258605957, 0.017906303405761718, 0.018077695846557617, 0.018044927597045898, 0.0182476806640625, 0.018198528289794923, 0.018542591094970702, 0.01819340705871582, 0.018042911529541017, 0.017951263427734374, 0.01801875114440918, 0.01817100715637207, 0.017941375732421876, 0.01800595283508301, 0.017991743087768554, 0.01801420783996582, 0.018089984893798827, 0.018304479598999022, 0.017986080169677735, 0.017942527770996093, 0.017890464782714843, 0.018221920013427734, 0.0179866886138916, 0.017883264541625976, 0.018486143112182617, 0.017899391174316406, 0.018054336547851563, 0.018550880432128908, 0.018064096450805665, 0.0184586238861084, 0.01919385528564453, 0.018245311737060548, 0.018097631454467772, 0.01823369598388672, 0.01802668762207031, 0.018104000091552733, 0.017971839904785156, 0.01803878402709961, 0.018059263229370116, 0.01799782371520996, 0.018000160217285156, 0.017947999954223633, 0.0178383674621582, 0.018153568267822266, 0.017989183425903322, 0.017989599227905273, 0.017948703765869142, 0.017990079879760743, 0.018013408660888672, 0.01821676826477051, 0.017945056915283204, 0.01791779136657715, 0.017948543548583985, 0.018207359313964843, 0.019017791748046874, 0.018353567123413086, 0.01842367935180664, 0.01818191909790039, 0.018381599426269532, 0.018128992080688477, 0.01809596824645996, 0.017976831436157227, 0.018232032775878905, 0.019179519653320314, 0.01912575912475586, 0.01806617546081543, 0.018194175720214843, 0.018440160751342773, 0.018519872665405272, 0.01822537612915039, 0.017967103958129883, 0.01800396728515625, 0.017928192138671875, 0.017874431610107423, 0.01815123176574707, 0.017820575714111327, 0.017900447845458984, 0.017869632720947267, 0.017979167938232423, 0.01790390396118164, 0.018062431335449217, 0.01773251152038574, 0.017835039138793946, 
0.017943103790283202, 0.018127264022827147, 0.018128896713256838, 0.018380800247192384, 0.018563039779663087, 0.01864297676086426, 0.018546688079833985, 0.018666847229003906, 0.01852892875671387, 0.01871254348754883, 0.018462751388549806, 0.018625696182250975, 0.018572128295898438, 0.01861631965637207, 0.018713951110839844, 0.018639520645141603, 0.018643295288085938, 0.018576799392700197, 0.018433439254760743, 0.0186243839263916, 0.01866156768798828, 0.01849193572998047, 0.018604288101196288, 0.018542591094970702, 0.0186562557220459, 0.01860304069519043, 0.018597856521606445, 0.01869158363342285, 0.018481344223022462, 0.018732799530029296, 0.018628511428833008, 0.018681568145751955, 0.018477760314941406, 0.01870275115966797, 0.019372320175170897, 0.018633216857910157, 0.018700288772583007, 0.018638111114501952, 0.018627296447753905, 0.018667520523071288, 0.018702335357666015, 0.018638496398925782, 0.018542943954467775, 0.018540544509887694, 0.018548736572265623, 0.018481151580810547, 0.018522111892700196, 0.018528255462646484, 0.01848271942138672, 0.018483680725097658, 0.018583295822143554, 0.018630720138549803, 0.018600128173828126, 0.018509824752807616, 0.018382848739624022, 0.01844223976135254, 0.018425504684448243, 0.018441791534423827, 0.018500383377075196, 0.018597183227539064, 0.018356319427490234, 0.018641504287719726, 0.01863039970397949, 0.018530527114868165, 0.018608095169067383, 0.01853830337524414, 0.01844207954406738, 0.01869455909729004, 0.018479040145874023, 0.018626623153686524, 0.018547840118408203, 0.01845542335510254, 0.01845583915710449, 0.018614112854003908, 0.018520959854125975, 0.018308832168579103, 0.018440479278564452, 0.018630271911621095, 0.018813312530517576, 0.018699392318725586, 0.018529151916503905, 0.01860403251647949, 0.018302463531494142, 0.018612735748291014, 0.018563072204589845, 0.0186060791015625, 0.018599584579467775, 0.01863462448120117, 0.01857174491882324, 0.018333696365356447, 0.01819593620300293, 0.018275936126708983, 0.018099071502685547, 0.018028608322143556, 0.018503488540649413, 0.018728992462158204, 0.01857142448425293, 0.01909212875366211, 0.018731008529663085, 0.018849504470825194, 0.018647327423095703, 0.018636064529418947, 0.018555616378784178, 0.018554367065429688, 0.0186549129486084, 0.018440704345703125, 0.018395328521728517, 0.01872697639465332, 0.018593215942382814, 0.018815616607666015, 0.018366176605224608, 0.01814659118652344, 0.018371679306030272, 0.018470815658569336, 0.018675647735595702, 0.018245567321777345, 0.018237567901611327, 0.018718719482421875, 0.018937023162841796, 0.0185860481262207, 0.0185797119140625, 0.01857472038269043, 0.018526975631713866, 0.018696191787719727, 0.01840742492675781, 0.018647008895874024, 0.018501855850219726, 0.018554943084716797, 0.018552448272705076, 0.018368640899658204, 0.01794047927856445, 0.018092031478881835, 0.018190336227416993, 0.018163711547851562, 0.018231296539306642, 0.018569215774536133, 0.018777727127075195, 0.018545087814331056, 0.018450368881225587, 0.0186494083404541, 0.018482879638671876, 0.018374656677246092, 0.018577215194702148, 0.018309215545654296, 0.01807369613647461, 0.018247615814208983, 0.018144832611083985, 0.01804547119140625, 0.018030559539794922, 0.017956256866455078, 0.018500192642211914, 0.018366464614868162, 0.018257408142089843, 0.018102272033691406, 0.018548351287841797, 0.01874390411376953, 0.01872105598449707, 0.018513120651245118, 0.01847785568237305, 0.018448383331298827, 0.01924723243713379, 0.018618240356445312, 0.018518016815185546, 
0.01878153610229492, 0.01866803169250488, 0.01847862434387207, 0.018487936019897462, 0.018589599609375, 0.018273439407348633, 0.01895315170288086, 0.018391040802001952, 0.018358272552490236, 0.018290592193603517, 0.018337888717651366, 0.018515968322753908, 0.01838489532470703, 0.01876201629638672, 0.018587360382080077, 0.01846886444091797, 0.018615743637084962, 0.018727615356445314, 0.019367040634155272, 0.02104944038391113, 0.01850828742980957, 0.01828700828552246, 0.01854745674133301, 0.01820159912109375, 0.01814246368408203, 0.018064128875732423, 0.018157567977905274, 0.018495487213134765, 0.01846790313720703, 0.018348415374755858, 0.018417823791503907, 0.018800735473632812, 0.01850809669494629, 0.018624128341674803, 0.018497087478637694, 0.018617055892944337, 0.018391136169433595, 0.018425247192382813, 0.01840140724182129, 0.01817033576965332, 0.018421695709228515, 0.018053184509277342, 0.018217023849487306, 0.018267168045043945, 0.01841417694091797, 0.018372928619384766, 0.018933759689331055, 0.018532352447509767, 0.018565120697021483, 0.020483840942382814, 0.018521888732910156, 0.01857174491882324, 0.018485248565673826, 0.018572639465332032, 0.018796575546264647, 0.018586240768432617, 0.018568479537963867, 0.018586080551147462, 0.01866326332092285, 0.018590112686157227, 0.019146751403808594, 0.01827993583679199, 0.01836435127258301, 0.018192703247070313, 0.01819878387451172, 0.01812665557861328, 0.018135232925415037, 0.018195520401000975, 0.018186464309692382, 0.018559711456298828, 0.018771968841552734, 0.018520063400268554, 0.018551040649414062, 0.01847475242614746, 0.018280448913574218, 0.018205759048461913, 0.01812371253967285, 0.01822431945800781, 0.018244224548339842, 0.01805913543701172, 0.01802272033691406, 0.01799897575378418, 0.018103168487548827, 0.01829478454589844, 0.018497055053710937, 0.01852422332763672, 0.018418079376220704, 0.018662527084350587, 0.01840937614440918, 0.01868079948425293, 0.01878131294250488, 0.018405248641967773, 0.0182609920501709, 0.01814233589172363, 0.018178943634033204, 0.01802422332763672, 0.018173152923583985, 0.018174976348876954, 0.018218271255493163, 0.01847983932495117, 0.018452159881591795, 0.01824515151977539, 0.0182523193359375, 0.01819264030456543, 0.01821696090698242, 0.01860812759399414, 0.018609792709350585, 0.018448095321655273, 0.018330400466918945, 0.018632064819335936, 0.0185533447265625, 0.018530303955078126, 0.018224735260009766, 0.01814352035522461, 0.01824492835998535, 0.018461503982543946, 0.01945315170288086, 0.019262239456176757, 0.020195104598999022, 0.020130016326904296, 0.018365472793579102, 0.018611072540283203, 0.018466047286987305]",tokens/s,54.16216402938389,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-1.4b,EleutherAI/pythia-1.4b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.2304,3354.329088,0.0,2959.081472,2942.567424,s,1,7.491158203125,7.491158203125,0.0,7.491158203125,7.491158203125,7.491158203125,7.491158203125,[7.491158203125],,kWh,1.0103033966659798e-05,1.1042954326628756e-06,3.3333359999998535e-06,1.4540665399322528e-05,,MB,1107.894272,3547.267072,0.0,3141.533696,3105.830912,s,10,2.592166152954102,0.2592166152954101,0.002362535045213905,0.26002906799316405,0.2611373199462891,0.26149653167724607,0.2617839010620117,"[0.25391856384277345, 0.25702288818359376, 0.2618557434082031, 0.2610574951171875, 0.2597601623535156, 0.259385498046875, 0.2569664611816406, 0.26088134765625, 0.26102001953125, 0.2602979736328125]",tokens/s,987.5910142112442,kWh,7.643231509936003e-06,8.42910826341281e-07,5.049056745794699e-06,1.353519908207198e-05,tokens/kWh,18913648.661369473,MB,1129.177088,3589.210112,0.0,3183.476736,3163.057152,s,10,11.573644653320313,1.1573644653320314,0.012999661095040372,1.1591688232421875,1.171823876953125,1.1718432128906249,1.171858681640625,"[1.1349251708984376, 1.157430419921875, 1.171819580078125, 1.16227685546875, 1.170641357421875, 1.1583616943359376, 1.1599759521484374, 1.1343333740234376, 1.171862548828125, 1.1520177001953125]",tokens/s,54.43401960844391,kWh,3.340594418672954e-05,3.684344683805461e-06,2.217671503480601e-05,5.926700390534102e-05,tokens/kWh,1062986.077390063,,s,630,11.570615110397329,0.018366055730789425,0.00036591905731029006,0.01838521671295166,0.01866156406402588,0.018822656536102295,0.019628646717071544,"[0.019042463302612306, 0.01836031913757324, 0.01830499267578125, 0.0184421443939209, 0.018424192428588868, 0.01829449653625488, 0.018257120132446288, 0.018156320571899413, 0.018214912414550782, 0.018104320526123048, 0.01807910346984863, 0.018102432250976564, 0.018143711090087892, 0.01790755271911621, 0.01811043167114258, 0.018022592544555665, 0.017938432693481447, 0.01787446403503418, 0.017916000366210938, 0.017910144805908204, 0.01790540885925293, 0.01791756820678711, 0.017947263717651367, 0.01801935958862305, 0.01811324882507324, 0.01823904037475586, 0.018235424041748046, 0.018119071960449217, 0.017932640075683594, 0.01790483283996582, 0.017777376174926758, 0.018077695846557617, 0.01789548873901367, 0.017817535400390626, 0.018032447814941406, 0.01786092758178711, 0.018283775329589844, 0.018236032485961916, 0.017978975296020508, 0.017807775497436524, 0.01796505546569824, 0.018069055557250975, 0.017938880920410155, 0.017820991516113282, 0.01771779251098633, 0.01772764778137207, 0.017755903244018555, 0.017724832534790038, 0.017830560684204102, 0.01794476890563965, 0.017924095153808595, 0.0180633602142334, 0.01803398323059082, 0.018307775497436524, 0.017885183334350584, 0.017766271591186523, 0.017846399307250977, 0.017735679626464843, 0.017821695327758787, 0.017844224929809572, 0.017846271514892577, 0.017692256927490234, 0.017715616226196287, 0.018753568649291993, 0.01827987289428711, 0.01861894416809082, 0.01860767936706543, 0.01799622344970703, 0.01802239990234375, 0.017897472381591797, 0.018244735717773436, 0.018377599716186525, 0.018298784255981446, 0.018563167572021484, 0.01846067237854004, 0.01862841606140137, 0.01852191925048828, 0.018616479873657228, 0.019132640838623045, 0.01857472038269043, 0.018590047836303712, 0.018362655639648437, 0.018466623306274414, 0.01822972869873047, 0.018202335357666015, 0.018193536758422852, 0.018361215591430665, 0.018420799255371094, 0.018432607650756837, 0.018398719787597655, 0.018540992736816406, 0.018415935516357423, 
0.018357471466064455, 0.018239904403686523, 0.01835212707519531, 0.01821129608154297, 0.018447679519653322, 0.018277055740356447, 0.01850531196594238, 0.018547103881835936, 0.018513568878173826, 0.01842620849609375, 0.018391040802001952, 0.018096128463745118, 0.018147327423095702, 0.018128448486328125, 0.017983488082885742, 0.017975839614868164, 0.018061216354370118, 0.01816985511779785, 0.018534400939941405, 0.01827030372619629, 0.018267967224121093, 0.018118751525878905, 0.018316703796386717, 0.01836911964416504, 0.018263168334960937, 0.018295679092407226, 0.018195743560791015, 0.01836310386657715, 0.018679584503173828, 0.018495296478271483, 0.01849920082092285, 0.01835296058654785, 0.018407392501831054, 0.018617664337158203, 0.018977664947509767, 0.01842585563659668, 0.01863199996948242, 0.01872550392150879, 0.018660959243774415, 0.018445856094360353, 0.018371519088745118, 0.01845814323425293, 0.01851644706726074, 0.01852592086791992, 0.018432287216186522, 0.018499584197998048, 0.01827020835876465, 0.01845417594909668, 0.018589408874511718, 0.020296319961547852, 0.018468288421630858, 0.018520639419555663, 0.018448383331298827, 0.0188723201751709, 0.01883942413330078, 0.01887808036804199, 0.018550527572631835, 0.018292800903320312, 0.018526912689208985, 0.01870751953125, 0.018760639190673827, 0.01858355140686035, 0.01848320007324219, 0.01841152000427246, 0.01862041664123535, 0.018668991088867187, 0.018606655120849608, 0.018442176818847657, 0.01848531150817871, 0.018354175567626953, 0.018464799880981445, 0.018673631668090822, 0.01845020866394043, 0.018643167495727538, 0.018517311096191407, 0.018490047454833985, 0.018589696884155273, 0.018746912002563478, 0.018659807205200194, 0.018564895629882814, 0.019021472930908202, 0.01849158477783203, 0.01832383918762207, 0.019398656845092774, 0.018646240234375, 0.018524959564208986, 0.018452159881591795, 0.018476863861083985, 0.01843667221069336, 0.018437055587768553, 0.01850060844421387, 0.018656959533691408, 0.018554719924926757, 0.018518495559692382, 0.01842134475708008, 0.018454559326171877, 0.01854502487182617, 0.01898748779296875, 0.01831465530395508, 0.01843084716796875, 0.01834569549560547, 0.018262304306030274, 0.018667520523071288, 0.018528255462646484, 0.018464767456054687, 0.018549983978271484, 0.018475296020507813, 0.018487039566040038, 0.018509727478027344, 0.018385759353637697, 0.018321407318115233, 0.018323455810546875, 0.018282400131225587, 0.018275840759277344, 0.01862883186340332, 0.01838528060913086, 0.01845849609375, 0.018361600875854492, 0.01828748893737793, 0.01862041664123535, 0.018540191650390624, 0.018477407455444336, 0.018470624923706054, 0.018391328811645506, 0.0185031681060791, 0.01838515281677246, 0.018352575302124023, 0.018351295471191405, 0.018362079620361328, 0.018502527236938477, 0.01837059211730957, 0.01831078338623047, 0.0184036808013916, 0.018307104110717773, 0.018464767456054687, 0.018485248565673826, 0.018613344192504884, 0.018480031967163087, 0.01839468765258789, 0.018385343551635742, 0.018267711639404296, 0.018248191833496095, 0.01829875183105469, 0.01852422332763672, 0.018501216888427735, 0.018561279296875, 0.01858780860900879, 0.018509952545166016, 0.01857472038269043, 0.018446975708007813, 0.01824732780456543, 0.018339456558227538, 0.018562847137451172, 0.018637279510498046, 0.018485599517822266, 0.018296096801757814, 0.018459392547607423, 0.018411487579345704, 0.018601984024047852, 0.018497535705566406, 0.019444000244140624, 0.01851798439025879, 0.018350080490112306, 0.018497535705566406, 
0.01854182434082031, 0.018313983917236328, 0.018036319732666017, 0.018487615585327147, 0.018700351715087892, 0.0186265926361084, 0.018485248565673826, 0.018339839935302735, 0.018298879623413086, 0.018343936920166014, 0.01884320068359375, 0.021878528594970702, 0.01910223960876465, 0.018391199111938476, 0.018306175231933595, 0.018385440826416015, 0.018406848907470703, 0.018444448471069335, 0.018238208770751954, 0.01816160011291504, 0.018354240417480468, 0.01863862419128418, 0.0186144962310791, 0.018505727767944336, 0.018526111602783203, 0.018530559539794923, 0.018302175521850587, 0.01843222427368164, 0.018288415908813478, 0.018571264266967775, 0.018363008499145506, 0.018339839935302735, 0.01839427185058594, 0.01830179214477539, 0.01836358451843262, 0.01827734375, 0.018232799530029296, 0.01799942398071289, 0.018305856704711913, 0.018720640182495116, 0.01849888038635254, 0.018596511840820312, 0.01831747245788574, 0.018255872726440428, 0.018374656677246092, 0.018593631744384765, 0.019454111099243165, 0.018333311080932616, 0.018205055236816405, 0.01838809585571289, 0.018322303771972658, 0.018411104202270507, 0.02052751922607422, 0.020413984298706056, 0.01836079978942871, 0.018224672317504884, 0.01853228759765625, 0.018788896560668945, 0.018505727767944336, 0.01930905532836914, 0.018534175872802733, 0.01869238471984863, 0.018627904891967775, 0.018438848495483398, 0.01846067237854004, 0.018415615081787108, 0.018350048065185545, 0.01837171173095703, 0.018489343643188477, 0.018232000350952147, 0.01805948829650879, 0.018159616470336915, 0.018274303436279296, 0.01821059226989746, 0.01802579116821289, 0.0181844482421875, 0.01802511978149414, 0.01799577522277832, 0.018100223541259765, 0.0188272647857666, 0.01862403106689453, 0.01832803153991699, 0.018466144561767577, 0.01845305633544922, 0.018346080780029295, 0.01827769660949707, 0.018459327697753908, 0.01829680061340332, 0.01822313690185547, 0.018208736419677733, 0.018706464767456056, 0.018149375915527344, 0.017920000076293945, 0.018251775741577148, 0.018324575424194335, 0.018092960357666017, 0.01799081611633301, 0.01868067169189453, 0.018679359436035155, 0.01841596794128418, 0.018301023483276366, 0.01823315238952637, 0.018159807205200194, 0.01814873504638672, 0.01828438377380371, 0.01860585594177246, 0.018397184371948243, 0.018389055252075195, 0.018400127410888673, 0.018439807891845704, 0.018366783142089844, 0.018437503814697265, 0.018399744033813475, 0.018290943145751953, 0.01842790412902832, 0.018538463592529298, 0.018685983657836913, 0.018671615600585938, 0.018511199951171876, 0.01880950355529785, 0.01843596839904785, 0.0184586238861084, 0.01918976020812988, 0.018634752273559572, 0.018579456329345705, 0.018605535507202148, 0.018591999053955078, 0.018409631729125978, 0.018386592864990236, 0.018292671203613282, 0.01850339126586914, 0.018397216796875, 0.018489952087402343, 0.01849718475341797, 0.018430496215820314, 0.018323392868041993, 0.018409408569335938, 0.018434175491333006, 0.018298336029052734, 0.018311616897583007, 0.018255392074584962, 0.018534400939941405, 0.01845510482788086, 0.018315263748168945, 0.018487295150756835, 0.01845452880859375, 0.01826767921447754, 0.018452384948730468, 0.018260543823242187, 0.018546367645263673, 0.018382783889770507, 0.018680160522460937, 0.018450464248657227, 0.01835612869262695, 0.018667007446289064, 0.018256128311157225, 0.01845180892944336, 0.018276704788208007, 0.018393760681152345, 0.01830019187927246, 0.018598623275756836, 0.01845625686645508, 0.018633024215698242, 0.01845846366882324, 
0.018409151077270508, 0.018391519546508788, 0.018288639068603514, 0.01816166305541992, 0.018440000534057616, 0.01828883171081543, 0.018359487533569335, 0.018494144439697265, 0.01823139190673828, 0.018257951736450194, 0.018069503784179687, 0.01821900749206543, 0.018501119613647463, 0.018438432693481447, 0.018378976821899415, 0.018468320846557616, 0.01806153678894043, 0.018140832901000978, 0.018532991409301758, 0.01826806449890137, 0.018147455215454102, 0.018881759643554687, 0.018193183898925783, 0.018026208877563475, 0.017942815780639648, 0.01794767951965332, 0.017914623260498048, 0.01784649658203125, 0.017958911895751953, 0.01791328048706055, 0.01780588722229004, 0.017952127456665037, 0.017985439300537108, 0.017879776000976563, 0.017870847702026366, 0.017757568359375, 0.017737855911254884, 0.017850879669189454, 0.017756160736083985, 0.017698816299438477, 0.017752128601074217, 0.017880159378051756, 0.017840991973876952, 0.017987583160400392, 0.01795686340332031, 0.018095392227172852, 0.017965791702270507, 0.01801603126525879, 0.017860416412353516, 0.01787487983703613, 0.01782831954956055, 0.017874752044677734, 0.017833759307861328, 0.01780940818786621, 0.017811168670654298, 0.017853120803833007, 0.018271615982055664, 0.017862783432006837, 0.017779199600219727, 0.017893375396728514, 0.017838048934936523, 0.018616352081298828, 0.017977344512939454, 0.017778688430786133, 0.017880607604980468, 0.018086368560791016, 0.018020191192626954, 0.017946752548217773, 0.018068960189819336, 0.017898048400878906, 0.017837503433227538, 0.017797760009765625, 0.017874591827392577, 0.017844224929809572, 0.0179931526184082, 0.017858528137207032, 0.018099071502685547, 0.01788876724243164, 0.01828096008300781, 0.01885798454284668, 0.018491167068481446, 0.018548959732055663, 0.019050495147705078, 0.018513343811035157, 0.0191362247467041, 0.01850192070007324, 0.018522111892700196, 0.01847500801086426, 0.018773792266845703, 0.01969993591308594, 0.020068351745605468, 0.01838057518005371, 0.018460895538330076, 0.01841766357421875, 0.018722623825073243, 0.018651296615600586, 0.018711904525756835, 0.018463424682617188, 0.018406496047973633, 0.01837148857116699, 0.018655231475830078, 0.018558048248291017, 0.018688896179199218, 0.01840480041503906, 0.018535007476806642, 0.018667327880859376, 0.018558464050292968, 0.018342592239379882, 0.018448383331298827, 0.01858121681213379, 0.018474720001220704, 0.01864147186279297, 0.018817024230957033, 0.018400415420532227, 0.018483999252319337, 0.018851903915405272, 0.01901705551147461, 0.01849616050720215, 0.018757631301879883, 0.01883456039428711, 0.018652032852172852, 0.018683904647827147, 0.01869331169128418, 0.018473472595214844, 0.018624832153320312, 0.018469087600708006, 0.0186345272064209, 0.018871871948242188, 0.01842118453979492, 0.01838387107849121, 0.01846886444091797, 0.01839923286437988, 0.018547775268554688, 0.01848201560974121, 0.018597984313964845, 0.018364032745361327, 0.018212287902832032, 0.01813190460205078, 0.018522111892700196, 0.01869593620300293, 0.018658784866333007, 0.018496288299560546, 0.01846272087097168, 0.018476800918579103, 0.018425952911376952, 0.018315391540527345, 0.018413183212280273, 0.019150848388671874, 0.01862041664123535, 0.018515968322753908, 0.018480703353881835, 0.018506175994873048, 0.01822425651550293, 0.018183040618896484, 0.018092031478881835, 0.01803638458251953, 0.01822960090637207, 0.018288639068603514, 0.01832111930847168, 0.018357984542846678, 0.018182527542114257, 0.018198720932006834, 0.018592992782592774, 0.01856105613708496, 
0.01827302360534668, 0.019986431121826173, 0.018534400939941405, 0.018386943817138672, 0.01846067237854004, 0.018449951171875, 0.018217439651489257, 0.018054239273071288, 0.018031520843505858, 0.018033727645874024, 0.018011072158813476, 0.018120704650878908, 0.018182144165039063, 0.018096128463745118, 0.018128896713256838, 0.017919647216796876, 0.01848512077331543, 0.018495071411132814, 0.01829158401489258, 0.018335744857788085, 0.01845180892944336, 0.018164384841918946, 0.018201759338378906, 0.01823174476623535, 0.018198623657226562, 0.017982879638671876, 0.01789139175415039, 0.01803664016723633, 0.017882047653198244, 0.01792527961730957, 0.01786556816101074, 0.018120704650878908, 0.018124799728393554, 0.017960960388183594, 0.01781273651123047, 0.018192352294921874, 0.01858639907836914, 0.018356224060058594, 0.01845043182373047, 0.018343936920166014, 0.018388992309570314, 0.018276031494140626, 0.018383167266845704, 0.018312416076660155, 0.0182906551361084, 0.018258752822875975]",tokens/s,54.448272109050016,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-34B,01-ai/Yi-34B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in 
__init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 280.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 42.12 MiB is free. Process 22574 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 1.64 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-12b,EleutherAI/pythia-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 718, in __init__ self.dense_4h_to_h = nn.Linear(config.intermediate_size, config.hidden_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 200.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 176.12 MiB is free. Process 46952 has 14.57 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 4.94 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-4.5B,facebook/xglm-4.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,xglm,facebook/xglm-7.5B,facebook/xglm-7.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: XGLMForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-14B,Qwen/Qwen1.5-14B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in <listcomp> [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 270, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 134.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 44.12 MiB is free. Process 70148 has 14.70 GiB memory in use. Of the allocated memory 14.58 GiB is allocated by PyTorch, and 905.50 KiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-70b-hf,meta-llama/Llama-2-70b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 234.12 MiB is free. Process 167521 has 14.51 GiB memory in use. Of the allocated memory 14.39 GiB is allocated by PyTorch, and 1.74 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,Deci/DeciCoder-1b,Deci/DeciCoder-1b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in 
load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 248, in __init__ self.model = DeciCoderModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in __init__ self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 215, in <listcomp> self.layers = nn.ModuleList([DeciCoderDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 181, in __init__ self.self_attn = DeciCoderAttention(config=config) File ""/root/.cache/huggingface/modules/transformers_modules/Deci/DeciCoder-1b/d045c14763eab7225fe79a6bc309890fda7b1483/modeling_decicoder.py"", line 54, in __init__ self._init_rope() File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1729, in __getattr__ raise AttributeError(f""'{type(self).__name__}' object has no attribute '{name}'"") AttributeError: 'DeciCoderAttention' object has no attribute '_init_rope' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Meta-Llama-3-70B,meta-llama/Meta-Llama-3-70B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", 
line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in <listcomp> [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 290, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 448.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 362.12 MiB is free. Process 173736 has 14.38 GiB memory in use. Of the allocated memory 14.27 GiB is allocated by PyTorch, and 1.78 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,togethercomputer/RedPajama-INCITE-Base-7B-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,887.947264,14333.902848,0.0,13931.380736,13915.964416,s,1,7.45382275390625,7.45382275390625,0.0,7.45382275390625,7.45382275390625,7.45382275390625,7.45382275390625,[7.45382275390625],,kWh,9.367333849968417e-06,1.0257979931460827e-06,4.785559383990723e-06,1.5178691227105223e-05,,MB,1248.129024,14749.138944,0.0,14336.131072,14287.960064,s,10,12.18843566894531,1.218843566894531,0.005143965134939329,1.2207818603515626,1.2232420654296876,1.2236216430664062,1.2239253051757812,"[1.2068980712890625, 1.2126597900390625, 1.216154296875, 1.2195396728515624, 1.2210064697265626, 1.224001220703125, 1.222531494140625, 1.2205572509765625, 1.2219296875, 1.22315771484375]",tokens/s,210.03515705650207,kWh,3.556225262916541e-05,3.9220533323976504e-06,2.3661796707200032e-05,6.31461026687631e-05,tokens/kWh,4054090.263382751,MB,1273.454592,14916.911104,0.0,14503.903232,14466.632704,s,10,38.20714453125,3.8207144531249995,0.0035915956113266568,3.82033154296875,3.8246840576171874,3.8258331176757814,3.826752365722656,"[3.817904541015625, 3.813291748046875, 3.8196650390625, 3.820501953125, 3.822408447265625, 3.8201611328125, 3.8188349609375, 3.8244287109375, 3.826982177734375, 3.8229658203125]",tokens/s,16.489062653837344,kWh,0.0001117688011883441,1.2327662865341871e-05,7.429297610099804e-05,0.00019838944015468404,tokens/kWh,317557.2245724317,,s,630,38.20337556457521,0.060640278673928884,0.00033924910267618235,0.06065254402160644,0.06107974357604981,0.06115693302154541,0.06127070381164551,"[0.06105459213256836, 0.06021590423583984, 0.060117374420166014, 0.06004121780395508, 0.06011843109130859, 0.06011145782470703, 0.06012313461303711, 0.06007603073120117, 0.06013542556762695, 0.060200958251953124, 0.06021529769897461, 0.06023372650146484, 0.06030099105834961, 0.06029657745361328, 0.060152767181396484, 0.06021529769897461, 0.06042534255981445, 0.060409950256347655, 0.06034707260131836, 0.06029935836791992, 0.06025830459594726, 0.0603422737121582, 0.060243839263916014, 0.06039302444458008, 0.060426815032958985, 0.060606464385986325, 0.060429534912109374, 0.06050406265258789, 0.06059292984008789, 0.06061056137084961, 0.060604480743408205, 0.06058115386962891, 0.06059222412109375, 0.060684608459472655, 0.06076559829711914, 0.060697441101074216, 0.06057369613647461, 0.06064643096923828, 0.06068524932861328, 0.060657470703125, 0.06070841598510742, 0.060803329467773434, 0.060801441192626954, 0.060727294921875, 0.06072652816772461, 0.06087260818481445, 0.06095491027832031, 0.06088963317871094, 0.06091500854492188, 0.06114377593994141, 0.061007808685302735, 0.06098940658569336, 0.06085846328735352, 
0.06095443344116211, 0.06093628692626953, 0.06089532852172851, 0.060870655059814455, 0.06104883193969726, 0.061077503204345705, 0.06120243072509766, 0.06104678344726563, 0.0610689926147461, 0.06107955169677735, 0.06113488006591797, 0.06025417709350586, 0.05996748733520508, 0.059979774475097655, 0.05991219329833984, 0.05989734268188476, 0.059953662872314455, 0.05997475051879883, 0.060041664123535156, 0.06019465637207031, 0.06024051284790039, 0.06020505523681641, 0.06010835266113281, 0.0602751350402832, 0.06030131149291992, 0.0604139518737793, 0.060418048858642576, 0.060385280609130856, 0.060288032531738284, 0.06028134536743164, 0.060211681365966795, 0.06022665786743164, 0.06012377548217773, 0.060171585083007816, 0.06027772903442383, 0.060393470764160156, 0.06053273773193359, 0.06062451171875, 0.06047574234008789, 0.060561439514160154, 0.06062492752075195, 0.060639198303222654, 0.06067359924316406, 0.06073731231689453, 0.06059228897094727, 0.0606929931640625, 0.06071705627441406, 0.06072073745727539, 0.060555679321289066, 0.06064332962036133, 0.06048972702026367, 0.0605615348815918, 0.06057561492919922, 0.0606453742980957, 0.0607006721496582, 0.0607825927734375, 0.06077439880371094, 0.0607652816772461, 0.06075484848022461, 0.060919807434082034, 0.060919807434082034, 0.06092595291137695, 0.060851295471191405, 0.06084710311889648, 0.06077635192871094, 0.060724254608154296, 0.060804065704345704, 0.0609233283996582, 0.06084438323974609, 0.06098281478881836, 0.06092444610595703, 0.061007232666015626, 0.061071807861328126, 0.06127123260498047, 0.060276863098144534, 0.059924320220947264, 0.05998678588867187, 0.059873279571533204, 0.06014771270751953, 0.0600370864868164, 0.060010528564453124, 0.060181854248046875, 0.060248737335205076, 0.06027987289428711, 0.06031660842895508, 0.06025791931152344, 0.06026633453369141, 0.060120769500732425, 0.060299518585205075, 0.06044246292114258, 0.060478206634521484, 0.06028239822387695, 0.06034268951416016, 0.06026412963867187, 0.06036313629150391, 0.060217342376708984, 0.06023987197875977, 0.06045840072631836, 0.060561729431152345, 0.060578079223632814, 0.0605797119140625, 0.06062911987304687, 0.060740673065185546, 0.060668094635009766, 0.0606416015625, 0.060691967010498046, 0.06088800048828125, 0.06072934341430664, 0.06066175842285156, 0.06071705627441406, 0.06077849578857422, 0.06050585556030273, 0.060571903228759764, 0.06060227203369141, 0.06077356719970703, 0.06072208023071289, 0.06080435180664062, 0.060785408020019534, 0.061001728057861325, 0.06100787353515625, 0.06107136154174805, 0.06107136154174805, 0.06102220916748047, 0.06108160018920898, 0.06112870407104492, 0.06096486282348633, 0.060978206634521484, 0.06086550521850586, 0.06091286468505859, 0.06101449584960938, 0.061089248657226564, 0.06108451080322266, 0.0611710090637207, 0.06111097717285156, 0.0612782096862793, 0.06117990493774414, 0.06108966445922852, 0.06034675216674805, 0.060037120819091794, 0.060098560333251956, 0.05996543884277344, 0.06004940795898438, 0.06011638259887695, 0.06011759948730469, 0.06018563079833984, 0.06035555267333984, 0.06016204833984375, 0.06025187301635742, 0.06032735824584961, 0.06040396881103516, 0.060418655395507816, 0.060491199493408206, 0.06059065628051758, 0.06064879989624023, 0.06046966552734375, 0.06046284866333008, 0.06035302352905274, 0.060375038146972655, 0.0602501106262207, 0.060388481140136716, 0.060498817443847656, 0.06048972702026367, 0.06052777481079102, 0.06051107025146484, 0.060614654541015625, 0.06101923370361328, 0.060645408630371093, 
0.06064831924438477, 0.0607375373840332, 0.06077849578857422, 0.06066134262084961, 0.06078300857543945, 0.06061875152587891, 0.06055526351928711, 0.06054275131225586, 0.06052272033691406, 0.06064886474609375, 0.06076870346069336, 0.0607020149230957, 0.06082851028442383, 0.060819454193115234, 0.060821502685546876, 0.06074492645263672, 0.06093084716796875, 0.061059070587158204, 0.061159423828125, 0.0609400634765625, 0.06090323257446289, 0.06086627197265625, 0.061129409790039065, 0.06092771148681641, 0.0608955192565918, 0.060945728302001956, 0.061175807952880856, 0.061149887084960934, 0.061087745666503906, 0.061037601470947264, 0.06125993728637695, 0.06118876647949219, 0.061104736328125, 0.06036684799194336, 0.06013951873779297, 0.06011084747314453, 0.05997964859008789, 0.06010070419311524, 0.06027881622314453, 0.06013747024536133, 0.06017603302001953, 0.06028326416015625, 0.06015382385253906, 0.06039059066772461, 0.060402496337890625, 0.060483585357666014, 0.06047948837280273, 0.06070588684082031, 0.0608037109375, 0.0605821762084961, 0.060453887939453124, 0.06038544082641602, 0.0603798713684082, 0.060401790618896486, 0.0604541130065918, 0.06046799850463867, 0.060532257080078124, 0.06057350540161133, 0.06040611267089844, 0.060561344146728514, 0.060544513702392576, 0.06077478408813477, 0.0607400016784668, 0.060800865173339845, 0.06078694534301758, 0.06091132736206055, 0.06064774322509765, 0.06081532669067383, 0.06060851287841797, 0.060680191040039064, 0.06068633651733398, 0.06065923309326172, 0.06059980773925781, 0.06089007949829101, 0.060757854461669925, 0.06088924789428711, 0.06070431900024414, 0.06099606323242188, 0.06105084609985351, 0.06094208145141602, 0.061030368804931644, 0.06116134262084961, 0.06096108627319336, 0.06090966415405273, 0.060747776031494144, 0.06107072067260742, 0.060916351318359374, 0.06108147048950195, 0.060954753875732424, 0.06102790451049805, 0.060916160583496096, 0.06096806335449219, 0.061030368804931644, 0.061244319915771485, 0.06119014358520508, 0.06104377746582031, 0.06031382369995117, 0.059971839904785156, 0.06002044677734375, 0.05996579360961914, 0.060147808074951174, 0.06005539321899414, 0.060133792877197265, 0.06011840057373047, 0.06025187301635742, 0.060246593475341795, 0.060461280822753906, 0.06042367935180664, 0.06048348617553711, 0.060395839691162106, 0.06061008071899414, 0.060584510803222656, 0.06059455871582031, 0.060391422271728515, 0.06033814239501953, 0.06027881622314453, 0.06030867385864258, 0.060326751708984376, 0.06034000015258789, 0.060239105224609374, 0.060332481384277346, 0.06032230377197265, 0.06056265640258789, 0.06056787109375, 0.060673664093017575, 0.06076707077026367, 0.06082486343383789, 0.06074236679077148, 0.06085836791992188, 0.06079283142089844, 0.06092153549194336, 0.0606162223815918, 0.06062979125976563, 0.06054297637939453, 0.06061052703857422, 0.06062492752075195, 0.0606778564453125, 0.06065129470825195, 0.06072780990600586, 0.06058393478393555, 0.06078464126586914, 0.0608092155456543, 0.06080207824707031, 0.06079996871948242, 0.061101505279541016, 0.061012542724609375, 0.06104064178466797, 0.0612694091796875, 0.06115388870239258, 0.06097433471679688, 0.0609615364074707, 0.06118195343017578, 0.06138265609741211, 0.06099494552612305, 0.06103065490722656, 0.06095500946044922, 0.06117990493774414, 0.06121670532226563, 0.061128639221191404, 0.060272926330566405, 0.059986080169677734, 0.060143295288085936, 0.05999647903442383, 0.060112895965576174, 0.059996063232421876, 0.05996758270263672, 0.060104705810546874, 0.06014976119995117, 
0.06012723159790039, 0.060265567779541014, 0.0602080307006836, 0.060452129364013674, 0.06034096145629883, 0.060483585357666014, 0.06043852615356445, 0.060423999786376956, 0.06029331207275391, 0.06024720001220703, 0.060220256805419925, 0.0603135986328125, 0.06025830459594726, 0.060295040130615235, 0.06026444625854492, 0.06055894470214844, 0.06038774490356445, 0.06052230453491211, 0.060496192932128906, 0.060633087158203126, 0.060569217681884766, 0.06063756942749023, 0.060655616760253904, 0.06077439880371094, 0.06074889755249024, 0.0607457275390625, 0.06055193710327148, 0.060719390869140626, 0.060599361419677734, 0.06059500885009766, 0.06061260986328125, 0.060732833862304686, 0.060840545654296876, 0.06085408020019531, 0.06094454574584961, 0.06105487823486328, 0.06101567840576172, 0.061118080139160154, 0.061047679901123045, 0.061095550537109376, 0.06105513763427734, 0.061013729095458984, 0.06094387054443359, 0.06083462524414063, 0.06087200164794922, 0.060900222778320315, 0.06091334533691406, 0.061137214660644534, 0.06104883193969726, 0.061110271453857425, 0.06107081604003906, 0.061329952239990236, 0.061222240447998046, 0.06139052963256836, 0.060400161743164066, 0.06007100677490235, 0.0600684814453125, 0.060078369140625, 0.06022553634643555, 0.06014691162109375, 0.060197662353515625, 0.06022300720214844, 0.06044441604614258, 0.06037481689453125, 0.06030227279663086, 0.06030950546264648, 0.06043379211425781, 0.06031542587280273, 0.06040643310546875, 0.06060579299926758, 0.060615520477294925, 0.060469249725341796, 0.060434432983398435, 0.06034352111816406, 0.06041433715820312, 0.06037750244140625, 0.06032793426513672, 0.06039340972900391, 0.060556831359863283, 0.06054352188110351, 0.06070460891723633, 0.06062505722045899, 0.06081740951538086, 0.06075596618652344, 0.06082355117797852, 0.06106009674072266, 0.0610816650390625, 0.06081631851196289, 0.060878849029541014, 0.06068841552734375, 0.06069449615478516, 0.06073548889160156, 0.060716991424560544, 0.06083795166015625, 0.060854270935058595, 0.06076416015625, 0.06079209518432617, 0.06080179214477539, 0.06093756866455078, 0.06097164916992188, 0.06082944107055664, 0.06111257553100586, 0.06121881484985352, 0.06121408081054688, 0.061200126647949216, 0.06088972854614258, 0.060878238677978515, 0.06087964630126953, 0.06101408004760742, 0.06123891067504883, 0.06108607864379883, 0.061069313049316405, 0.06115078353881836, 0.061077953338623044, 0.06119833755493164, 0.061160831451416015, 0.061371265411376955, 0.06032998275756836, 0.06010675048828125, 0.060246017456054686, 0.060055553436279295, 0.06033203125, 0.06017977523803711, 0.060174625396728514, 0.06029558563232422, 0.06032793426513672, 0.06029660797119141, 0.06055382537841797, 0.06039039993286133, 0.06049459075927734, 0.06048284912109375, 0.06048838424682617, 0.060780414581298826, 0.06065961456298828, 0.06047385787963867, 0.06047516632080078, 0.060360927581787106, 0.06050534439086914, 0.06041196823120117, 0.06042489624023437, 0.06065151977539063, 0.06076211166381836, 0.0607457275390625, 0.06066899108886719, 0.060822078704833984, 0.06088947296142578, 0.06079487991333008, 0.06100582504272461, 0.06097100830078125, 0.06107955169677735, 0.06093414306640625, 0.06087401580810547, 0.060813697814941406, 0.06073174285888672, 0.060633087158203126, 0.06069657516479492, 0.06075596618652344, 0.06083916854858398, 0.06082227325439453, 0.06094768142700195, 0.06085302352905273, 0.0610175666809082, 0.06096131134033203, 0.061146495819091796, 0.06120470428466797, 0.06127196884155273, 0.06118451309204102, 
0.061093887329101565, 0.06094780731201172, 0.061026241302490236, 0.06088777542114258, 0.06094435119628906, 0.060972862243652344, 0.06110844802856445, 0.061052928924560546, 0.06101103973388672, 0.06101084899902344, 0.06108160018920898, 0.06113798522949219, 0.061198463439941404, 0.06031497573852539, 0.06011743927001953, 0.06014384078979492, 0.06001663970947266, 0.060170238494873046, 0.06014361572265625, 0.0602883186340332, 0.06012793731689453, 0.06052403259277344, 0.06025212860107422, 0.06040630340576172, 0.0603704948425293, 0.06042464065551758, 0.06041190338134766, 0.060508159637451174, 0.06077644729614258, 0.06071696090698242, 0.060510303497314455, 0.06053683090209961, 0.060375038146972655, 0.06036684799194336, 0.06045286560058594, 0.06049116897583008, 0.06043913650512695, 0.060469249725341796, 0.06053180694580078, 0.06070569610595703, 0.060639232635498044, 0.060698623657226565, 0.06082336044311523, 0.060709056854248045, 0.060747390747070314, 0.061087936401367185, 0.06083782577514649, 0.06081561660766602, 0.06082559967041016, 0.06082559967041016, 0.06065356826782226, 0.06060435104370117, 0.060618240356445315, 0.060797504425048825, 0.060739200592041014, 0.06096108627319336, 0.060775550842285156, 0.06087763214111328, 0.060915328979492187, 0.061088287353515625, 0.06094025421142578, 0.06103859329223633, 0.06101606369018555, 0.06097628784179687, 0.06104560089111328, 0.06104064178466797, 0.060956417083740236, 0.06085657501220703, 0.06076169586181641, 0.06092144012451172, 0.06102713775634765, 0.06100582504272461, 0.061230430603027346, 0.06101059341430664, 0.06099967956542969]",tokens/s,16.490689387777017,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-72B,Qwen/Qwen1.5-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() 
File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 76285 has 14.69 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 1.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 557, in from_pretrained cls.register(config.__class__, model_class, exist_ok=True) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 584, in register raise ValueError( ValueError: The model class you are passing has a `config_class` attribute that is not consistent with the config class you passed (model has and you passed . Fix one of those so they match! 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-110B,Qwen/Qwen1.5-110B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 272, in __init__ self.down_proj = nn.Linear(self.intermediate_size, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, 
in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 768.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 406.12 MiB is free. Process 79215 has 14.34 GiB memory in use. Of the allocated memory 14.23 GiB is allocated by PyTorch, and 1.77 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-16B-nl,Salesforce/codegen-16B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: 
CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2-large,openai-community/gpt2-large,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,789.630976,2266.89024,0.0,1864.368128,1714.486272,s,1,7.70576806640625,7.70576806640625,0.0,7.70576806640625,7.70576806640625,7.70576806640625,7.70576806640625,[7.70576806640625],,kWh,4.537870545808194e-06,4.932864583729137e-07,1.014723034001297e-06,6.045880038182405e-06,,MB,1180.413952,2281.570304,0.0,1868.562432,1692.384256,s,10,1.5490482482910157,0.15490482482910156,0.0010436719351170306,0.154751708984375,0.15629789733886718,0.15637816619873046,0.15644238128662108,"[0.15395120239257812, 0.15472402954101563, 0.15525740051269532, 0.1546455078125, 0.15294073486328125, 0.15412252807617188, 0.15645843505859375, 0.15628005981445312, 0.15477938842773437, 0.1558889617919922]",tokens/s,1652.627671748969,kWh,4.7785950752021346e-06,5.266760230146795e-07,3.187533912032119e-06,8.492805010248932e-06,tokens/kWh,30143162.32282088,MB,1219.698688,2430.468096,0.0,2017.460224,1714.451968,s,10,13.486646118164062,1.3486646118164063,0.009215509032999005,1.3478015747070313,1.3593811279296875,1.3594043090820314,1.3594228540039064,"[1.3298787841796875, 1.3496878662109375, 1.359427490234375, 1.345915283203125, 1.3593759765625, 1.3434456787109375, 1.3401126708984374, 1.344683837890625, 1.35698828125, 1.3571302490234376]",tokens/s,46.712873940653374,kWh,3.878728988646765e-05,4.278211829441039e-06,2.0688262966369626e-05,6.375376468227831e-05,tokens/kWh,988176.9384751668,,s,630,13.47876117324828,0.021394859005156017,0.0006018733923358622,0.02129535961151123,0.02168241539001465,0.02187785005569458,0.023264937515258793,"[0.021112735748291016, 0.02122956848144531, 0.021258367538452148, 0.021095487594604494, 0.021297279357910155, 0.020967552185058594, 0.020952959060668946, 0.021000543594360353, 0.021102943420410157, 0.021021856307983398, 0.02111734390258789, 0.021352895736694334, 0.020977888107299805, 0.021224767684936523, 0.021070175170898438, 0.02120627212524414, 0.02112828826904297, 0.021061632156372072, 0.02129270362854004, 0.020904064178466797, 0.0210882568359375, 0.02117398452758789, 0.021485727310180665, 0.02129747200012207, 0.02101638412475586, 0.020935968399047853, 0.02100111961364746, 0.021058847427368164, 0.02106172752380371, 0.020988351821899415, 0.02099836730957031, 0.0210982723236084, 0.021121023178100586, 0.021319679260253906, 
0.021194751739501954, 0.020940000534057618, 0.021044191360473634, 0.020960704803466797, 0.021004671096801757, 0.020946495056152342, 0.021055936813354492, 0.0209451847076416, 0.020936384201049804, 0.021047615051269532, 0.021636831283569337, 0.02109644889831543, 0.02105548858642578, 0.02100419235229492, 0.020840063095092773, 0.02100649642944336, 0.021005823135375978, 0.021095232009887697, 0.02094304084777832, 0.02116697692871094, 0.02110371208190918, 0.021440319061279297, 0.021125152587890626, 0.021020767211914062, 0.020954784393310548, 0.02098412895202637, 0.02122083282470703, 0.021087871551513673, 0.021195072174072266, 0.021258464813232424, 0.021186559677124024, 0.02122319984436035, 0.0212457275390625, 0.02103126335144043, 0.021055583953857423, 0.021135295867919922, 0.021098207473754883, 0.021876512527465822, 0.021406272888183593, 0.021817344665527344, 0.02129680061340332, 0.021444959640502928, 0.021428224563598632, 0.022654016494750975, 0.02160246467590332, 0.021361087799072264, 0.02144905662536621, 0.021367904663085937, 0.021273183822631835, 0.021414400100708008, 0.02125312042236328, 0.02117487907409668, 0.021240032196044922, 0.02128460884094238, 0.02156563186645508, 0.02131769561767578, 0.022133760452270508, 0.02128963279724121, 0.021411808013916015, 0.021330015182495117, 0.021259967803955077, 0.02127667236328125, 0.02128281593322754, 0.021273183822631835, 0.021221471786499024, 0.021325727462768555, 0.021170175552368165, 0.021132896423339844, 0.021261728286743165, 0.02153494453430176, 0.021332128524780274, 0.021439104080200194, 0.021134719848632813, 0.021401248931884765, 0.02117731285095215, 0.02105548858642578, 0.02112495994567871, 0.021181856155395508, 0.02127471923828125, 0.021750431060791015, 0.021415775299072265, 0.021264768600463866, 0.02129689598083496, 0.021310848236083986, 0.02131350326538086, 0.02144256019592285, 0.021487903594970704, 0.021075456619262696, 0.022043487548828126, 0.02315817642211914, 0.02232387161254883, 0.021481472015380858, 0.021487583160400392, 0.021539392471313475, 0.02147327995300293, 0.02153267288208008, 0.021469247817993163, 0.02159609603881836, 0.021495487213134764, 0.02346015930175781, 0.02271996879577637, 0.02168284797668457, 0.021651327133178713, 0.02146918487548828, 0.021268287658691407, 0.021212608337402343, 0.021398208618164063, 0.021786687850952148, 0.021712223052978517, 0.021406368255615236, 0.021598207473754884, 0.02125004768371582, 0.0213703670501709, 0.021285375595092772, 0.021282623291015625, 0.021391456604003906, 0.02166793632507324, 0.021712896347045898, 0.021302719116210938, 0.02313065528869629, 0.02149782371520996, 0.02145686340332031, 0.021649375915527343, 0.021291391372680664, 0.021353919982910155, 0.02197324752807617, 0.021552352905273436, 0.02165567970275879, 0.02127347183227539, 0.02195542335510254, 0.021300224304199217, 0.021317632675170898, 0.02208732795715332, 0.021325983047485352, 0.021696704864501953, 0.021427488327026366, 0.02232908821105957, 0.021355487823486327, 0.021151519775390624, 0.02132809638977051, 0.02129315185546875, 0.021347455978393555, 0.021355295181274415, 0.02185740852355957, 0.021407743453979493, 0.02143833541870117, 0.021380096435546874, 0.021288192749023438, 0.021325759887695313, 0.021485952377319335, 0.02128940773010254, 0.02174176025390625, 0.021370687484741212, 0.021395456314086913, 0.02131974411010742, 0.021231712341308592, 0.021509632110595703, 0.021372608184814453, 0.021844064712524414, 0.021299936294555663, 0.021189727783203126, 0.021209344863891602, 0.021410463333129882, 0.021321727752685548, 
0.02129100799560547, 0.021376415252685545, 0.021280927658081053, 0.021294591903686523, 0.02143123245239258, 0.021481472015380858, 0.021737407684326172, 0.02159132766723633, 0.021295616149902344, 0.021147167205810547, 0.021555967330932617, 0.02130534362792969, 0.021191871643066407, 0.021287296295166017, 0.021641664505004883, 0.021321727752685548, 0.021323776245117186, 0.021384607315063475, 0.02121548843383789, 0.02135465621948242, 0.021169599533081056, 0.021232383728027344, 0.0216494083404541, 0.0212992000579834, 0.021214303970336915, 0.02139638328552246, 0.021178304672241213, 0.021250112533569336, 0.02126438331604004, 0.021211135864257814, 0.021718687057495117, 0.02133171272277832, 0.021297439575195313, 0.021397024154663085, 0.021334335327148436, 0.021277183532714843, 0.021044864654541015, 0.021107040405273437, 0.021557247161865235, 0.021216703414916993, 0.021156415939331055, 0.02122137641906738, 0.02164121627807617, 0.02134000015258789, 0.02135580825805664, 0.021209983825683593, 0.021093887329101564, 0.021222944259643554, 0.02121004867553711, 0.02122742462158203, 0.021597824096679687, 0.021771039962768555, 0.021470943450927735, 0.021538816452026367, 0.021197696685791016, 0.021335391998291015, 0.0212872314453125, 0.02132124710083008, 0.02111520004272461, 0.021330528259277344, 0.021538719177246094, 0.02126223945617676, 0.02109769630432129, 0.02145574378967285, 0.021201087951660157, 0.02112006378173828, 0.021203712463378908, 0.021647008895874023, 0.021466943740844728, 0.021379615783691405, 0.021325824737548828, 0.021331968307495116, 0.02231497573852539, 0.021616287231445312, 0.021303680419921874, 0.021528383255004883, 0.021333951950073243, 0.0212423038482666, 0.021291872024536133, 0.021410911560058594, 0.021282783508300783, 0.021161888122558595, 0.021640832901000977, 0.02136617660522461, 0.02147657585144043, 0.02129484748840332, 0.02127052879333496, 0.021272064208984375, 0.02117091178894043, 0.021796640396118165, 0.021356544494628905, 0.02125823974609375, 0.021964799880981444, 0.021473056793212892, 0.021463264465332033, 0.02144256019592285, 0.021399616241455078, 0.02154457664489746, 0.021429855346679686, 0.0212488956451416, 0.021337919235229492, 0.02122550392150879, 0.02162883186340332, 0.021346399307250977, 0.021235712051391603, 0.02124185562133789, 0.02125823974609375, 0.021440511703491212, 0.02169785690307617, 0.02137900733947754, 0.02146790313720703, 0.022425600051879883, 0.021301279067993163, 0.02139321517944336, 0.030193824768066407, 0.02263654327392578, 0.021393440246582032, 0.021180416107177736, 0.021295103073120117, 0.02129715156555176, 0.021192991256713867, 0.021755615234375, 0.021456895828247072, 0.021331296920776368, 0.021242816925048827, 0.021216991424560547, 0.02131772804260254, 0.021390975952148436, 0.021600160598754883, 0.021150079727172852, 0.021243904113769533, 0.021386592864990235, 0.02140604782104492, 0.02162719917297363, 0.021620031356811523, 0.02116636848449707, 0.021203359603881835, 0.021243295669555663, 0.021167711257934572, 0.021238784790039062, 0.021540864944458008, 0.021100576400756837, 0.021348320007324218, 0.021102592468261717, 0.02126665687561035, 0.021162847518920898, 0.021361183166503907, 0.021241983413696288, 0.021133312225341795, 0.021268768310546873, 0.02163443183898926, 0.02125654411315918, 0.02145686340332031, 0.021258560180664063, 0.02121516799926758, 0.02153254318237305, 0.021442752838134765, 0.021257247924804688, 0.021451744079589843, 0.02107561683654785, 0.021377376556396484, 0.02138470458984375, 0.021122655868530273, 0.021395776748657228, 
0.021199520111083985, 0.02139129638671875, 0.02136288070678711, 0.021118816375732423, 0.02174563217163086, 0.021141376495361328, 0.02160812759399414, 0.021237632751464845, 0.02118508720397949, 0.021241952896118164, 0.02126963233947754, 0.021459743499755858, 0.02118169593811035, 0.021096960067749023, 0.0211843204498291, 0.02108870315551758, 0.02109235191345215, 0.021077728271484374, 0.021125408172607423, 0.021047456741333008, 0.02092620849609375, 0.020981632232666015, 0.021291231155395506, 0.021252063751220702, 0.021587648391723634, 0.021133663177490235, 0.02106572723388672, 0.020955135345458984, 0.021127168655395507, 0.02115331268310547, 0.02109881591796875, 0.021878944396972657, 0.022001535415649413, 0.021317760467529298, 0.02151219177246094, 0.021071359634399413, 0.02120147132873535, 0.021129215240478515, 0.021213119506835937, 0.02103036880493164, 0.021625375747680663, 0.021374015808105468, 0.021207231521606446, 0.02128767967224121, 0.021735424041748046, 0.021102592468261717, 0.021060928344726563, 0.021099199295043947, 0.021567487716674806, 0.021348352432250976, 0.021183872222900392, 0.023308544158935546, 0.021817855834960938, 0.021253087997436523, 0.021410816192626952, 0.021208736419677736, 0.021516511917114258, 0.021276704788208006, 0.021274463653564453, 0.021483680725097657, 0.021080127716064455, 0.02121321678161621, 0.021108160018920897, 0.021043424606323243, 0.021352703094482423, 0.021196735382080077, 0.02092246437072754, 0.02113324737548828, 0.02092019271850586, 0.021012319564819335, 0.0210762882232666, 0.021136831283569336, 0.02147577667236328, 0.02096976089477539, 0.021034847259521483, 0.021104639053344726, 0.021000192642211913, 0.02112089538574219, 0.02101478385925293, 0.02124595260620117, 0.021128608703613282, 0.02127519989013672, 0.021031999588012697, 0.021751007080078124, 0.021239551544189453, 0.021215360641479494, 0.02147212791442871, 0.02126540756225586, 0.021245407104492187, 0.02117689514160156, 0.021354463577270506, 0.021332128524780274, 0.021221216201782227, 0.0216014404296875, 0.02136150360107422, 0.02125823974609375, 0.021106880187988283, 0.02103481674194336, 0.02165724754333496, 0.02124630355834961, 0.02168627166748047, 0.021364831924438478, 0.021350303649902345, 0.02123504066467285, 0.02109097671508789, 0.021133472442626953, 0.021108768463134767, 0.021526336669921875, 0.021143264770507812, 0.021180896759033202, 0.02122857666015625, 0.02144540786743164, 0.021327423095703124, 0.021209087371826172, 0.02150169563293457, 0.021263328552246094, 0.021075904846191405, 0.022068096160888673, 0.023024703979492186, 0.0212488956451416, 0.021193695068359376, 0.022015743255615235, 0.021250303268432618, 0.02109574317932129, 0.02119455909729004, 0.021234111785888674, 0.02130556869506836, 0.02120627212524414, 0.021621728897094728, 0.021263967514038085, 0.022409631729125978, 0.021086208343505858, 0.021147520065307616, 0.021116064071655272, 0.021183359146118165, 0.02159014320373535, 0.021186208724975585, 0.021160255432128905, 0.021065792083740233, 0.02115987205505371, 0.0209532470703125, 0.02112086486816406, 0.021170175552368165, 0.021142911911010743, 0.021023359298706055, 0.021188608169555666, 0.02105763244628906, 0.021019647598266602, 0.021529504776000977, 0.021268640518188477, 0.021315711975097656, 0.021134527206420898, 0.021400096893310547, 0.021137216567993163, 0.021682367324829102, 0.02141904067993164, 0.021265375137329103, 0.02141619110107422, 0.02135424041748047, 0.021383264541625976, 0.021548959732055666, 0.021336063385009766, 0.021431936264038085, 0.021055871963500977, 
0.020999967575073244, 0.02184009552001953, 0.021366783142089844, 0.021112831115722656, 0.02091007995605469, 0.021687423706054688, 0.02102681541442871, 0.02088640022277832, 0.021329631805419923, 0.02110892868041992, 0.021141504287719725, 0.02108844757080078, 0.021083520889282226, 0.02560771179199219, 0.027659263610839844, 0.021485567092895508, 0.02145065689086914, 0.02128435134887695, 0.021897727966308594, 0.021140640258789062, 0.021684255599975586, 0.02114975929260254, 0.021846303939819334, 0.021340736389160155, 0.021190143585205077, 0.021327552795410157, 0.021582656860351563, 0.021149728775024416, 0.021212480545043946, 0.021231456756591795, 0.02161257553100586, 0.022438432693481444, 0.02205516815185547, 0.021389440536499025, 0.021516319274902343, 0.02149760055541992, 0.02145289611816406, 0.02179680061340332, 0.02134841537475586, 0.021397504806518555, 0.021567487716674806, 0.021430240631103516, 0.021272512435913087, 0.02141798400878906, 0.02127872085571289, 0.02453673553466797, 0.026198368072509765, 0.021502239227294922, 0.02156719970703125, 0.021329919815063478, 0.02143846321105957, 0.021904672622680664, 0.021658464431762694, 0.02131135940551758, 0.021383167266845703, 0.021435712814331053, 0.021785120010375976, 0.02149545669555664, 0.021334720611572267, 0.02130620765686035, 0.021181407928466796, 0.02160771179199219, 0.021543519973754883, 0.02122969627380371, 0.021306943893432618, 0.021337791442871092, 0.02153766441345215, 0.021510015487670897, 0.021405887603759766, 0.02150281524658203, 0.021373504638671874, 0.021638847351074218, 0.021463552474975587, 0.02144278335571289, 0.021399551391601563, 0.021333791732788085, 0.021792991638183594, 0.02152560043334961, 0.021295679092407226, 0.02126896095275879, 0.021421951293945314, 0.02172211265563965, 0.02132655906677246, 0.021215520858764648, 0.02129715156555176, 0.021231359481811523, 0.021760255813598632, 0.02141187286376953, 0.021387231826782226, 0.021278976440429687, 0.021652864456176757, 0.0213919677734375, 0.02126620864868164, 0.021213184356689452, 0.021209184646606444, 0.021431360244750976, 0.021279584884643554, 0.02121104049682617, 0.021117023468017578, 0.02126643180847168, 0.021482912063598633, 0.021115488052368164, 0.021102527618408203, 0.021298751831054688]",tokens/s,46.740200520087875,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-65b,huggyllama/llama-65b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in 
target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 686, in __init__ self.mlp = LlamaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 288, in __init__ self.gate_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=config.mlp_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 344.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 66.12 MiB is free. Process 159880 has 14.67 GiB memory in use. Of the allocated memory 14.56 GiB is allocated by PyTorch, and 1.71 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,huggyllama/llama-30b,huggyllama/llama-30b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 354, in __init__ self.v_proj = 
nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 86.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 2.12 MiB is free. Process 157028 has 14.74 GiB memory in use. Of the allocated memory 14.53 GiB is allocated by PyTorch, and 90.39 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,meta-llama/Llama-2-13b-hf,meta-llama/Llama-2-13b-hf,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 1110, in __init__ self.model = LlamaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in __init__ [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 898, in [LlamaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 684, in __init__ self.self_attn = LLAMA_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/llama/modeling_llama.py"", line 353, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 164672 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 3.02 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-7B,Qwen/Qwen1.5-7B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.0,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,899.956736,15848.046592,0.0,15445.52448,15444.76416,s,1,7.8238046875,7.8238046875,0.0,7.8238046875,7.8238046875,7.8238046875,7.8238046875,[7.8238046875],,kWh,9.725720520833646e-06,1.0654028245020105e-06,5.030837358001999e-06,1.5821960703337656e-05,,MB,1260.908544,16051.470336,0.0,15638.462464,15612.9408,s,10,12.274579711914061,1.2274579711914062,0.0023581352589667226,1.2279326782226563,1.229719189453125,1.2297327026367186,1.2297435131835937,"[1.222148193359375, 1.224143310546875, 1.227859130859375, 1.2278927001953126, 1.2288448486328125, 1.22697607421875, 1.2292803955078124, 1.2297462158203125, 1.2297161865234374, 
1.22797265625]",tokens/s,208.56111248478757,kWh,3.580456846458256e-05,3.948063695316251e-06,2.3829102396599743e-05,6.358173455649855e-05,tokens/kWh,4026313.559793168,MB,1298.432,16053.567488,0.0,15640.559616,15627.385344,s,10,40.13578686523438,4.013578686523437,0.004727177273263984,4.0140830078125,4.01863203125,4.021255346679688,4.0233539990234375,"[4.0086669921875, 4.00740966796875, 4.014601806640625, 4.018049072265625, 4.023878662109375, 4.01403369140625, 4.01413232421875, 4.010464599609375, 4.01548046875, 4.009069580078125]",tokens/s,15.696714807545138,kWh,0.00011716388184958495,1.2924753896251023e-05,7.778897889780049e-05,0.00020787761464363645,tokens/kWh,303062.934929288,,s,630,40.13267690277102,0.06370266175043016,0.0002888157395679966,0.06367745590209961,0.06405080032348633,0.06416662788391113,0.06472485122680664,"[0.06470870208740234, 0.063534912109375, 0.06325676727294922, 0.06321152114868164, 0.06318489456176758, 0.06330697631835938, 0.06315075302124024, 0.06312995147705078, 0.06325225448608399, 0.06335283279418945, 0.06333440017700195, 0.06350233459472657, 0.06333849716186524, 0.06335692977905273, 0.06363340759277344, 0.06351667022705078, 0.06344838333129883, 0.06371795272827148, 0.06345536041259765, 0.0635239372253418, 0.06333071899414063, 0.0633779182434082, 0.0633446388244629, 0.06348799896240234, 0.06346342468261719, 0.06360063934326173, 0.06352896118164063, 0.0634958381652832, 0.06335318374633789, 0.06351827239990235, 0.06389519882202148, 0.06364755249023438, 0.06360150527954102, 0.06370931243896484, 0.06352191925048828, 0.06364985656738281, 0.0636424331665039, 0.06365398406982421, 0.06403846740722656, 0.0637239990234375, 0.06356057739257813, 0.06362214279174805, 0.06367027282714843, 0.06374195098876953, 0.06361907196044922, 0.06370524978637696, 0.06374383926391601, 0.06365923309326171, 0.06375708770751953, 0.06387664031982422, 0.06376291275024414, 0.0637050895690918, 0.06382899093627929, 0.06393344116210938, 0.06374604797363281, 0.06390784072875977, 0.06406143951416016, 0.06416944122314452, 0.06407782745361328, 0.06383830261230469, 0.06379974365234375, 0.06407545471191406, 0.06401261138916016, 0.06477273559570312, 0.06358451080322265, 0.0632176628112793, 0.06325043106079102, 0.06310873413085938, 0.0632547836303711, 0.06335091018676758, 0.06340745544433593, 0.06340265655517578, 0.06340403366088868, 0.06345318222045898, 0.06334873580932618, 0.0632273292541504, 0.06331167984008788, 0.0635192642211914, 0.06350460815429687, 0.06357104110717773, 0.06352083206176758, 0.06348995208740234, 0.06335174560546875, 0.06321145629882813, 0.06386819076538086, 0.06347446441650391, 0.06350643157958985, 0.06347161483764649, 0.0635412483215332, 0.06337535858154297, 0.06355945587158203, 0.06358192062377929, 0.06375065612792968, 0.06360019302368164, 0.06352934265136718, 0.06362118530273438, 0.06362521743774414, 0.06362726211547852, 0.06366003036499024, 0.06353852844238281, 0.06347433471679688, 0.0635781135559082, 0.0635821762084961, 0.06351865768432617, 0.06367987060546874, 0.06358473587036133, 0.0635733757019043, 0.06356057739257813, 0.06381075286865234, 0.06366451263427735, 0.06375878524780274, 0.06372147369384766, 0.06395654296875, 0.06374991989135742, 0.06364636611938476, 0.06359040069580078, 0.06426780700683593, 0.0638427848815918, 0.06372556686401368, 0.06378700637817383, 0.06380335998535157, 0.06387702560424804, 0.06394073486328125, 0.06394675064086915, 0.06402175903320312, 0.0638862075805664, 0.06473750305175781, 0.06346614456176758, 0.0632034568786621, 0.06317596817016602, 
0.06338633728027344, 0.06326291275024414, 0.06315398406982421, 0.06328742218017579, 0.06361856079101562, 0.06352896118164063, 0.06365222549438476, 0.06341632080078125, 0.06340534210205079, 0.0634372787475586, 0.06364531326293946, 0.06378764724731445, 0.06347113418579102, 0.06361913681030273, 0.06353731155395508, 0.06344319915771485, 0.06330572891235352, 0.06333440017700195, 0.06332527923583985, 0.06348384094238281, 0.06366511917114258, 0.06376825714111328, 0.06392044830322266, 0.06382592010498046, 0.06365702438354492, 0.06379375839233399, 0.06375664138793945, 0.06398735809326171, 0.06385654449462891, 0.06384684753417968, 0.0637287368774414, 0.06353171157836913, 0.06350204849243164, 0.0637834243774414, 0.06352671813964844, 0.0635558090209961, 0.06378287887573242, 0.06376652908325195, 0.06384640121459961, 0.06383817672729492, 0.06373174285888672, 0.06380505752563477, 0.06389497756958008, 0.0639595832824707, 0.06385043334960938, 0.06426175689697265, 0.06387948989868164, 0.06404560089111327, 0.06380124664306641, 0.06398575973510742, 0.06401433563232421, 0.06386892700195312, 0.06410594940185547, 0.0639218864440918, 0.06408013153076172, 0.06385516738891601, 0.06409136199951172, 0.06435676574707032, 0.06414988708496094, 0.06492160034179688, 0.0635494384765625, 0.06331187057495118, 0.06317657470703125, 0.0632176628112793, 0.06332956695556641, 0.06330249786376953, 0.0634859504699707, 0.06329344177246093, 0.06341804885864258, 0.063482177734375, 0.06350233459472657, 0.06345318222045898, 0.06355148696899414, 0.06357606506347656, 0.06365184020996094, 0.0634511375427246, 0.06354739379882812, 0.06339379119873047, 0.06331558227539062, 0.06330716705322266, 0.06352790451049804, 0.06346956634521485, 0.06351855850219726, 0.06331532669067383, 0.06355759811401367, 0.06354377746582031, 0.06371343994140626, 0.06374588775634765, 0.06366243362426757, 0.06392131042480469, 0.06380630493164062, 0.06395676803588868, 0.06396912002563476, 0.06372153472900391, 0.06381600189208984, 0.06375446319580078, 0.06375196838378906, 0.06392422485351562, 0.06396063995361329, 0.06382636642456055, 0.06384230422973633, 0.06384435272216797, 0.06404300689697266, 0.06418841552734375, 0.06421024322509766, 0.0641645736694336, 0.06407084655761719, 0.06401904296875, 0.06423165130615234, 0.06408188629150391, 0.0640117416381836, 0.06391862487792968, 0.06396883010864257, 0.06399132919311523, 0.06416681671142578, 0.06416175842285156, 0.06415977478027343, 0.06397321701049805, 0.06415171051025391, 0.0641794204711914, 0.06431209564208984, 0.06429695892333985, 0.06496870422363281, 0.06365971374511718, 0.0635082893371582, 0.06345536041259765, 0.06335676956176758, 0.0635621452331543, 0.06352710342407226, 0.06357609558105469, 0.063532958984375, 0.06363750457763671, 0.06361088180541992, 0.06367436981201172, 0.0638680648803711, 0.06380950546264648, 0.06386953735351562, 0.06382620620727539, 0.0637350082397461, 0.0638144302368164, 0.06377171325683594, 0.06378118515014648, 0.06356396865844727, 0.06365462493896484, 0.06377443313598632, 0.06375385665893554, 0.06396297454833984, 0.0638669776916504, 0.06399020767211915, 0.06412083435058594, 0.06396928024291992, 0.06408995056152343, 0.06397148895263671, 0.0640552978515625, 0.06390480041503906, 0.06397875213623047, 0.0638768653869629, 0.06377264022827149, 0.06381324768066406, 0.0639853744506836, 0.06372214508056641, 0.06374195098876953, 0.06371100616455078, 0.06385385513305664, 0.06407878112792968, 0.06409625244140625, 0.06382601547241211, 0.06412483215332031, 0.06419251251220703, 0.06385862350463867, 
0.06404467010498047, 0.06405923461914062, 0.06389580917358398, 0.06395273590087891, 0.06387763214111328, 0.06417155456542968, 0.06400252532958985, 0.0638361587524414, 0.06385372924804687, 0.063838623046875, 0.06392860794067383, 0.06416524505615234, 0.06412108612060546, 0.06405538940429688, 0.06394924926757813, 0.06486217498779297, 0.0635470085144043, 0.06333071899414063, 0.06319718551635742, 0.06339696121215821, 0.06341510391235351, 0.06360073471069336, 0.06327500915527344, 0.06332771301269531, 0.06362531280517578, 0.06326521682739258, 0.06338313674926757, 0.06349456024169922, 0.06352828979492188, 0.06347024154663086, 0.06347161483764649, 0.06346342468261719, 0.06347900772094726, 0.0634620475769043, 0.06346531295776367, 0.06338896179199219, 0.06340915298461915, 0.06331596755981446, 0.0634463996887207, 0.06328124618530273, 0.06354150390625, 0.06347600173950195, 0.06357334518432617, 0.0634820785522461, 0.06361542510986327, 0.06355955123901368, 0.06349590301513672, 0.06365980911254883, 0.06364633560180664, 0.06379251098632813, 0.06385318374633789, 0.06378905487060547, 0.06370278549194336, 0.06398796844482423, 0.06367023849487305, 0.06378704071044922, 0.06395481491088867, 0.0638526725769043, 0.0638966064453125, 0.06416639709472656, 0.06417865753173828, 0.06387209701538087, 0.06383222579956055, 0.06382672119140625, 0.06423260498046875, 0.06409503936767579, 0.06397308731079102, 0.06392393493652344, 0.06397395324707031, 0.06398361587524413, 0.0640401611328125, 0.06393116760253906, 0.0641638412475586, 0.06401433563232421, 0.06396742248535156, 0.06393980789184571, 0.06420537567138672, 0.06412496185302734, 0.06472589111328125, 0.06358220672607422, 0.06339788818359375, 0.06340607833862305, 0.06347331237792969, 0.0634144630432129, 0.06349158477783202, 0.06364227294921875, 0.06344908905029296, 0.06349795150756836, 0.06345347213745117, 0.06358425521850586, 0.06377881622314453, 0.06378086471557617, 0.06381897735595703, 0.06468246459960937, 0.06382774353027344, 0.06378758239746093, 0.06388323211669922, 0.06372307205200195, 0.06339628982543945, 0.06341379165649413, 0.0634659538269043, 0.06339993667602539, 0.06349619293212891, 0.06365923309326171, 0.06361782455444336, 0.06367436981201172, 0.06344812774658203, 0.06365894317626954, 0.06367804718017578, 0.06383657455444336, 0.06367436981201172, 0.06392243194580079, 0.06377827072143555, 0.06344323348999023, 0.06341433715820312, 0.06365996932983399, 0.06358348846435546, 0.06358822250366211, 0.06352985763549805, 0.06366972732543945, 0.06354179382324218, 0.06370463943481446, 0.06370742416381836, 0.06375558471679688, 0.06382799911499024, 0.06376031875610351, 0.06380147171020507, 0.06390000152587891, 0.06384198379516602, 0.063785888671875, 0.06372307205200195, 0.06371955108642578, 0.06371340942382812, 0.06367027282714843, 0.06393382263183593, 0.0639628791809082, 0.06385343933105468, 0.06398771286010742, 0.06387507247924805, 0.06405030059814452, 0.06431423950195313, 0.0647674560546875, 0.06347260665893555, 0.06325212860107422, 0.06320323181152344, 0.06306662368774414, 0.06315411376953126, 0.06344089508056641, 0.06344876861572266, 0.06335830307006836, 0.06341884613037109, 0.06340460968017578, 0.06340192031860352, 0.06349004745483398, 0.06375423812866211, 0.06356598281860351, 0.06368239974975586, 0.06361907196044922, 0.06359814453125, 0.06341471862792969, 0.06335692977905273, 0.06352825546264648, 0.06352297592163086, 0.06343097686767578, 0.06349996948242187, 0.06338614273071289, 0.0637786865234375, 0.06365731048583985, 0.06380976104736329, 0.06368022537231445, 
0.0637589111328125, 0.0637746238708496, 0.06358054351806641, 0.06383411026000976, 0.06373769760131837, 0.06366223907470703, 0.06356121444702148, 0.06352742385864257, 0.06378486251831054, 0.06366831970214844, 0.0636313591003418, 0.06361439895629883, 0.06375625610351562, 0.06370969772338868, 0.06368675231933593, 0.06371942520141602, 0.06377676773071289, 0.06381132888793946, 0.06368188858032227, 0.06376950454711915, 0.06380748748779297, 0.06372505569458008, 0.06383871841430665, 0.06370099258422851, 0.0638930549621582, 0.06367686462402344, 0.06365798568725586, 0.06371123123168945, 0.063963134765625, 0.0639315185546875, 0.06395075225830078, 0.06391292953491211, 0.06402031707763672, 0.0641098861694336, 0.06464895629882812, 0.06361088180541992, 0.06329072189331054, 0.06325519943237305, 0.06319513702392578, 0.06340403366088868, 0.0632627830505371, 0.06349407958984375, 0.06365184020996094, 0.06365289688110351, 0.06366419219970704, 0.06358639907836915, 0.06360259246826172, 0.06369075012207032, 0.06378793716430664, 0.0635431022644043, 0.06342470550537109, 0.06356691360473633, 0.06336924743652343, 0.06344700622558594, 0.06347462463378906, 0.0635412483215332, 0.06350640106201172, 0.06354451370239257, 0.06357183837890625, 0.06365219116210938, 0.06388595199584961, 0.06396518325805664, 0.06371043014526367, 0.06387113571166993, 0.06380153656005859, 0.06360620880126953, 0.06360166549682617, 0.06368204879760742, 0.06359817504882813, 0.06362774276733399, 0.06357651138305664, 0.06362511825561523, 0.06379254531860351, 0.06385315322875977, 0.06394432067871093, 0.06399433517456055, 0.06390140914916992, 0.06412429046630859, 0.06382246398925781, 0.06422147369384766, 0.06377059173583985, 0.0637174072265625, 0.06384214401245117, 0.06384860610961914, 0.06365184020996094, 0.06402460479736329, 0.06385372924804687, 0.06388435363769532, 0.06371712112426758, 0.064110595703125, 0.06377062225341797, 0.06392185592651367, 0.06391574478149414, 0.06424342346191406, 0.0639947509765625, 0.06404637145996094, 0.06419939422607422, 0.06472230529785156, 0.06342716979980469, 0.06331596755981446, 0.06328323364257812, 0.06329929733276367, 0.06336947250366211, 0.06320435333251953, 0.06321363067626953, 0.06339996719360351, 0.06346435165405273, 0.06332812881469727, 0.06338751983642578, 0.06334054565429688, 0.06341247940063477, 0.0634975357055664, 0.06360543823242187, 0.06349209594726563, 0.06352640151977539, 0.06343731307983398, 0.06341408157348633, 0.063340576171875, 0.06367452621459961, 0.06372054290771484, 0.06351964950561523, 0.06346876907348632, 0.06350048065185547, 0.0635684814453125, 0.0635596809387207, 0.06380134582519531, 0.06372512054443359, 0.0635928955078125, 0.0635781135559082, 0.06357123184204101, 0.06362335968017578, 0.06364358520507812, 0.06365039825439453, 0.06345727920532226, 0.06351443099975586, 0.06358848190307617, 0.06352083206176758, 0.06352486419677734, 0.06378700637817383, 0.06368051147460937, 0.06373740768432617, 0.06369126510620117, 0.06382585525512695, 0.06368246459960937, 0.06380963134765626, 0.06373990249633789, 0.06379084777832031, 0.06375145721435548, 0.06383305740356446, 0.06371942520141602, 0.06392217636108398, 0.0637583351135254, 0.06385254287719727, 0.06385459136962891, 0.06395663833618163, 0.06393791961669922, 0.06394313430786133, 0.06417254638671875, 0.06408191680908203, 0.06395596694946289]",tokens/s,15.697931177785478,, 
bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,stabilityai/stablelm-base-alpha-3b,stabilityai/stablelm-base-alpha-3b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,888.905728,7981.62944,0.0,7579.107328,7559.873536,s,1,7.80645361328125,7.80645361328125,0.0,7.80645361328125,7.80645361328125,7.80645361328125,7.80645361328125,[7.80645361328125],,kWh,6.544163324989919e-06,7.134013637728344e-07,4.1391699779902424e-06,1.1396734666752996e-05,,MB,1240.4736,8212.31616,0.0,7799.308288,7764.228096,s,10,6.204932922363282,0.6204932922363281,0.002544505335613178,0.620578125,0.6229171203613281,0.6238391754150391,0.6245768194580078,"[0.6169404907226562, 0.6173489990234375, 0.6224830932617188, 0.6175281372070313, 0.622705322265625, 0.6192971801757813, 0.6205166625976563, 0.6227122192382812, 0.6206395874023437, 0.62476123046875]",tokens/s,412.5749676960197,kWh,1.8199632839708313e-05,2.0071123081432286e-06,1.2066790699176188e-05,3.227353584702773e-05,tokens/kWh,7932195.629676463,MB,1265.471488,8296.20224,0.0,7883.194368,7841.714176,s,10,20.009478515625,2.0009478515625,0.0007982241911104899,2.0012632446289063,2.0017003906250004,2.0017551025390627,2.001798872070313,"[2.000184326171875, 2.001809814453125, 2.0013062744140626, 2.001654052734375, 2.00122021484375, 2.001688232421875, 1.99939697265625, 2.0009942626953126, 2.0013748779296874, 1.9998494873046875]",tokens/s,31.485078409617003,kWh,5.8254768123206386e-05,6.424666205190946e-06,3.877874997722405e-05,0.0001034581843056214,tokens/kWh,608941.6745793102,,s,630,20.005920227050787,0.03175542893182663,0.00017390326631818747,0.03176462459564209,0.03197031726837158,0.03205492076873779,0.03220499465942383,"[0.03217359924316406, 0.031797407150268554, 0.03163308715820313, 0.0315152645111084, 0.03144246482849121, 0.03147977638244629, 0.03144710350036621, 0.031471391677856446, 0.03168720054626465, 0.0315392951965332, 0.03152079963684082, 0.03153436851501465, 0.03148211288452148, 0.0315765438079834, 0.03153715133666992, 0.03154060745239258, 0.03155356788635254, 0.03158652877807617, 0.0316440315246582, 0.03163545608520508, 0.03164703941345215, 0.03173439979553223, 0.031659456253051756, 0.03156159973144531, 0.03186150360107422, 0.03163955116271973, 0.03163340759277344, 0.031696895599365234, 0.03176873588562012, 0.03182780838012695, 0.03181977653503418, 0.031850496292114255, 0.031780864715576174, 0.03179929542541504, 0.03177471923828125, 0.031796575546264645, 0.03179996871948242, 0.031896640777587894, 0.031836544036865234, 0.031666751861572265, 0.03171897506713867, 0.031743520736694335, 0.031718303680419925, 0.031716703414916995, 0.031750816345214844, 0.031834112167358396, 0.031764480590820314, 0.0317255687713623, 0.031863872528076174, 0.03187398338317871, 0.0318317756652832, 0.031834400177001954, 0.03182499122619629, 0.03188422393798828, 0.03190931129455567, 0.031867424011230466, 0.03190966415405273, 0.03188553619384766, 0.03188086318969727, 0.03206335830688477, 0.03207961654663086, 0.032161983489990234, 0.03213478469848633, 
0.03260432052612305, 0.0318382396697998, 0.03150435256958008, 0.031571104049682615, 0.031447839736938474, 0.03168876838684082, 0.031430496215820315, 0.0313591365814209, 0.03143894386291504, 0.03152400016784668, 0.031480575561523436, 0.03161087989807129, 0.031950624465942386, 0.03167398452758789, 0.031672927856445314, 0.03163340759277344, 0.0317255687713623, 0.03165798377990723, 0.031573055267333984, 0.031596607208251956, 0.03177340888977051, 0.03167014312744141, 0.03156918334960938, 0.03163443183898926, 0.03171305656433106, 0.03180361557006836, 0.03165388870239258, 0.03166527938842773, 0.03174899291992188, 0.03177881622314453, 0.031928192138671874, 0.03179737663269043, 0.03176358413696289, 0.03174899291992188, 0.031768575668334964, 0.03178031921386719, 0.031765024185180665, 0.03175014305114746, 0.03169228744506836, 0.03173324775695801, 0.0317388801574707, 0.03182140731811523, 0.031789472579956055, 0.03186892890930176, 0.031889408111572266, 0.03187264060974121, 0.03192665672302246, 0.03196108818054199, 0.031916032791137694, 0.03196047973632812, 0.031830623626708986, 0.03185385513305664, 0.031970016479492186, 0.03185663986206055, 0.031835968017578126, 0.03188140869140625, 0.031921247482299804, 0.031902624130249024, 0.03188323211669922, 0.03197465515136719, 0.03196188735961914, 0.032059391021728514, 0.03204854583740235, 0.03223126220703125, 0.03178134346008301, 0.03156988716125488, 0.031513919830322264, 0.03157472038269043, 0.031475711822509765, 0.03144499206542969, 0.031475711822509765, 0.03152076721191406, 0.03152607917785644, 0.03150505638122559, 0.03153276824951172, 0.03153961563110352, 0.03164284706115723, 0.0316013126373291, 0.031586463928222654, 0.03156991958618164, 0.03161075210571289, 0.03166630363464355, 0.03167027282714844, 0.03166371154785156, 0.03166044807434082, 0.0316866569519043, 0.031666175842285156, 0.0316866569519043, 0.031740959167480466, 0.0317420482635498, 0.03173459243774414, 0.03174588775634766, 0.0317872314453125, 0.03187235260009766, 0.03180611228942871, 0.03184230422973633, 0.03182796859741211, 0.03181091117858887, 0.03179587173461914, 0.031798816680908205, 0.031789535522460936, 0.03177801513671875, 0.03177961540222168, 0.03176038360595703, 0.0317706241607666, 0.03180748748779297, 0.031854591369628905, 0.03181363105773926, 0.031768575668334964, 0.0318111686706543, 0.03177916717529297, 0.03186079978942871, 0.0318791675567627, 0.031833248138427736, 0.031862911224365235, 0.03185296058654785, 0.03189571189880371, 0.03192630386352539, 0.03194278335571289, 0.03194675254821777, 0.03195699119567871, 0.03198281669616699, 0.032072479248046876, 0.032077823638916016, 0.03211264038085938, 0.03211468887329102, 0.0321855697631836, 0.03187142372131348, 0.03172512054443359, 0.03154323196411133, 0.031527776718139645, 0.03155148887634277, 0.03151414489746094, 0.03149385643005371, 0.03154960060119629, 0.03159916877746582, 0.031582239151000974, 0.0316822395324707, 0.031760704040527346, 0.0317071361541748, 0.031647743225097655, 0.03164470481872558, 0.031624160766601565, 0.031645696640014646, 0.03163497543334961, 0.03164518356323242, 0.031642591476440426, 0.031676416397094724, 0.03169075202941894, 0.03161907196044922, 0.03168239974975586, 0.031743871688842774, 0.03170742416381836, 0.0317071361541748, 0.03171520042419434, 0.03178303909301758, 0.03181916809082031, 0.03176646423339844, 0.03179792022705078, 0.031818815231323246, 0.03176339149475098, 0.03179916763305664, 0.03175011253356934, 0.031797407150268554, 0.031731231689453125, 0.031805919647216796, 0.031762399673461915, 
0.03184576034545898, 0.0318133430480957, 0.03186345672607422, 0.0318014087677002, 0.031795072555541995, 0.03179350471496582, 0.031739007949829104, 0.03179792022705078, 0.03186483192443847, 0.03178927993774414, 0.031979455947875976, 0.031866943359375, 0.031903743743896484, 0.03188028717041016, 0.03194767951965332, 0.03205254364013672, 0.03199571228027344, 0.03194150352478027, 0.031932416915893554, 0.03189459228515625, 0.032024959564208984, 0.032057247161865234, 0.0321003532409668, 0.03167231941223145, 0.03149180793762207, 0.03147529602050781, 0.03146208000183105, 0.03145113563537598, 0.03143600082397461, 0.03141097640991211, 0.03154944038391113, 0.0315984001159668, 0.031529151916503906, 0.03153286361694336, 0.03148204803466797, 0.03155148887634277, 0.031649791717529296, 0.031659648895263674, 0.03160716819763183, 0.031692800521850584, 0.03167388725280762, 0.031654367446899416, 0.03169484710693359, 0.03160848045349121, 0.03182217597961426, 0.03170099258422852, 0.0316682243347168, 0.03169599914550781, 0.03161382484436035, 0.03165360069274902, 0.031764768600463865, 0.031854591369628905, 0.03186278343200684, 0.03180159950256348, 0.03182975959777832, 0.03178291130065918, 0.03170918464660644, 0.03179520034790039, 0.032103935241699216, 0.03177724838256836, 0.03173513603210449, 0.03170275115966797, 0.03170348739624024, 0.0318176326751709, 0.031784767150878905, 0.03181001663208008, 0.03183651161193848, 0.031850496292114255, 0.03194220733642578, 0.031858816146850585, 0.03188358306884766, 0.03187049674987793, 0.031827455520629884, 0.031884096145629884, 0.031811744689941406, 0.031954208374023435, 0.03191472053527832, 0.03193382453918457, 0.0319362564086914, 0.0320049934387207, 0.03207075119018555, 0.0320357437133789, 0.03207583999633789, 0.0321146240234375, 0.032089248657226566, 0.03223756790161133, 0.031842336654663086, 0.0316210880279541, 0.03156991958618164, 0.031455232620239255, 0.03148150444030762, 0.03148220825195312, 0.031510528564453126, 0.03160268783569336, 0.03155148887634277, 0.03159040069580078, 0.031543296813964845, 0.03151769638061523, 0.03156393623352051, 0.03157257652282715, 0.03153331184387207, 0.03151872062683105, 0.03166959953308106, 0.031578784942626954, 0.03162015914916992, 0.03157702445983887, 0.031719423294067385, 0.03166592025756836, 0.03162956809997559, 0.03170451164245605, 0.03168108749389648, 0.0317457275390625, 0.03173353576660156, 0.031815935134887695, 0.03175862312316895, 0.03180339241027832, 0.031850400924682616, 0.031873119354248046, 0.03187001609802246, 0.03178716850280762, 0.031773439407348635, 0.03177238464355469, 0.03207609558105469, 0.03184639930725098, 0.03181747245788574, 0.03184051132202149, 0.03186073684692383, 0.03183616065979004, 0.03180879974365235, 0.03180118370056152, 0.03184320068359375, 0.03178291130065918, 0.03172275161743164, 0.031803232192993164, 0.03189852714538574, 0.03188531112670898, 0.031916032791137694, 0.031881088256835936, 0.031912063598632814, 0.032, 0.03195833587646484, 0.03196784019470215, 0.032006240844726565, 0.03197881507873535, 0.03203142547607422, 0.032004096984863284, 0.032004096984863284, 0.032020481109619144, 0.032212928771972654, 0.031817728042602536, 0.031640640258789064, 0.03150739288330078, 0.03150233650207519, 0.03153919982910156, 0.03150752067565918, 0.03146847915649414, 0.031526912689208986, 0.031545343399047854, 0.03147980880737305, 0.031498239517211916, 0.03155878448486328, 0.031605632781982425, 0.031715328216552735, 0.03155094337463379, 0.031563583374023436, 0.03161084747314453, 0.03161164855957031, 0.031651199340820314, 
0.03161903953552246, 0.03169881629943848, 0.03161721611022949, 0.031574623107910156, 0.03158425521850586, 0.031646879196166995, 0.03163590431213379, 0.031679935455322265, 0.031719743728637693, 0.03177916717529297, 0.03181395149230957, 0.03173948860168457, 0.031895904541015624, 0.03182377624511719, 0.031727392196655275, 0.031668607711791995, 0.03169017601013183, 0.03170355224609375, 0.0317399673461914, 0.03168604850769043, 0.031737503051757814, 0.03173676872253418, 0.03180454444885254, 0.031816160202026364, 0.03188572883605957, 0.03187507247924805, 0.03184169578552246, 0.031775327682495115, 0.03171465682983399, 0.031873695373535155, 0.03182124710083008, 0.03180191993713379, 0.03181158447265625, 0.03181977653503418, 0.03187478446960449, 0.031869216918945314, 0.03186483192443847, 0.03188531112670898, 0.03197302436828613, 0.032045406341552736, 0.032026622772216795, 0.03201808166503906, 0.03202256011962891, 0.03228054428100586, 0.031821855545043944, 0.031698944091796875, 0.031526239395141605, 0.0315409927368164, 0.03154217529296875, 0.0314715518951416, 0.031451200485229494, 0.031468608856201175, 0.03147664070129395, 0.03142982482910156, 0.03146428871154785, 0.03151785659790039, 0.03161734390258789, 0.0317357120513916, 0.03165657615661621, 0.03168998336791992, 0.0316112003326416, 0.03161542320251465, 0.03161203193664551, 0.0316363525390625, 0.03171683120727539, 0.03168310356140137, 0.03162521553039551, 0.031682559967041016, 0.03180291175842285, 0.031742431640625, 0.031714496612548826, 0.031699775695800785, 0.03179692840576172, 0.0318275203704834, 0.03181439971923828, 0.03182156753540039, 0.03185078430175781, 0.03179926490783692, 0.031757919311523435, 0.031750560760498044, 0.03176038360595703, 0.03173542404174805, 0.0317193603515625, 0.031772127151489256, 0.03190873527526855, 0.031778591156005856, 0.031775039672851564, 0.03177449607849121, 0.03181088066101074, 0.031820064544677736, 0.03185318374633789, 0.031893503189086916, 0.031893503189086916, 0.031883264541625975, 0.031889408111572266, 0.03185868835449219, 0.0318791675567627, 0.031899648666381834, 0.03190784072875977, 0.03194393539428711, 0.03193222427368164, 0.03192108726501465, 0.03196268844604492, 0.032072128295898436, 0.03205686569213867, 0.03198409652709961, 0.03226243209838867, 0.03181769561767578, 0.03149065589904785, 0.0315043830871582, 0.03139993667602539, 0.03139379119873047, 0.03142831993103028, 0.031700927734375, 0.031530847549438475, 0.03154550361633301, 0.03156163215637207, 0.03148784065246582, 0.03148422431945801, 0.031678752899169924, 0.03167334365844727, 0.03162796783447266, 0.03163987159729004, 0.03165593528747559, 0.0316231689453125, 0.03163545608520508, 0.03163113594055176, 0.03179952049255371, 0.03157606315612793, 0.03160646438598633, 0.031656255722045896, 0.031715328216552735, 0.031698848724365236, 0.031673696517944334, 0.031740224838256836, 0.031865280151367185, 0.03186278343200684, 0.03180678367614746, 0.03180374336242676, 0.03185699272155762, 0.03177574348449707, 0.03179596710205078, 0.031762687683105466, 0.031786016464233395, 0.03171017646789551, 0.03173936080932617, 0.03173635292053223, 0.03172966384887695, 0.03192608070373535, 0.031783103942871094, 0.03189760017395019, 0.03185868835449219, 0.03188287925720215, 0.03182809638977051, 0.03193791961669922, 0.031931264877319336, 0.03191926383972168, 0.031975616455078126, 0.03202115249633789, 0.032069633483886716, 0.03191398429870605, 0.03183807945251465, 0.03189760017395019, 0.031891199111938474, 0.03191200065612793, 0.031938880920410154, 0.03197952079772949, 
0.03210649490356445, 0.03204095840454101, 0.03226009750366211, 0.031926271438598636, 0.0314768009185791, 0.031603647232055665, 0.031492095947265625, 0.031465152740478515, 0.03135519981384277, 0.031320064544677735, 0.03147952079772949, 0.03145142364501953, 0.03150643157958984, 0.03136905670166015, 0.03147996711730957, 0.03161497688293457, 0.03162521553039551, 0.03168063926696777, 0.03162259292602539, 0.03158028793334961, 0.03155507278442383, 0.03164243125915527, 0.03159040069580078, 0.03164067268371582, 0.03172035217285156, 0.03182592010498047, 0.0316682243347168, 0.03166527938842773, 0.031689599990844725, 0.03170508766174317, 0.03172867202758789, 0.03178384017944336, 0.03174336051940918, 0.03172979164123535, 0.03171180725097656, 0.031841983795166014, 0.03178937530517578, 0.03183206367492676, 0.03178070449829101, 0.031857887268066404, 0.03170825576782227, 0.031618911743164065, 0.031666175842285156, 0.031698944091796875, 0.0317205753326416, 0.031761280059814455, 0.03180544090270996, 0.031801343917846676, 0.031848447799682614, 0.03182592010498047, 0.03179315185546875, 0.031950176239013674, 0.03191465568542481, 0.031920127868652344, 0.03187673568725586, 0.031909343719482425, 0.031838144302368164, 0.03182691192626953, 0.03206876754760742, 0.031984031677246096, 0.03200979232788086, 0.03207395172119141, 0.03200271987915039, 0.032024574279785153, 0.03203641510009766]",tokens/s,31.49067840169395,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,recurrent_gemma,google/recurrentgemma-9b,google/recurrentgemma-9b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: RecurrentGemmaForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/gpt-neox-20b,EleutherAI/gpt-neox-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 744, in __init__ self.mlp = GPTNeoXMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 717, in __init__ self.dense_h_to_4h = nn.Linear(config.hidden_size, config.intermediate_size) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 288.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 188.12 MiB is free. Process 38889 has 14.55 GiB memory in use. Of the allocated memory 14.43 GiB is allocated by PyTorch, and 13.45 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mixtral,mistralai/Mixtral-8x22B-v0.1,mistralai/Mixtral-8x22B-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 1195, in __init__ self.model = MixtralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in __init__ [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 968, in [MixtralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 755, in __init__ self.self_attn = MIXTRAL_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mixtral/modeling_mixtral.py"", line 349, in __init__ 
self.o_proj = nn.Linear(self.num_heads * self.head_dim, self.hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 72.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 38.12 MiB is free. Process 183367 has 14.70 GiB memory in use. Of the allocated memory 14.55 GiB is allocated by PyTorch, and 41.65 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,01-ai/Yi-6B,01-ai/Yi-6B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,812.05248,12523.077632,0.0,12127.830016,12122.08896,s,1,7.1470244140625,7.1470244140625,0.0,7.1470244140625,7.1470244140625,7.1470244140625,7.1470244140625,[7.1470244140625],,kWh,1.1793927454169288e-05,1.2933164151426684e-06,5.384726529999995e-06,1.847197039931195e-05,,MB,1128.370176,12695.044096,0.0,12289.31072,12248.586752,s,10,11.387053466796877,1.1387053466796877,0.0040679235285528866,1.1404854736328125,1.1433168212890623,1.14371474609375,1.1440330859375,"[1.13221044921875, 1.1353138427734375, 1.133013427734375, 1.13561669921875, 1.140848876953125, 1.1403653564453125, 1.1417381591796876, 1.1406055908203125, 1.1432283935546874, 1.1441126708984375]",tokens/s,224.81671904541568,kWh,3.331983561374988e-05,3.6729775271002274e-06,2.205385097639994e-05,5.904666411725005e-05,tokens/kWh,4335553.986448009,MB,1172.037632,12701.335552,0.0,12295.602176,12248.589312,s,10,33.83045361328125,3.383045361328125,0.0016382533517189726,3.382980712890625,3.38504169921875,3.38539853515625,3.38568400390625,"[3.38241748046875, 3.381180908203125, 3.38253857421875, 3.38005712890625, 3.3834228515625, 3.38220703125, 3.383595947265625, 3.38575537109375, 3.38431591796875, 3.38496240234375]",tokens/s,18.62227468781775,kWh,9.886225469458334e-05,1.0904857549761307e-05,6.581610820840001e-05,0.0001755832204527447,tokens/kWh,358804.21738223784,,s,630,33.827737373352036,0.05369482122754294,0.0002572514064457032,0.053686574935913084,0.053942374420166014,0.054014467430114746,0.054938772735595706,"[0.05515267181396484, 0.05363302230834961, 0.05338665771484375, 0.05337926483154297, 0.053221630096435546, 0.05333417510986328, 0.053190654754638675, 0.05322137451171875, 0.05331545639038086, 0.053556961059570314, 0.053712512969970705, 0.05363792037963867, 0.05347516632080078, 0.05363235092163086, 0.053555423736572266, 0.05354550552368164, 0.05347651290893555, 
0.053695392608642575, 0.05385340881347656, 0.05386076736450195, 0.05361452865600586, 0.05363654327392578, 0.05363328170776367, 0.0534310417175293, 0.053303295135498044, 0.053651454925537106, 0.05348966217041016, 0.05359619140625, 0.05344662475585937, 0.05350400161743164, 0.053713920593261716, 0.05366400146484375, 0.053623550415039065, 0.053698558807373044, 0.05363302230834961, 0.05375503921508789, 0.053668704986572266, 0.05385420989990235, 0.05379072189331055, 0.05373270416259766, 0.05391222381591797, 0.05372854232788086, 0.0536030387878418, 0.053768192291259766, 0.05369241714477539, 0.05376803207397461, 0.053626209259033206, 0.053635902404785156, 0.05377590560913086, 0.053817825317382814, 0.053821376800537106, 0.05400515365600586, 0.05385257720947265, 0.05389132690429688, 0.0539156494140625, 0.053890625, 0.05383612823486328, 0.05412054443359375, 0.054038047790527344, 0.053801441192626955, 0.053766143798828124, 0.05377228927612305, 0.05382495880126953, 0.054830944061279294, 0.05366732788085937, 0.053269153594970704, 0.053364479064941406, 0.05316198348999023, 0.05334041595458985, 0.05325008010864258, 0.053236736297607425, 0.05365244674682617, 0.053596160888671876, 0.053466239929199216, 0.05362163162231445, 0.05352447891235351, 0.053405696868896485, 0.05346051025390625, 0.05355712127685547, 0.05342435073852539, 0.05375734329223633, 0.05364828872680664, 0.05354297637939453, 0.05347532653808594, 0.053512191772460936, 0.0533985595703125, 0.053434398651123045, 0.053315937042236326, 0.053451519012451175, 0.053664768218994144, 0.05358678436279297, 0.05346495819091797, 0.053723262786865233, 0.05363056182861328, 0.053588382720947264, 0.05350934219360352, 0.05367071914672852, 0.05369852828979492, 0.053903392791748043, 0.053778400421142576, 0.05393395233154297, 0.05378060913085937, 0.05378662490844727, 0.053653247833251955, 0.05369987106323242, 0.05363811111450195, 0.05378656005859375, 0.05376387023925781, 0.05377795028686523, 0.05366198348999023, 0.05378841781616211, 0.05356777572631836, 0.05365804672241211, 0.053663105010986326, 0.05391782379150391, 0.05375654220581055, 0.05382131195068359, 0.05379276657104492, 0.05383782577514649, 0.05430006408691406, 0.054153472900390624, 0.053868896484375, 0.05399552154541016, 0.05380204772949219, 0.05395552062988281, 0.05390457534790039, 0.05504355239868164, 0.05392438507080078, 0.053386463165283206, 0.05330614471435547, 0.05312102508544922, 0.05327462387084961, 0.053212383270263675, 0.053334815979003906, 0.053313278198242185, 0.05349811172485352, 0.05349299240112305, 0.05353657531738281, 0.05383468627929688, 0.05346889495849609, 0.053448352813720706, 0.05355916976928711, 0.05354095840454102, 0.05367055892944336, 0.05369651031494141, 0.05378211212158203, 0.05359001541137695, 0.05372560119628906, 0.053569534301757815, 0.05339129638671875, 0.05331155014038086, 0.05362483215332031, 0.05365887832641601, 0.0541124496459961, 0.05382374572753906, 0.05361056137084961, 0.05350016021728515, 0.05361395263671875, 0.05353740692138672, 0.05360435104370117, 0.053767326354980466, 0.05416159820556641, 0.053854881286621095, 0.053997566223144534, 0.0537393913269043, 0.05376422500610351, 0.05371903991699219, 0.053709854125976564, 0.05367907333374024, 0.05364326477050781, 0.053544960021972655, 0.053662750244140626, 0.05378761672973633, 0.05373747253417969, 0.05369369506835937, 0.053771007537841795, 0.053677471160888675, 0.05379945755004883, 0.053704769134521484, 0.0539422721862793, 0.0538869743347168, 0.05385942459106445, 0.05387139129638672, 0.05390742492675781, 
0.05372940826416016, 0.053856288909912106, 0.05398323059082031, 0.053856254577636715, 0.053866497039794924, 0.05495568084716797, 0.053563743591308596, 0.05327462387084961, 0.05324390411376953, 0.053340160369873046, 0.053411838531494144, 0.053362144470214846, 0.05336886215209961, 0.05327718353271484, 0.05348556900024414, 0.05338521575927734, 0.05354086303710937, 0.0535530891418457, 0.05355116653442383, 0.053540542602539064, 0.05355756759643555, 0.05343027114868164, 0.05395865631103516, 0.05373747253417969, 0.05372927856445313, 0.05363507080078125, 0.053579776763916016, 0.05333353424072266, 0.05348803329467773, 0.05355023956298828, 0.05362575912475586, 0.053499454498291014, 0.05354691314697266, 0.05346563339233398, 0.05354684829711914, 0.053432289123535155, 0.05355440139770508, 0.05358252716064453, 0.053528190612792965, 0.05346985626220703, 0.05387174224853516, 0.053934974670410155, 0.053855327606201174, 0.05375596618652344, 0.053691104888916014, 0.053536895751953126, 0.05367193603515625, 0.05368627166748047, 0.053788257598876954, 0.05366620635986328, 0.053688030242919925, 0.05355753707885742, 0.05369161605834961, 0.05381324768066406, 0.053828384399414064, 0.05367193603515625, 0.05380422210693359, 0.05370758438110351, 0.05379276657104492, 0.05396889495849609, 0.05383891296386719, 0.053814208984375, 0.05387059020996094, 0.05378224182128906, 0.05392790222167969, 0.05373574447631836, 0.05393561553955078, 0.053800609588623045, 0.05472249603271485, 0.05370383834838867, 0.05342879867553711, 0.05328726577758789, 0.05326847839355469, 0.0533438720703125, 0.053288959503173826, 0.05341632080078125, 0.05352035140991211, 0.053633056640625, 0.05364070510864258, 0.05361305618286133, 0.05340972900390625, 0.05344467163085938, 0.0533988151550293, 0.053365310668945315, 0.05351628875732422, 0.05376015853881836, 0.05374092864990234, 0.053631359100341794, 0.05342972946166992, 0.05356614303588867, 0.05361667251586914, 0.05355718231201172, 0.05362700653076172, 0.053663745880126956, 0.05352243041992188, 0.05367113494873047, 0.053639968872070315, 0.05372108840942383, 0.053856510162353516, 0.05358975982666016, 0.053491519927978515, 0.05353286361694336, 0.05349484634399414, 0.053609375, 0.05357904052734375, 0.05375052642822266, 0.053755489349365235, 0.053921951293945315, 0.05372134399414062, 0.053817344665527345, 0.053733375549316405, 0.05393203353881836, 0.05361996841430664, 0.0544117431640625, 0.053644798278808595, 0.05386483383178711, 0.05403408050537109, 0.05413065719604492, 0.05381814575195312, 0.05401337432861328, 0.05377900695800781, 0.05392176055908203, 0.05382896041870117, 0.05379756927490234, 0.05387059020996094, 0.05397452926635742, 0.05380966567993164, 0.05389926528930664, 0.053997310638427734, 0.053946495056152344, 0.053858432769775394, 0.05504214477539063, 0.0534984016418457, 0.05328630447387695, 0.053279327392578124, 0.053354305267333986, 0.05372537612915039, 0.053381153106689457, 0.05333193588256836, 0.053515743255615235, 0.05364284896850586, 0.053416160583496096, 0.05359872055053711, 0.053637344360351565, 0.05349763107299805, 0.053446880340576174, 0.05352767944335937, 0.053418880462646486, 0.053814624786376955, 0.05389126586914063, 0.05381558227539063, 0.05355868911743164, 0.05365430450439453, 0.05334630584716797, 0.053376575469970704, 0.053457344055175784, 0.05368832015991211, 0.053601375579833986, 0.05368924713134766, 0.0535551986694336, 0.05363097763061524, 0.0535551986694336, 0.05371696090698242, 0.05371292877197266, 0.05360153579711914, 0.05359199905395508, 0.05381808090209961, 
0.05381539154052734, 0.05374771118164062, 0.05369241714477539, 0.05376409530639648, 0.053689823150634766, 0.05365151977539063, 0.05355507278442383, 0.0536868782043457, 0.05394636917114258, 0.05384172821044922, 0.05379897689819336, 0.0536736946105957, 0.05358633422851562, 0.05374566268920898, 0.053752864837646484, 0.053856254577636715, 0.05371798324584961, 0.053824928283691405, 0.05386908721923828, 0.05405036926269531, 0.05374003219604492, 0.054086753845214844, 0.053701534271240234, 0.05386751937866211, 0.053836799621582034, 0.05390335845947265, 0.05388006210327148, 0.05498470306396484, 0.05384755325317383, 0.053370784759521485, 0.05333769607543945, 0.05325449752807617, 0.053340576171875, 0.053348190307617185, 0.053344673156738284, 0.05343231964111328, 0.053438465118408204, 0.053605537414550784, 0.053773151397705075, 0.05350400161743164, 0.05340774536132813, 0.053351585388183596, 0.05336147308349609, 0.053558334350585934, 0.05362992095947266, 0.05366294479370117, 0.05380585479736328, 0.053628929138183595, 0.053710849761962894, 0.05365264129638672, 0.053437278747558596, 0.053433345794677733, 0.053648384094238284, 0.05347078323364258, 0.053580223083496095, 0.05365964889526367, 0.05372915267944336, 0.05373759841918945, 0.0536346549987793, 0.05358428955078125, 0.05364227294921875, 0.05361059188842773, 0.05376895904541015, 0.053864574432373045, 0.05367724609375, 0.05371769714355469, 0.05389516830444336, 0.05359580612182617, 0.05361865615844726, 0.05363750457763672, 0.05379904174804687, 0.05375696182250977, 0.053832672119140626, 0.05384185409545898, 0.05378396987915039, 0.05390607833862305, 0.053822559356689455, 0.05375683212280274, 0.054079521179199216, 0.05371030426025391, 0.05385782241821289, 0.05399856185913086, 0.05411635208129883, 0.05387673568725586, 0.05408134460449219, 0.05403871917724609, 0.05389644622802734, 0.05400243377685547, 0.05387468719482422, 0.053972991943359375, 0.055091358184814455, 0.0538603515625, 0.053319679260253904, 0.05336064147949219, 0.05342588806152344, 0.05346128082275391, 0.0534835205078125, 0.05346918487548828, 0.05361004638671875, 0.05365395355224609, 0.05355641555786133, 0.05358675384521484, 0.0535551986694336, 0.05348112106323242, 0.053373279571533205, 0.05346239852905273, 0.053569278717041015, 0.05376633453369141, 0.0538221435546875, 0.053792736053466794, 0.05369244766235352, 0.05380662536621094, 0.05358540725708008, 0.053682239532470706, 0.0537097282409668, 0.053733184814453126, 0.05364550399780273, 0.053732799530029296, 0.053750049591064455, 0.05370399856567383, 0.053701824188232425, 0.05364313507080078, 0.05367798233032227, 0.05377206420898437, 0.05358335876464844, 0.05376073455810547, 0.05401536178588867, 0.05391219329833984, 0.05394432067871094, 0.05384806442260742, 0.053594112396240234, 0.053792415618896486, 0.053671295166015626, 0.05372630310058594, 0.05384342575073242, 0.05381980895996094, 0.05377964782714844, 0.05381817626953125, 0.053682174682617184, 0.05384601593017578, 0.0538419189453125, 0.053755615234375, 0.053672222137451174, 0.053952129364013675, 0.05386240005493164, 0.05400950241088867, 0.053938911437988284, 0.05388224029541016, 0.05387731170654297, 0.05396207809448242, 0.05379699325561523, 0.053952224731445314, 0.05381824111938477, 0.05496422576904297, 0.05350604629516602, 0.05325423812866211, 0.05340467071533203, 0.05343734359741211, 0.05338422393798828, 0.05338211059570312, 0.0534466552734375, 0.05345235061645508, 0.05341843032836914, 0.05334220886230469, 0.05342617416381836, 0.05359795379638672, 0.05356943893432617, 0.05342448043823242, 
0.05353472137451172, 0.05351955032348633, 0.053889057159423825, 0.053814048767089846, 0.053866497039794924, 0.05359001541137695, 0.05362483215332031, 0.05354848098754883, 0.053596702575683594, 0.05369244766235352, 0.05371244812011719, 0.053622848510742185, 0.05362521743774414, 0.05360835266113281, 0.05369865417480469, 0.05364940643310547, 0.05361628723144531, 0.05358601760864258, 0.05365526580810547, 0.05360591888427734, 0.053943294525146485, 0.05380300903320313, 0.05382928085327148, 0.053981311798095705, 0.05374780654907227, 0.053758079528808594, 0.05401769638061524, 0.05383980941772461, 0.05369843292236328, 0.053678398132324216, 0.053741695404052735, 0.05386399841308594, 0.0538135986328125, 0.05367417526245117, 0.053916961669921874, 0.05371567916870117, 0.053927745819091794, 0.05371718215942383, 0.053868385314941404, 0.05385555267333984, 0.0538427848815918, 0.05399484634399414, 0.053927745819091794, 0.053989761352539065, 0.054047199249267576, 0.05381907272338867, 0.053948734283447264, 0.054001247406005856, 0.054897377014160156, 0.05348947143554687, 0.05324425506591797, 0.05351174545288086, 0.053403167724609374, 0.053421825408935544, 0.05336716842651367, 0.05370479965209961, 0.05359260940551758, 0.05365305709838867, 0.0538298568725586, 0.053673919677734376, 0.0535513916015625, 0.05349692916870117, 0.05357231903076172, 0.053548641204833984, 0.05343907165527344, 0.05374771118164062, 0.053823486328125, 0.05362073516845703, 0.05345894241333008, 0.053526527404785154, 0.053495807647705076, 0.05351804733276367, 0.053467296600341795, 0.05353279876708984, 0.05353881454467774, 0.05361635208129883, 0.053698848724365235, 0.053902816772460935, 0.05378102493286133, 0.05376204681396484, 0.05370169448852539, 0.053727680206298825, 0.0536165771484375, 0.05388345718383789, 0.05390665435791016, 0.05403510284423828, 0.053685630798339844, 0.05365945434570313, 0.05373139190673828, 0.05383257675170899, 0.05373132705688476, 0.05384729766845703, 0.05365619277954101, 0.05359628677368164, 0.053690174102783206, 0.05394364929199219, 0.053742431640625, 0.05382099151611328, 0.054042945861816405, 0.05396902465820313, 0.053819393157958986, 0.05385526275634766, 0.054051361083984374, 0.05399321746826172, 0.05380940628051758, 0.0538218879699707, 0.05397452926635742, 0.054067745208740234, 0.05378249740600586, 0.05385980987548828, 0.05396089553833008]",tokens/s,18.62376998635106,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,mistral,TencentARC/Mistral_Pro_8B_v0.1,TencentARC/Mistral_Pro_8B_v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) 
ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 959, in __init__ self.model = MistralModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in __init__ [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 714, in [MistralDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 510, in __init__ self.self_attn = MISTRAL_ATTENTION_CLASSES[config._attn_implementation](config=config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/mistral/modeling_mistral.py"", line 201, in __init__ self.q_proj = nn.Linear(self.hidden_size, self.num_heads * self.head_dim, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 32.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 24.12 MiB is free. Process 96158 has 14.71 GiB memory in use. Of the allocated memory 14.46 GiB is allocated by PyTorch, and 141.44 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-160m,EleutherAI/pythia-160m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.706688,806.289408,0.0,411.041792,391.374848,s,1,7.4192998046875,7.4192998046875,0.0,7.4192998046875,7.4192998046875,7.4192998046875,7.4192998046875,[7.4192998046875],,kWh,5.054497087508025e-06,5.503927119673102e-07,2.0302794020016224e-06,7.635169201476957e-06,,MB,1098.924032,879.689728,0.0,473.956352,454.832128,s,16,0.37611609649658206,0.02350725603103638,0.0003895896859630637,0.0234224796295166,0.02354478359222412,0.023951807975769044,0.024774489402770997,"[0.024980159759521486, 0.023404767990112305, 0.023301408767700194, 0.02340281677246094, 0.023271360397338868, 0.023257152557373047, 0.02345427131652832, 0.023463552474975585, 0.023609024047851562, 0.023480543136596678, 0.023447519302368165, 0.02347452735900879, 0.023395904541015623, 0.02337260818481445, 0.023360288619995118, 0.0234401912689209]",tokens/s,10890.254467046512,kWh,8.355347409166522e-07,9.214411109693993e-08,5.548179041714452e-07,1.4824967561850374e-06,tokens/kWh,172681659.45858395,MB,1120.452608,906.952704,0.0,501.219328,454.834688,s,16,9.710575134277343,0.606910945892334,0.012089048943782812,0.6111069030761719,0.6191018981933594,0.6193922882080078,0.6196740142822266,"[0.6129658203125, 0.6042952880859375, 0.5861619262695312, 0.5873612060546874, 0.59046728515625, 0.5870277709960937, 0.6189288940429688, 0.6136454467773438, 0.61927490234375, 0.6182135620117187, 0.6197444458007813, 0.610319091796875, 0.6175580444335937, 0.6102855224609375, 0.6118947143554687, 0.6024312133789063]",tokens/s,103.80435618502786,kWh,1.734154936766225e-05,1.912482562088808e-06,7.800629956652067e-06,2.7054661886403127e-05,tokens/kWh,2328619.010820532,,s,1008,9.702061608314505,0.009625061119359637,0.0002930521366950281,0.009635200023651123,0.009933778953552247,0.009996521377563477,0.010454401950836181,"[0.009595935821533202, 0.009617247581481934, 0.009658559799194336, 0.00968832015991211, 0.009486175537109376, 0.009703424453735352, 0.009759455680847169, 0.0095, 0.009595552444458007, 0.009773056030273437, 0.009921759605407714, 0.010906399726867676, 0.009795583724975587, 0.009715200424194336, 0.009690752029418945, 0.009709823608398437, 0.009646719932556152, 0.00959488010406494, 0.009988096237182617, 0.009772576332092286, 0.009902560234069824, 0.009748895645141602, 0.00976956844329834, 0.00978006362915039, 0.00966051197052002, 0.009490495681762695, 0.009498335838317872, 0.009705471992492675, 0.01026857566833496, 0.00993727970123291, 0.009926655769348144, 0.009846783638000489, 0.0104017915725708, 0.009987808227539063, 0.0098155517578125, 0.009814335823059081, 0.009681568145751954, 0.009592160224914551, 0.00963811206817627, 0.009708864212036133, 0.009450464248657227, 
0.009678815841674805, 0.009659744262695313, 0.00963651180267334, 0.009541824340820312, 0.009458623886108398, 0.009355456352233886, 0.009453472137451171, 0.009627519607543945, 0.009857952117919922, 0.009644351959228515, 0.009650079727172852, 0.009551199913024902, 0.009541728019714356, 0.009773407936096191, 0.009838144302368164, 0.009813983917236329, 0.009648608207702638, 0.009670656204223632, 0.00979270362854004, 0.009621855735778808, 0.009470432281494141, 0.009422975540161133, 0.010043392181396485, 0.010067392349243164, 0.011911935806274414, 0.010933183670043945, 0.009993120193481446, 0.009969823837280274, 0.009827263832092285, 0.00975171184539795, 0.009776288032531739, 0.00984547233581543, 0.010139488220214843, 0.009764415740966797, 0.009422944068908692, 0.009446880340576173, 0.009726847648620605, 0.009952832221984863, 0.009535807609558106, 0.009451647758483887, 0.009485312461853027, 0.009582719802856444, 0.009597824096679688, 0.009531295776367188, 0.009828096389770507, 0.009483712196350098, 0.009544608116149902, 0.009797504425048828, 0.010219967842102051, 0.009595711708068848, 0.009472895622253417, 0.009584799766540527, 0.009500384330749512, 0.009672351837158203, 0.009814687728881836, 0.009842592239379883, 0.009693375587463379, 0.00961731243133545, 0.009320608139038086, 0.009229920387268066, 0.009223872184753418, 0.009292160034179688, 0.009307456016540528, 0.009231040000915527, 0.00930406379699707, 0.009259008407592773, 0.009224287986755371, 0.009274911880493165, 0.009378175735473632, 0.00924403190612793, 0.009240703582763672, 0.009250880241394043, 0.009322943687438965, 0.009514816284179687, 0.009324735641479492, 0.009257216453552247, 0.009189151763916015, 0.009220064163208008, 0.00919753646850586, 0.00925209617614746, 0.009265952110290528, 0.009208928108215332, 0.009225055694580079, 0.009318016052246093, 0.00932476806640625, 0.009152511596679687, 0.00943887996673584, 0.009678879737854003, 0.009345439910888672, 0.009829888343811035, 0.009355584144592284, 0.009468031883239745, 0.009420160293579102, 0.00928553581237793, 0.009267328262329101, 0.009265376091003417, 0.009291423797607421, 0.009310912132263184, 0.009414112091064453, 0.009246560096740722, 0.009224896430969238, 0.00928767967224121, 0.009243935585021972, 0.009230719566345216, 0.00921225643157959, 0.009205375671386719, 0.009164575576782227, 0.009260640144348145, 0.009251775741577149, 0.009193023681640626, 0.009236639976501464, 0.009228416442871094, 0.009269472122192382, 0.009305343627929688, 0.00928223991394043, 0.009220319747924805, 0.009203136444091797, 0.009217568397521973, 0.009234335899353028, 0.009264191627502442, 0.009242591857910156, 0.009243616104125977, 0.009192416191101074, 0.009349087715148925, 0.009387392044067383, 0.009248671531677246, 0.00938595199584961, 0.009239583969116212, 0.00923209571838379, 0.009193120002746582, 0.009314432144165039, 0.009232640266418457, 0.009236767768859863, 0.009205471992492676, 0.009342144012451172, 0.009321311950683593, 0.009342368125915528, 0.009325119972229004, 0.009268320083618165, 0.00927836799621582, 0.009270912170410156, 0.009334976196289063, 0.00920524787902832, 0.009288415908813477, 0.009228256225585938, 0.009314432144165039, 0.009615232467651367, 0.009349151611328125, 0.00907529640197754, 0.00935321617126465, 0.00931430435180664, 0.00943824005126953, 0.009460543632507324, 0.009474047660827637, 0.009586784362792969, 0.009400383949279785, 0.009326592445373535, 0.00935529613494873, 0.009220352172851562, 0.009365216255187988, 0.009666560173034668, 0.009269536018371582, 
0.009219807624816894, 0.00932249641418457, 0.009393407821655273, 0.009364224433898926, 0.009260607719421386, 0.009214400291442871, 0.00923363208770752, 0.009272095680236816, 0.009271295547485351, 0.009254560470581055, 0.009283935546875, 0.009248767852783203, 0.00923472023010254, 0.009413536071777345, 0.009356096267700195, 0.009383328437805176, 0.009321056365966796, 0.009390080451965332, 0.00929587173461914, 0.009464960098266601, 0.009462656021118164, 0.009324543952941895, 0.009312255859375, 0.009346847534179687, 0.009400511741638183, 0.009407999992370606, 0.009204256057739257, 0.00920684814453125, 0.009233344078063964, 0.009265279769897461, 0.009559103965759277, 0.009300800323486328, 0.009259008407592773, 0.009242624282836913, 0.009342880249023437, 0.00933897590637207, 0.00926028823852539, 0.009255680084228515, 0.009183232307434081, 0.009153792381286621, 0.00925158405303955, 0.009328767776489258, 0.009185248374938965, 0.009264960289001464, 0.009277536392211913, 0.009252863883972168, 0.009274944305419922, 0.009251423835754394, 0.009246560096740722, 0.00908675193786621, 0.009285887718200684, 0.009311679840087891, 0.009428576469421386, 0.009465888023376464, 0.009413311958312989, 0.009361408233642577, 0.009326432228088379, 0.009369759559631348, 0.009272704124450684, 0.009273152351379394, 0.009345536231994628, 0.009443648338317871, 0.009682304382324218, 0.00957913589477539, 0.010866687774658204, 0.009437184333801269, 0.009347200393676758, 0.009295519828796386, 0.009285856246948242, 0.009312447547912597, 0.009291487693786621, 0.009273440361022948, 0.009254816055297852, 0.009268671989440917, 0.009275808334350585, 0.009394432067871094, 0.009377792358398437, 0.00941004753112793, 0.009274080276489257, 0.009271072387695313, 0.009348575592041015, 0.009357119560241699, 0.009322976112365723, 0.009248640060424805, 0.009269856452941894, 0.009375519752502441, 0.009332991600036622, 0.009227999687194824, 0.009578495979309083, 0.009266528129577636, 0.009265312194824219, 0.00928159999847412, 0.009498944282531738, 0.009390239715576171, 0.009215999603271484, 0.009289728164672852, 0.009399616241455078, 0.009382335662841797, 0.00927667236328125, 0.009245408058166505, 0.009297696113586426, 0.009263615608215332, 0.00931222438812256, 0.009436927795410156, 0.009330400466918945, 0.009327168464660645, 0.00943446445465088, 0.009468255996704101, 0.009410016059875488, 0.009308671951293946, 0.009249247550964356, 0.009307200431823731, 0.00907852840423584, 0.009291775703430176, 0.009310272216796875, 0.009390015602111817, 0.009408415794372559, 0.009343071937561035, 0.009314240455627442, 0.009257023811340331, 0.009233951568603516, 0.009255647659301758, 0.009381728172302246, 0.009220000267028808, 0.009281536102294922, 0.009381631851196288, 0.00947430419921875, 0.009414752006530762, 0.009379712104797364, 0.009289759635925293, 0.009265215873718261, 0.009271231651306153, 0.009453568458557129, 0.00934716796875, 0.009275296211242675, 0.009248767852783203, 0.009281215667724609, 0.009312576293945312, 0.009227392196655273, 0.009233344078063964, 0.00923641586303711, 0.009242624282836913, 0.009367168426513672, 0.009244832038879394, 0.009305855751037598, 0.009247200012207031, 0.009320575714111328, 0.009295743942260742, 0.009340928077697755, 0.009285568237304687, 0.009265215873718261, 0.009234560012817384, 0.0092542724609375, 0.009236991882324219, 0.009303487777709962, 0.009245247840881347, 0.009240575790405273, 0.009344351768493653, 0.009362015724182129, 0.009381695747375489, 0.0092674560546875, 0.009273344039916993, 0.009262432098388671, 
0.009212575912475585, 0.009254912376403808, 0.009352383613586425, 0.009234880447387695, 0.009349504470825195, 0.009393407821655273, 0.009487104415893555, 0.00940771198272705, 0.00938649559020996, 0.009382143974304199, 0.00945132827758789, 0.00941267204284668, 0.009422719955444336, 0.009874688148498536, 0.00984233570098877, 0.009905055999755859, 0.010100799560546875, 0.009820159912109374, 0.009754624366760254, 0.009803808212280274, 0.009934720039367675, 0.00997590446472168, 0.009852928161621094, 0.00994099235534668, 0.009751872062683105, 0.010789567947387696, 0.00984607982635498, 0.009918656349182129, 0.00986128044128418, 0.009813952445983886, 0.00966697597503662, 0.009576671600341797, 0.009762656211853028, 0.009985343933105469, 0.00990998363494873, 0.0099334716796875, 0.009839872360229492, 0.009741120338439942, 0.009863360404968262, 0.009777152061462402, 0.009538911819458007, 0.009591456413269042, 0.009760800361633301, 0.010008543968200683, 0.009789440155029297, 0.009641119956970215, 0.009715968132019043, 0.009969728469848633, 0.009904671669006347, 0.009881407737731933, 0.009738528251647949, 0.009743616104125977, 0.009793984413146972, 0.009873439788818359, 0.00992240047454834, 0.00973964786529541, 0.009845120429992676, 0.009763423919677734, 0.009775103569030762, 0.009598591804504394, 0.009484671592712402, 0.009584799766540527, 0.009719776153564454, 0.009797504425048828, 0.009814016342163086, 0.009918463706970216, 0.010140735626220703, 0.009993151664733886, 0.009967231750488281, 0.009863264083862304, 0.009818400382995605, 0.009803744316101074, 0.009582624435424805, 0.009481760025024414, 0.009559647560119629, 0.01043827247619629, 0.010006143569946288, 0.009634559631347656, 0.009580863952636718, 0.009482943534851074, 0.009728223800659179, 0.00961616039276123, 0.009576448440551758, 0.009748576164245605, 0.009754528045654296, 0.00963321590423584, 0.009611840248107911, 0.009512639999389649, 0.009462240219116212, 0.009446368217468262, 0.009718655586242676, 0.009745951652526855, 0.009664511680603028, 0.009598464012145995, 0.009546719551086425, 0.009498016357421875, 0.009451583862304687, 0.009390368461608886, 0.009633440017700196, 0.009744768142700195, 0.009699040412902833, 0.00971622371673584, 0.009573504447937012, 0.009616448402404785, 0.009702336311340333, 0.009892736434936523, 0.009828319549560547, 0.009772928237915039, 0.009695455551147461, 0.009557791709899902, 0.009494848251342773, 0.0095283203125, 0.009503583908081054, 0.009992192268371582, 0.010205183982849121, 0.009803775787353516, 0.009901247978210449, 0.009673184394836425, 0.009715776443481445, 0.00973142433166504, 0.009672991752624512, 0.009713727951049805, 0.00971014404296875, 0.009891712188720703, 0.009902432441711425, 0.009868255615234375, 0.009923328399658203, 0.010332256317138673, 0.009916095733642579, 0.009941087722778321, 0.009920255661010742, 0.009767583847045899, 0.009804736137390137, 0.00978384017944336, 0.009766752243041993, 0.009777664184570312, 0.009646080017089843, 0.009854975700378419, 0.009486047744750977, 0.009804575920104981, 0.009672703742980958, 0.009702912330627441, 0.009640064239501953, 0.009677184104919434, 0.009594911575317383, 0.009574496269226074, 0.009545599937438965, 0.009637344360351562, 0.009893471717834473, 0.009973888397216796, 0.010060832023620606, 0.009762751579284667, 0.00976041603088379, 0.009698559761047363, 0.009523743629455567, 0.009604736328125, 0.009975584030151367, 0.00992972755432129, 0.009787391662597657, 0.009901760101318359, 0.009875776290893555, 0.009764863967895507, 0.009877504348754883, 
0.00976416015625, 0.009962176322937012, 0.009623295783996582, 0.009592991828918457, 0.009551551818847656, 0.009655839920043945, 0.010060223579406738, 0.010168191909790038, 0.010455615997314453, 0.010293184280395508, 0.009933216094970703, 0.011004896163940429, 0.010186944007873535, 0.009890303611755372, 0.009822208404541016, 0.009773216247558593, 0.009983839988708497, 0.00994649600982666, 0.009958271980285644, 0.010020223617553711, 0.00994326400756836, 0.00986672019958496, 0.009668831825256347, 0.009726431846618653, 0.00963584041595459, 0.009600031852722167, 0.009851263999938966, 0.009757280349731445, 0.009805824279785156, 0.009811327934265137, 0.009731840133666993, 0.009710399627685547, 0.009834560394287109, 0.009859071731567384, 0.009672703742980958, 0.009719807624816895, 0.009620800018310546, 0.009536479949951172, 0.009924927711486816, 0.00989132785797119, 0.009902239799499512, 0.00986736011505127, 0.00986956787109375, 0.009840640068054199, 0.009651616096496582, 0.009636704444885253, 0.009799424171447755, 0.009844736099243164, 0.009897664070129394, 0.009847040176391602, 0.009802016258239746, 0.009680864334106445, 0.009557248115539551, 0.009607744216918946, 0.009569791793823243, 0.009705984115600585, 0.009799679756164551, 0.009885696411132813, 0.009969663619995118, 0.01028502368927002, 0.009883968353271485, 0.00982755184173584, 0.00987609577178955, 0.009803647994995118, 0.009815520286560058, 0.009794079780578613, 0.009692768096923828, 0.009879263877868652, 0.009917375564575195, 0.009786463737487794, 0.009730367660522462, 0.009704031944274903, 0.009760607719421386, 0.009733375549316406, 0.009673376083374024, 0.00970137596130371, 0.009671680450439453, 0.009750911712646484, 0.009882240295410157, 0.009891839981079101, 0.009926655769348144, 0.009930751800537109, 0.009780799865722656, 0.009773504257202149, 0.009940896034240723, 0.009931967735290528, 0.010032032012939453, 0.009950976371765136, 0.009988351821899414, 0.009924192428588868, 0.009750304222106933, 0.00972662353515625, 0.00976252841949463, 0.009756192207336426, 0.009783295631408692, 0.009796319961547851, 0.0097260160446167, 0.009672479629516601, 0.009625151634216308, 0.009663071632385254, 0.0096278076171875, 0.009656864166259766, 0.00974396800994873, 0.009816415786743164, 0.009833951950073243, 0.009826592445373535, 0.010085856437683106, 0.01001683235168457, 0.01003600025177002, 0.009943231582641602, 0.009804896354675293, 0.009706208229064942, 0.009964768409729003, 0.009718560218811035, 0.009781248092651367, 0.009865216255187988, 0.010059935569763184, 0.00986511993408203, 0.009869248390197753, 0.009765983581542969, 0.00969820785522461, 0.009807871818542481, 0.009645407676696777, 0.009732768058776856, 0.009911616325378418, 0.009956031799316407, 0.009838591575622559, 0.009805824279785156, 0.009906175613403321, 0.009829631805419922, 0.009919327735900879, 0.009916319847106933, 0.009902400016784668, 0.00982323169708252, 0.010006239891052246, 0.00984499168395996, 0.009917344093322754, 0.010024352073669434, 0.010055839538574219, 0.009873824119567871, 0.009860992431640625, 0.009678815841674805, 0.00971782398223877, 0.009768992424011231, 0.009739263534545899, 0.009640128135681152, 0.009645888328552246, 0.00961622428894043, 0.009850784301757813, 0.009716992378234863, 0.009522080421447754, 0.009629695892333985, 0.009953503608703613, 0.009828415870666504, 0.009950559616088867, 0.009920895576477051, 0.009965567588806153, 0.009808992385864258, 0.00967356777191162, 0.00958620834350586, 0.009799391746520995, 0.00973087978363037, 0.009928383827209473, 
0.009846847534179687, 0.009527296066284179, 0.009790656089782714, 0.009670559883117675, 0.00948691177368164, 0.009461088180541992, 0.009458527565002442, 0.009591967582702636, 0.009903103828430175, 0.00997920036315918, 0.00986736011505127, 0.009792384147644042, 0.009727456092834473, 0.009768704414367675, 0.009619487762451171, 0.009566720008850099, 0.009587776184082032, 0.009583519935607911, 0.009502655982971191, 0.009680959701538086, 0.009758720397949219, 0.009752415657043456, 0.009748127937316895, 0.009568767547607422, 0.00955388832092285, 0.009469440460205078, 0.009428768157958985, 0.009650943756103515, 0.00972544002532959, 0.009794048309326172, 0.009953280448913575, 0.00999833583831787, 0.009938943862915038, 0.009938336372375489, 0.009729824066162109, 0.009919296264648438, 0.009755680084228516, 0.009671648025512696, 0.009631232261657715, 0.00961996841430664, 0.009662464141845703, 0.009583776473999023, 0.009472064018249512, 0.00951968002319336, 0.009677023887634277, 0.00996771240234375, 0.009801631927490234, 0.009822400093078613, 0.009930527687072754, 0.009678879737854003, 0.009709024429321289, 0.00978764820098877, 0.00963817596435547, 0.009778656005859376, 0.009871904373168944, 0.009703424453735352, 0.009547776222229003, 0.00951523208618164, 0.009572064399719239, 0.009545536041259765, 0.009475520133972168, 0.009401151657104492, 0.00942841625213623, 0.009470399856567382, 0.009504704475402833, 0.009680191993713378, 0.009623807907104492, 0.00953331184387207, 0.009396415710449219, 0.009484671592712402, 0.009615360260009765, 0.009778656005859376, 0.009747039794921876, 0.009885631561279296, 0.00993449592590332, 0.009886207580566407, 0.00998969554901123, 0.00991875171661377, 0.00992204761505127, 0.00982476806640625, 0.00984438419342041, 0.009713919639587402, 0.00964031982421875, 0.009664223670959473, 0.009707103729248047, 0.009703200340270996, 0.009585472106933593, 0.009570112228393554, 0.00941875171661377, 0.009395936012268066, 0.009357600212097169, 0.0097259521484375, 0.009965567588806153, 0.009844127655029298, 0.009667167663574219, 0.009570143699645995, 0.009584223747253418, 0.009840224266052246, 0.011205632209777832, 0.009805215835571288, 0.011686464309692383, 0.009758720397949219, 0.009872384071350097, 0.009802111625671387, 0.009834527969360352, 0.009747039794921876, 0.009660415649414063, 0.009620896339416504, 0.009603967666625976, 0.009686752319335938, 0.009595295906066895, 0.009614687919616699, 0.00988595199584961, 0.009885919570922852, 0.009971487998962402, 0.010090496063232422, 0.009975808143615723, 0.010016768455505372, 0.009874848365783692, 0.009917023658752442, 0.010260479927062988, 0.00986678409576416, 0.009841312408447265, 0.009613056182861329, 0.009680031776428222, 0.009558943748474122, 0.009566240310668946, 0.009944640159606933, 0.009962719917297363, 0.009788928031921386, 0.009909407615661621, 0.009777536392211914, 0.010347071647644044, 0.00962342357635498, 0.009500800132751465, 0.009459487915039063, 0.009586784362792969, 0.00988806438446045, 0.009510368347167968, 0.009646400451660157, 0.010149920463562011, 0.009768768310546875, 0.009682432174682617, 0.009644736289978027, 0.009652223587036133, 0.009596896171569825, 0.009519136428833008, 0.009637887954711915, 0.009711615562438965, 0.00940886402130127, 0.009385631561279298, 0.009439455986022948, 0.009330207824707032, 0.009674592018127441, 0.01010934352874756, 0.01005894374847412, 0.009902912139892578, 0.009905535697937012, 0.009790080070495605, 0.009715104103088379, 0.00973862361907959, 0.009885408401489259, 0.009767104148864746, 
0.00968735980987549, 0.009541631698608399, 0.009695072174072266, 0.009814175605773925, 0.009621503829956055, 0.009669983863830566, 0.009956000328063964, 0.0097892484664917, 0.00976095962524414, 0.009565183639526367, 0.009593855857849122, 0.009506272315979005, 0.009421343803405762, 0.009439359664916993, 0.009432064056396485, 0.009431936264038087, 0.009459263801574707, 0.009688544273376464, 0.009898943901062012, 0.009838624000549316, 0.009644031524658203, 0.009453536033630371, 0.00957033634185791, 0.009560288429260254, 0.009435071945190429, 0.009424736022949218, 0.009457759857177735, 0.009454079627990723, 0.009742688179016114, 0.009935968399047852, 0.010724287986755371, 0.009986240386962891, 0.010014495849609374, 0.009878815650939942, 0.009883808135986329, 0.009866175651550292, 0.009821824073791504, 0.009651424407958985, 0.009535296440124512, 0.009546719551086425, 0.009489855766296386, 0.009504735946655274, 0.009435744285583495, 0.009771103858947755, 0.009936800003051758, 0.00972390365600586, 0.00961740779876709, 0.009775103569030762, 0.00970137596130371, 0.009811712265014648, 0.009906432151794434, 0.009761055946350098, 0.009721920013427735, 0.009657088279724122, 0.009710623741149902, 0.009738304138183594, 0.00953286361694336, 0.010322367668151856, 0.010501919746398925, 0.009730400085449219, 0.009576224327087402, 0.009659711837768554, 0.009697919845581054, 0.009689184188842773, 0.00962281608581543, 0.009476832389831543, 0.009308223724365234, 0.009374688148498535, 0.009348064422607423, 0.00959488010406494, 0.009645503997802734, 0.009455679893493652, 0.009574399948120118, 0.00942131233215332, 0.009461503982543945, 0.009394432067871094, 0.009488384246826171, 0.009897983551025391, 0.01001471996307373, 0.010024959564208985, 0.009777376174926758, 0.009917471885681152, 0.009753279685974121, 0.00965180778503418, 0.009503328323364257, 0.009557184219360351, 0.009501376152038575, 0.009440704345703125, 0.00946233558654785, 0.009644031524658203, 0.009865280151367187, 0.009760767936706542, 0.00949836826324463, 0.009394432067871094, 0.009359519958496093, 0.009307871818542481, 0.009514880180358887, 0.009690943717956544, 0.009639424324035644, 0.00956281566619873, 0.009689599990844726, 0.009675935745239258, 0.009586496353149414, 0.009454367637634277, 0.009360639572143555, 0.009288448333740235, 0.00930406379699707, 0.009449472427368164, 0.00929587173461914, 0.009260319709777831, 0.009263808250427247, 0.009195008277893067, 0.009209568023681641, 0.009429823875427247, 0.009452735900878906, 0.009394847869873046, 0.009441439628601073, 0.009596927642822266, 0.00962559986114502, 0.009453696250915527, 0.00934825611114502, 0.00930844783782959, 0.009316576004028321, 0.009537376403808594, 0.009916319847106933, 0.00980016040802002, 0.009843839645385741, 0.009824959754943848, 0.009953472137451172, 0.009904128074645996, 0.009957375526428223, 0.009776224136352539, 0.009560928344726563, 0.009496640205383301, 0.009430399894714355, 0.009455615997314454, 0.009442079544067382, 0.009514847755432128, 0.009721952438354492, 0.009608192443847656, 0.00938486385345459, 0.009518624305725098, 0.00965231990814209, 0.009873791694641114, 0.009743616104125977, 0.00960588836669922, 0.009569952011108399, 0.009538047790527344, 0.009674592018127441, 0.009702848434448243, 0.009605695724487304, 0.009518912315368652, 0.00969536018371582]",tokens/s,103.89544415345289,, 
bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-6.7b,EleutherAI/pythia-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.316416,14274.199552,0.0,13878.951936,13865.632768,s,1,7.819302734375,7.819302734375,0.0,7.819302734375,7.819302734375,7.819302734375,7.819302734375,[7.819302734375],,kWh,1.4235044483336877e-05,1.5627023503701438e-06,7.607783864008177e-06,2.3405530697715198e-05,,MB,1107.124224,14695.727104,0.0,14289.993728,14237.628416,s,10,13.329096923828125,1.3329096923828128,0.003811522174652249,1.3340584716796875,1.3368260620117187,1.336930487060547,1.3370140270996094,"[1.3257540283203124, 1.330975341796875, 1.328086669921875, 1.3329853515625, 1.3299989013671876, 1.335131591796875, 1.3368028564453125, 1.3357734375, 1.337034912109375, 1.3365538330078126]",tokens/s,192.06102368597422,kWh,3.8972799550415024e-05,4.298243802833858e-06,2.587240958680098e-05,6.914345294004986e-05,tokens/kWh,3702447.435217941,MB,1128.259584,14863.499264,0.0,14457.765888,14416.301056,s,10,39.33003002929687,3.933003002929688,0.003964859828145915,3.931811279296875,3.937063427734375,3.9396120361328126,3.9416509228515624,"[3.9292900390625, 3.932463134765625, 3.9364970703125, 3.936447265625, 3.930498779296875, 3.931708984375, 3.93191357421875, 3.928558837890625, 3.93049169921875, 3.94216064453125]",tokens/s,16.018294405844948,kWh,0.0001149224127012515,1.2676791999453186e-05,7.6365227758797e-05,0.00020396443245950175,tokens/kWh,308877.3823961145,,s,630,39.32551675796513,0.06242145517137316,0.00037507737989959206,0.06243038368225098,0.06289647064208984,0.0630232105255127,0.06322790199279785,"[0.06253164672851562, 0.06211529541015625, 0.06166787338256836, 0.06163679885864258, 0.06149305725097656, 0.06185779190063476, 0.061792255401611325, 0.062056190490722654, 0.062023937225341795, 0.062064319610595706, 0.06189433670043945, 0.06226803207397461, 0.062131263732910155, 0.06236236953735352, 0.0621278076171875, 0.06222438430786133, 0.062076545715332034, 0.06209628677368164, 0.06172243118286133, 0.062066783905029295, 0.06194182586669922, 0.06223664093017578, 0.06215663909912109, 0.06221382522583008, 0.06214912033081055, 0.06258281707763672, 0.06241686248779297, 0.062388225555419924, 0.06253567886352539, 0.062476192474365234, 0.06224700927734375, 0.06213836669921875, 0.06251113510131837, 0.06244121551513672, 0.06227347183227539, 0.06229840087890625, 0.06210332870483398, 0.06232291030883789, 0.06219728088378906, 0.06231216049194336, 0.0623480339050293, 0.06235548782348633, 0.06235337448120117, 0.06272576141357422, 0.06250739288330077, 0.06287753677368164, 0.06268739318847656, 0.0626237449645996, 0.06252339172363282, 0.06269747161865234, 0.06260121536254883, 0.06268918228149414, 0.06289212799072266, 0.06300262451171874, 0.0626954231262207, 0.06273023986816406, 0.06294118499755859, 0.06286342239379883, 0.06265350341796876, 0.06269744110107422, 0.0626956787109375, 0.06331052780151367, 0.06317254257202148, 0.0627589454650879, 0.06217107009887695, 0.06175132751464844, 
0.061875743865966795, 0.06171855926513672, 0.06202188873291015, 0.06194764709472656, 0.06188899230957031, 0.062097312927246094, 0.06196620941162109, 0.061835582733154294, 0.061994976043701175, 0.06201068878173828, 0.0622086067199707, 0.06210150527954102, 0.0624005126953125, 0.06249430465698242, 0.06249123382568359, 0.062534912109375, 0.06223516845703125, 0.06198668670654297, 0.06207299041748047, 0.062096607208251955, 0.061991710662841794, 0.062134273529052736, 0.06238534545898437, 0.062284095764160156, 0.0627061767578125, 0.06261721420288086, 0.062495105743408205, 0.06240201568603516, 0.06247481536865234, 0.062342624664306644, 0.06245347213745117, 0.062323486328125, 0.06242303848266602, 0.06231654357910156, 0.06257449722290039, 0.06260131072998047, 0.06250291061401367, 0.062443424224853515, 0.06255814361572265, 0.062476062774658205, 0.06268352127075195, 0.06265568161010743, 0.06262815856933594, 0.06277785491943359, 0.06261324691772462, 0.06281027221679687, 0.06295356750488282, 0.06262924957275391, 0.06260758590698243, 0.062424766540527345, 0.06257123184204101, 0.06254163360595703, 0.062810302734375, 0.06270361709594727, 0.06291177749633789, 0.06300336074829102, 0.06306406402587891, 0.0628427848815918, 0.06287369537353515, 0.06279987335205078, 0.06265711975097656, 0.062024097442626956, 0.061591552734375, 0.06162636947631836, 0.06166883087158203, 0.06176432037353516, 0.06216819381713867, 0.06200697708129883, 0.06205542373657227, 0.062089214324951174, 0.0619716796875, 0.06223676681518555, 0.06245040130615234, 0.062324447631835936, 0.062220542907714844, 0.06241254425048828, 0.06230230331420898, 0.06220611190795899, 0.06226534271240235, 0.062268672943115236, 0.06210790252685547, 0.06226947021484375, 0.06221049499511719, 0.062283008575439454, 0.06230265426635742, 0.062427486419677734, 0.062330848693847654, 0.062443038940429685, 0.06251366424560546, 0.06286288070678711, 0.0627770881652832, 0.0626613426208496, 0.06235340881347656, 0.0629452781677246, 0.06268678283691406, 0.06251142501831054, 0.062255233764648435, 0.06233283233642578, 0.06226953506469726, 0.06270566558837891, 0.062371841430664064, 0.06268710327148437, 0.06260953521728516, 0.06267897415161133, 0.06282451248168945, 0.06287760162353516, 0.0626583366394043, 0.06265884780883789, 0.06318086242675781, 0.06301004791259765, 0.06292144012451172, 0.06283849716186524, 0.06267932891845703, 0.06270975875854493, 0.06251520156860352, 0.06253567886352539, 0.06259097671508788, 0.06289926528930664, 0.06281926345825195, 0.06308169555664063, 0.06313225555419921, 0.06302329635620117, 0.06314384078979492, 0.06244761657714844, 0.06200729751586914, 0.06171984100341797, 0.06178441619873047, 0.06207936096191406, 0.061927425384521485, 0.061908992767333984, 0.061808639526367185, 0.06221593475341797, 0.06214271926879883, 0.062203903198242184, 0.06217523193359375, 0.06214656066894531, 0.06234726333618164, 0.0621030387878418, 0.062280193328857425, 0.06217475128173828, 0.06256684875488282, 0.06223791885375977, 0.062134815216064454, 0.062144577026367186, 0.06213808059692383, 0.062292030334472656, 0.062211872100830075, 0.06230492782592773, 0.06229734420776367, 0.06241321563720703, 0.06245366287231445, 0.06277983856201172, 0.06272617721557618, 0.06254735946655274, 0.062363777160644535, 0.06255212783813477, 0.06247644805908203, 0.06233871841430664, 0.06235168075561524, 0.06225513458251953, 0.06242531204223633, 0.0626237449645996, 0.06254991912841797, 0.06274265670776367, 0.06308822250366211, 0.06275462341308594, 0.06289465713500976, 0.06258483123779297, 
0.0627317771911621, 0.0626726417541504, 0.06296243286132812, 0.06285110473632813, 0.06291651153564454, 0.062476192474365234, 0.06287580871582031, 0.062574462890625, 0.06265024185180663, 0.06247862243652344, 0.06241001510620117, 0.06279238510131836, 0.06308031845092774, 0.06308236694335938, 0.06344729614257813, 0.06327267074584961, 0.06302953720092773, 0.06293830490112305, 0.06248716735839844, 0.061853919982910156, 0.061574176788330076, 0.06148342514038086, 0.061434432983398436, 0.06179008102416992, 0.06187417602539062, 0.06204428863525391, 0.06187007904052735, 0.06229811096191406, 0.0620871696472168, 0.06187417602539062, 0.06187606430053711, 0.06210323333740234, 0.062032352447509764, 0.06216198348999023, 0.062073089599609374, 0.06199363327026367, 0.06193532943725586, 0.06197484970092773, 0.06188032150268555, 0.062203903198242184, 0.062161121368408206, 0.0623733139038086, 0.06217536163330078, 0.06244169616699219, 0.062259040832519534, 0.0622573127746582, 0.06250495910644531, 0.06226067352294922, 0.0626099853515625, 0.06249062347412109, 0.0624189453125, 0.0625539207458496, 0.0624554557800293, 0.062497089385986325, 0.06244512176513672, 0.06227769470214844, 0.06232684707641602, 0.06250508880615234, 0.06261183929443359, 0.06266473770141602, 0.06286131286621094, 0.06283417510986328, 0.062687744140625, 0.06282649612426758, 0.06247423934936523, 0.06270156860351563, 0.062382080078125, 0.0626319351196289, 0.0628408317565918, 0.0626698226928711, 0.06259814453125, 0.0630231056213379, 0.06273027038574219, 0.06274003219604492, 0.06276723098754883, 0.0628834571838379, 0.06295804977416992, 0.06299423980712891, 0.06307068634033203, 0.06311312103271484, 0.0630804786682129, 0.06248534393310547, 0.06198006439208984, 0.06156681442260742, 0.06146255874633789, 0.061655040740966796, 0.06171526336669922, 0.061884414672851565, 0.06184755325317383, 0.06218547058105469, 0.06233292770385742, 0.0620052490234375, 0.06206259155273437, 0.06204620742797851, 0.061966335296630856, 0.062081024169921874, 0.06198601531982422, 0.06205721664428711, 0.06218889617919922, 0.061838016510009766, 0.06216640090942383, 0.062163551330566405, 0.06217119979858399, 0.062338943481445315, 0.06232281494140625, 0.0623185920715332, 0.06250492858886719, 0.062322689056396485, 0.06236972808837891, 0.06220751953125, 0.06234163284301758, 0.06273225784301757, 0.06248659133911133, 0.062457088470458985, 0.06250576019287109, 0.0623076171875, 0.06235420989990234, 0.06225017547607422, 0.06233065414428711, 0.062258113861083986, 0.06239849472045898, 0.062416446685791015, 0.06316057586669922, 0.06256454467773437, 0.06282032012939454, 0.06268518447875976, 0.06288179016113281, 0.06274867248535156, 0.06254147338867187, 0.06270985412597656, 0.06280755233764648, 0.06273875045776367, 0.0627388801574707, 0.06266060638427734, 0.06291024017333985, 0.06271958541870117, 0.06270012664794922, 0.06280787277221679, 0.0629065933227539, 0.06285107040405273, 0.06300467300415039, 0.06296371078491211, 0.0632463035583496, 0.06306800079345704, 0.06254748916625977, 0.061964767456054684, 0.06156902313232422, 0.0615997428894043, 0.06167552185058594, 0.06205440139770508, 0.06185574340820312, 0.06188851165771484, 0.06178201675415039, 0.06207692718505859, 0.06224281692504883, 0.06206054306030274, 0.0617696647644043, 0.06210671997070313, 0.06205952072143555, 0.06206870269775391, 0.062099071502685545, 0.06209302520751953, 0.06188304138183594, 0.06235340881347656, 0.062027488708496094, 0.062269729614257814, 0.062115009307861326, 0.06212239837646484, 0.06215107345581055, 0.06235744094848633, 
0.062359615325927736, 0.06250495910644531, 0.06226313781738281, 0.06227164840698242, 0.06256435012817382, 0.06257209777832032, 0.06260940933227539, 0.06245833587646484, 0.06230422210693359, 0.06246809768676758, 0.06239187240600586, 0.06272998428344727, 0.062274368286132815, 0.06239779281616211, 0.06267734527587891, 0.06276496124267578, 0.06262607955932617, 0.06299142456054688, 0.06270425415039063, 0.06280838394165039, 0.06264422225952149, 0.06259916687011718, 0.06253158569335937, 0.06265024185180663, 0.06278566360473632, 0.06279782485961914, 0.06291251373291015, 0.06284288024902343, 0.06266831970214844, 0.06286998367309571, 0.06282444763183594, 0.06261967849731445, 0.0628996810913086, 0.06307481765747071, 0.06302931213378907, 0.06315615844726563, 0.0630374412536621, 0.06262937545776368, 0.06205283355712891, 0.06150761413574219, 0.061582847595214846, 0.061667839050292966, 0.061739009857177736, 0.06194796752929688, 0.0619150390625, 0.06198278427124024, 0.06221206283569336, 0.06199926376342774, 0.061978431701660154, 0.061830753326416014, 0.06192377471923828, 0.062089248657226564, 0.06226736068725586, 0.062279232025146486, 0.06219820785522461, 0.0620951042175293, 0.06233321762084961, 0.06200419235229492, 0.06215315246582031, 0.06205209732055664, 0.06231273651123047, 0.06213056182861328, 0.06240480041503906, 0.06223641586303711, 0.06218569564819336, 0.06225449752807617, 0.0626712303161621, 0.06251062393188477, 0.062438079833984375, 0.06248857498168945, 0.06258659362792969, 0.06239056015014648, 0.06226739120483398, 0.06217932891845703, 0.06223257446289063, 0.0621893424987793, 0.06226268768310547, 0.06253855895996094, 0.06280934524536133, 0.0626429443359375, 0.06243328094482422, 0.062814208984375, 0.06268246459960937, 0.06254659271240234, 0.0626237449645996, 0.0626297607421875, 0.06292051315307617, 0.06262406539916993, 0.06265404891967774, 0.06265488052368164, 0.06255523300170898, 0.06289289474487304, 0.06258678436279297, 0.06257676696777344, 0.06269136047363282, 0.06274867248535156, 0.06275276947021484, 0.06283468627929688, 0.06286540985107422, 0.06285830307006836, 0.0626032943725586, 0.062011360168457035, 0.06158940887451172, 0.061611198425292966, 0.06154678344726563, 0.061792896270751956, 0.06193971252441406, 0.06185776138305664, 0.06219782257080078, 0.06217315292358398, 0.062133663177490236, 0.062075328826904294, 0.06202703857421875, 0.062118335723876955, 0.062271041870117186, 0.062171520233154295, 0.062435264587402346, 0.0622023696899414, 0.06203193664550781, 0.062134273529052736, 0.062117889404296876, 0.06203801727294922, 0.06238956832885742, 0.062180030822753904, 0.06218137741088867, 0.062271488189697265, 0.06240995025634766, 0.062187999725341794, 0.062173534393310546, 0.06236569595336914, 0.06268684768676758, 0.06252169418334962, 0.06251625442504882, 0.06269846343994141, 0.06237596893310547, 0.06245782470703125, 0.062182910919189455, 0.062271617889404295, 0.06233945465087891, 0.06215388870239258, 0.06239443206787109, 0.062438175201416014, 0.06271180725097657, 0.0626954231262207, 0.06249676895141602, 0.06252134323120118, 0.06259711837768554, 0.06259097671508788, 0.06256841659545899, 0.06262377548217773, 0.06255007934570313, 0.06274809646606445, 0.06265087890625, 0.06268518447875976, 0.06278553771972656, 0.06267903900146485, 0.06273843383789063, 0.06275686264038086, 0.0629678077697754, 0.06298390579223632, 0.06294742584228516, 0.06331209564208984, 0.06311727905273437, 0.06260208129882812, 0.062222431182861325, 0.06179638290405273, 0.062175201416015624, 0.0620145263671875, 
0.06187868881225586, 0.06187593460083008, 0.061975360870361325, 0.062107521057128905, 0.06224860763549805, 0.06231475067138672, 0.06227289581298828, 0.06224163055419922, 0.062320640563964844, 0.06227478408813476, 0.06262156677246093, 0.06252159881591797, 0.062466815948486326, 0.06228518295288086, 0.06224076843261719, 0.06211638259887695, 0.06216022491455078, 0.06221072006225586, 0.062348926544189456, 0.06224307250976562, 0.06250041580200195, 0.06257516860961915, 0.06287360000610352, 0.0626828498840332, 0.06289616012573242, 0.06279180908203125, 0.06262080001831055, 0.06258790588378907, 0.062438465118408205, 0.06229814529418945, 0.06256483078002929, 0.06267129516601562, 0.06263558578491212, 0.06262623977661133, 0.06263759994506836, 0.06262015914916992, 0.06260118484497071, 0.06275683212280274, 0.0628818244934082, 0.06258262252807617, 0.06284243011474609, 0.06312201690673828, 0.0629678077697754, 0.06282793426513672, 0.06294384002685546, 0.06269327926635743, 0.0626707534790039, 0.06270790481567383, 0.0627691535949707, 0.062963134765625, 0.06307020950317382, 0.06292537689208984, 0.0630231056213379, 0.06338150405883788, 0.06318284988403321, 0.0628592643737793, 0.0631009292602539, 0.06325775909423828]",tokens/s,16.020132777337203,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-13b,facebook/opt-13b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-6.7b,facebook/opt-6.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 
1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,llama,microsoft/rho-math-1b-v0.1,microsoft/rho-math-1b-v0.1,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,868.225024,2698.903552,0.0,2296.38144,2202.20672,s,1,7.67422705078125,7.67422705078125,0.0,7.67422705078125,7.67422705078125,7.67422705078125,7.67422705078125,[7.67422705078125],,kWh,5.329972345793976e-06,5.711282954124451e-07,1.8050014439913165e-06,7.706102085197738e-06,,MB,1193.754624,2795.372544,0.0,2382.364672,2267.889152,s,10,1.9515198211669922,0.19515198211669924,0.0009073049361826953,0.19517288970947266,0.1960868225097656,0.19666614532470703,0.19712960357666015,"[0.19552272033691406, 0.19458259582519533, 0.19525814819335938, 0.1952208251953125, 0.19430342102050782, 0.19413813781738282, 0.19416546630859374, 0.1959580841064453, 0.19724546813964844, 0.19512495422363282]",tokens/s,1311.7981033208987,kWh,5.89995248850073e-06,6.506518492640652e-07,3.905297568680133e-06,1.0455901906444928e-05,tokens/kWh,24483779.81073099,MB,1242.226688,2795.372544,0.0,2382.364672,2267.891712,s,10,13.482347045898436,1.3482347045898437,0.009345980627398406,1.3458636474609376,1.358974560546875,1.36278330078125,1.36583029296875,"[1.366592041015625, 1.358128173828125, 1.357618896484375, 1.3465054931640625, 1.343995849609375, 1.3401705322265625, 1.3360076904296876, 1.338094970703125, 1.3500115966796875, 1.3452218017578126]",tokens/s,46.72776912322969,kWh,3.966159736232915e-05,4.374376008741711e-06,2.296019614592054e-05,6.69961695169914e-05,tokens/kWh,940352.2687072745,,s,630,13.47601797485352,0.021390504721989705,0.0003820876318656627,0.021343520164489746,0.02166819190979004,0.021806668758392333,0.02317025087356569,"[0.02164735984802246, 0.02148761558532715, 0.021302431106567384, 0.02147823905944824, 0.021393760681152344, 0.021638816833496093, 0.021607936859130858, 0.022017919540405273, 0.021592256546020507, 0.021402015686035156, 0.02126032066345215, 0.02120479965209961, 0.021348480224609376, 0.02396086311340332, 0.02193824005126953, 0.021530464172363283, 0.022100000381469725, 0.021641536712646483, 0.02151683235168457, 0.021490751266479494, 0.021622880935668946, 0.02159056091308594, 0.021500223159790038, 0.02149100875854492, 
0.02160211181640625, 0.02150489616394043, 0.021538591384887694, 0.021258464813232424, 0.021401695251464844, 0.02160006332397461, 0.021449920654296874, 0.02148624038696289, 0.02144895935058594, 0.021370655059814454, 0.022085439682006835, 0.021485599517822265, 0.021446880340576173, 0.021663904190063477, 0.0215285758972168, 0.02179817581176758, 0.02151318359375, 0.02165328025817871, 0.02147337532043457, 0.021434240341186524, 0.02164031982421875, 0.021610496520996093, 0.02167078399658203, 0.021598207473754884, 0.02169267272949219, 0.02173516845703125, 0.021509920120239258, 0.021559295654296876, 0.021897119522094728, 0.022868255615234374, 0.023341344833374023, 0.022320768356323243, 0.02164691162109375, 0.021661983489990235, 0.021907743453979493, 0.021585920333862304, 0.02137696075439453, 0.021667903900146484, 0.022147232055664063, 0.02145280075073242, 0.021456640243530275, 0.021551359176635743, 0.021374656677246095, 0.02139910316467285, 0.021324031829833983, 0.02136038398742676, 0.021555551528930662, 0.021367200851440428, 0.02159119987487793, 0.021552032470703125, 0.02140153694152832, 0.021304864883422852, 0.021322208404541014, 0.021700511932373046, 0.021534048080444335, 0.02165017509460449, 0.021435871124267578, 0.021439104080200194, 0.021505952835083008, 0.021485567092895508, 0.021550336837768556, 0.02152115249633789, 0.02154607963562012, 0.021303871154785155, 0.021367136001586913, 0.021215232849121093, 0.02135856056213379, 0.02169398307800293, 0.021471744537353517, 0.021436416625976562, 0.02126028823852539, 0.021643104553222655, 0.021319583892822267, 0.02135862350463867, 0.021573856353759767, 0.0214936637878418, 0.021653472900390627, 0.021502080917358397, 0.021745664596557617, 0.021624128341674806, 0.02153321647644043, 0.021491647720336914, 0.021309663772583008, 0.021396480560302734, 0.021359712600708007, 0.02165135955810547, 0.02207744026184082, 0.02327961540222168, 0.022902496337890627, 0.021465375900268556, 0.021720991134643555, 0.02139689636230469, 0.02144735908508301, 0.021721088409423828, 0.021692384719848634, 0.021573663711547852, 0.02138444709777832, 0.021465856552124022, 0.021411104202270506, 0.02144723129272461, 0.021487775802612304, 0.02184806442260742, 0.021670848846435546, 0.02136835289001465, 0.021240543365478516, 0.021532575607299806, 0.02144595146179199, 0.0213243522644043, 0.02126844787597656, 0.021619808197021483, 0.02147190475463867, 0.021418239593505858, 0.02157529640197754, 0.021741952896118164, 0.021424127578735352, 0.021329919815063478, 0.02165951919555664, 0.021811328887939452, 0.02165760040283203, 0.021563392639160156, 0.021458112716674804, 0.021547840118408202, 0.02151203155517578, 0.021373088836669923, 0.021432319641113282, 0.02171228790283203, 0.02137353515625, 0.021270719528198243, 0.021390783309936524, 0.02127225685119629, 0.021492095947265626, 0.02128518486022949, 0.02120832061767578, 0.021359359741210938, 0.02123356819152832, 0.02141152000427246, 0.021342624664306642, 0.021381120681762695, 0.02137932777404785, 0.021606239318847656, 0.021469247817993163, 0.021405696868896484, 0.021481311798095704, 0.021309440612792968, 0.021753503799438478, 0.024639871597290038, 0.02153059196472168, 0.021395456314086913, 0.02156118392944336, 0.021522592544555665, 0.021609792709350584, 0.021499744415283205, 0.021472063064575195, 0.021589824676513672, 0.02167193603515625, 0.021901792526245117, 0.021589759826660157, 0.021561344146728514, 0.021612543106079102, 0.021830720901489256, 0.02149849510192871, 0.02142617607116699, 0.02143673515319824, 0.021588031768798827, 
0.02145692825317383, 0.02173776054382324, 0.021504064559936524, 0.02133033561706543, 0.021776512145996095, 0.02148080062866211, 0.021114816665649416, 0.021267040252685547, 0.0211079044342041, 0.021315967559814453, 0.021166528701782227, 0.02127667236328125, 0.021356544494628905, 0.02146713638305664, 0.021287200927734375, 0.02146646308898926, 0.021303680419921874, 0.021247135162353516, 0.02153331184387207, 0.021475072860717773, 0.02126870346069336, 0.021452192306518555, 0.021470048904418945, 0.021546848297119142, 0.021393440246582032, 0.02122332763671875, 0.02128713607788086, 0.02169856071472168, 0.021417503356933595, 0.02157734489440918, 0.021375936508178713, 0.021274528503417968, 0.021389440536499025, 0.021638463973999024, 0.02126518440246582, 0.02123081588745117, 0.021363264083862306, 0.02253411293029785, 0.021360671997070313, 0.02142207908630371, 0.02119647979736328, 0.021416255950927734, 0.021271839141845703, 0.021284927368164064, 0.02140166473388672, 0.021372703552246092, 0.021421920776367186, 0.02118252754211426, 0.021250175476074218, 0.02132863998413086, 0.02121116828918457, 0.021187807083129884, 0.021208927154541014, 0.02127926445007324, 0.02135843276977539, 0.021292768478393554, 0.021285024642944336, 0.02125619125366211, 0.02122003173828125, 0.02103500747680664, 0.02109244728088379, 0.02135148811340332, 0.021135295867919922, 0.021418304443359376, 0.021679040908813476, 0.0215817928314209, 0.021702655792236326, 0.021682367324829102, 0.021534528732299805, 0.021479232788085938, 0.021647552490234374, 0.02148761558532715, 0.021327360153198242, 0.02134681510925293, 0.021327871322631836, 0.021219104766845704, 0.021225055694580077, 0.021312320709228515, 0.0213656005859375, 0.021408544540405274, 0.02174995231628418, 0.021782527923583983, 0.021501951217651367, 0.021348352432250976, 0.021348480224609376, 0.021215103149414063, 0.021188608169555666, 0.021157888412475585, 0.02119203186035156, 0.021682912826538087, 0.02133328056335449, 0.02129782485961914, 0.021243904113769533, 0.021325536727905273, 0.021223968505859375, 0.021109535217285157, 0.0216135368347168, 0.02124799919128418, 0.021319679260253906, 0.021356351852416994, 0.02143052864074707, 0.02134441566467285, 0.021115936279296876, 0.021209695816040038, 0.021261728286743165, 0.020990175247192384, 0.020961887359619142, 0.02099580764770508, 0.02107209587097168, 0.021239519119262695, 0.021203231811523438, 0.021024768829345702, 0.02126643180847168, 0.021368831634521485, 0.021329919815063478, 0.021061632156372072, 0.02114371109008789, 0.0211691837310791, 0.021543712615966798, 0.021313568115234376, 0.021433855056762697, 0.021220096588134764, 0.021071168899536134, 0.02108870315551758, 0.021146879196166993, 0.02105936050415039, 0.02173807907104492, 0.02151340866088867, 0.021246335983276368, 0.02104537582397461, 0.021122400283813476, 0.021103551864624023, 0.021260480880737304, 0.021163551330566407, 0.02116649627685547, 0.021269407272338867, 0.02116531181335449, 0.021102176666259766, 0.02113065528869629, 0.0211627197265625, 0.02127052879333496, 0.02122547149658203, 0.0213721923828125, 0.02113737678527832, 0.021244319915771484, 0.021266143798828126, 0.021240447998046873, 0.021151744842529296, 0.021037055969238282, 0.020965503692626952, 0.021235584259033203, 0.021165824890136718, 0.021104799270629884, 0.020946752548217772, 0.021010496139526366, 0.021460832595825194, 0.021362239837646485, 0.021102912902832033, 0.02113497543334961, 0.021109920501708984, 0.021530336380004882, 0.0212541446685791, 0.02119702339172363, 0.021114656448364258, 
0.02103091239929199, 0.02128678321838379, 0.021501888275146486, 0.02155743980407715, 0.02170275115966797, 0.02148547172546387, 0.021392608642578127, 0.021242271423339842, 0.021346591949462892, 0.021186656951904297, 0.02157788848876953, 0.021237600326538087, 0.02124799919128418, 0.02121667289733887, 0.02111052894592285, 0.021054304122924805, 0.02112828826904297, 0.02256988716125488, 0.021503679275512694, 0.021282527923583986, 0.021215744018554687, 0.02116953659057617, 0.02127846336364746, 0.021388095855712892, 0.021356704711914063, 0.021385215759277345, 0.021418176651000976, 0.021282432556152343, 0.02139583969116211, 0.021685792922973634, 0.02124233627319336, 0.021354496002197267, 0.021149280548095704, 0.02114521598815918, 0.021336864471435547, 0.02106572723388672, 0.021110784530639647, 0.021188608169555666, 0.02106096076965332, 0.02121897506713867, 0.021167104721069335, 0.022173311233520506, 0.021247583389282225, 0.02138377571105957, 0.021409984588623046, 0.021249055862426758, 0.020925504684448242, 0.0210533447265625, 0.020911487579345703, 0.021011072158813475, 0.021054624557495117, 0.021085023880004883, 0.02085273551940918, 0.02090595245361328, 0.02084662437438965, 0.02073574447631836, 0.020855039596557618, 0.02094816017150879, 0.021125343322753905, 0.020855392456054687, 0.02090982437133789, 0.02101091194152832, 0.021054304122924805, 0.021007455825805665, 0.020963167190551756, 0.02084003257751465, 0.020817920684814452, 0.020946880340576172, 0.02123209571838379, 0.021045248031616212, 0.02105251121520996, 0.02107382392883301, 0.021187807083129884, 0.02122137641906738, 0.02122319984436035, 0.02498908805847168, 0.021465696334838868, 0.021271680831909178, 0.021285760879516603, 0.021387264251708983, 0.021202943801879884, 0.021145503997802736, 0.021049535751342774, 0.02097990417480469, 0.021116191864013673, 0.021004735946655275, 0.021017631530761718, 0.021140607833862304, 0.021262367248535155, 0.021372224807739256, 0.02122822380065918, 0.02122956848144531, 0.021174272537231444, 0.021085439682006837, 0.02121548843383789, 0.021133792877197265, 0.021129247665405273, 0.021139104843139647, 0.02115727996826172, 0.021121248245239258, 0.021082176208496093, 0.021217952728271483, 0.021176191329956056, 0.021106271743774413, 0.021045087814331054, 0.022291135787963868, 0.02206105613708496, 0.02129100799560547, 0.0212992000579834, 0.021618688583374023, 0.0212040958404541, 0.021262624740600585, 0.0211494083404541, 0.02119148826599121, 0.021526592254638672, 0.02119385528564453, 0.021050239562988283, 0.0210533447265625, 0.021176671981811522, 0.0211408634185791, 0.02143270492553711, 0.02122764778137207, 0.021221216201782227, 0.021423168182373047, 0.02131657600402832, 0.021184511184692383, 0.02111471939086914, 0.02117545509338379, 0.021081087112426757, 0.0212620792388916, 0.021169408798217774, 0.021231647491455077, 0.021402591705322264, 0.021441631317138672, 0.02117955207824707, 0.021034175872802735, 0.02103334426879883, 0.02119487953186035, 0.02129702377319336, 0.02113475227355957, 0.021003040313720703, 0.02108415985107422, 0.021102592468261717, 0.021286048889160157, 0.021187328338623048, 0.021147743225097656, 0.02093062400817871, 0.02106172752380371, 0.021270303726196288, 0.021243967056274415, 0.021362688064575194, 0.021122880935668945, 0.0218701114654541, 0.02146352005004883, 0.021186559677124024, 0.021147647857666017, 0.02126032066345215, 0.02132371139526367, 0.021392927169799805, 0.02108671951293945, 0.021385215759277345, 0.021419744491577148, 0.02213302421569824, 0.02148486328125, 0.023978687286376952, 
0.021235456466674806, 0.02146124839782715, 0.02134124755859375, 0.021076223373413087, 0.021072320938110352, 0.021417280197143555, 0.0212359676361084, 0.021133312225341795, 0.02106595230102539, 0.02097727966308594, 0.021109600067138672, 0.02134752082824707, 0.02114566421508789, 0.021074399948120118, 0.02111417579650879, 0.022088096618652343, 0.023899776458740234, 0.021408767700195314, 0.021440448760986327, 0.02162073516845703, 0.021585920333862304, 0.021362688064575194, 0.021174272537231444, 0.0211878719329834, 0.021103456497192384, 0.021196672439575195, 0.021323263168334963, 0.02110451126098633, 0.021170816421508788, 0.021202943801879884, 0.021155008316040037, 0.0211342716217041, 0.02149068832397461, 0.02148441505432129, 0.02169183921813965, 0.021385311126708984, 0.021481952667236327, 0.021372928619384765, 0.021397504806518555, 0.021391359329223633, 0.021219488143920898, 0.02119049644470215, 0.021226879119873046, 0.021392000198364257, 0.02173695945739746, 0.02195449638366699, 0.02132406425476074, 0.021163967132568358, 0.0212524471282959, 0.021141504287719725, 0.021654207229614256, 0.021307392120361326, 0.021214879989624024, 0.021287263870239256, 0.021309440612792968, 0.021271968841552736, 0.02121379280090332, 0.021374975204467773, 0.021342208862304687, 0.021212192535400392, 0.021256767272949218, 0.021219263076782225, 0.021131744384765627, 0.021315584182739256, 0.02136000061035156, 0.021167007446289063, 0.02110553550720215, 0.02114796829223633, 0.02111952018737793, 0.021216447830200196, 0.021291839599609376, 0.02122265625, 0.021809951782226562, 0.02139289665222168, 0.021436607360839844, 0.021412128448486327, 0.021335296630859375, 0.02136460876464844, 0.02125708770751953, 0.02130534362792969, 0.02109235191345215, 0.021024511337280272, 0.021238016128540038, 0.02128713607788086, 0.021180416107177736, 0.021348127365112303, 0.021542911529541017, 0.02145484733581543, 0.021432319641113282, 0.02141084861755371, 0.021750751495361327, 0.021589599609375, 0.02145871925354004, 0.02142393684387207, 0.021521215438842774, 0.02146873664855957, 0.021448640823364257, 0.021802656173706053, 0.021439615249633788, 0.02144643211364746, 0.02147724723815918, 0.021405759811401366, 0.021325824737548828, 0.02122969627380371, 0.021251968383789063, 0.021379072189331053, 0.021294111251831054, 0.021383615493774415, 0.021223968505859375, 0.021202943801879884, 0.02125209617614746, 0.021415935516357423, 0.021333055496215822]",tokens/s,46.74971502528351,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-2.7b,EleutherAI/pythia-2.7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,816.427008,6223.233024,0.0,5827.985408,5712.718848,s,1,7.35060791015625,7.35060791015625,0.0,7.35060791015625,7.35060791015625,7.35060791015625,7.35060791015625,[7.35060791015625],,kWh,1.1335553629161647e-05,1.2427354602444235e-06,4.540559187998783e-06,1.7118848277404853e-05,,MB,1105.03936,6430.851072,0.0,6025.117696,5988.31104,s,10,5.266182556152343,0.5266182556152343,0.0033533696184772214,0.5274917602539062,0.5288304748535156,0.5293293914794922,0.5297285247802734,"[0.5171857299804687, 0.5271314697265626, 0.5255582275390625, 0.5278660278320313, 0.52785205078125, 0.5267911376953125, 0.5298283081054688, 0.5287196044921875, 0.52656787109375, 0.52868212890625]",tokens/s,486.12063343858426,kWh,1.5356614839999643e-05,1.6935863205025556e-06,1.0231910963300061e-05,2.7282112123802258e-05,tokens/kWh,9383437.72059543,MB,1126.424576,6514.737152,0.0,6109.003776,6092.423168,s,10,18.62453515625,1.8624535156250002,0.0026194074933142417,1.8625668945312501,1.8650244995117187,1.8663476379394532,1.8674061486816407,"[1.8676707763671876, 1.86459619140625, 1.859385498046875, 1.8589068603515626, 1.860238037109375, 1.8620662841796876, 1.86473046875, 1.8632889404296875, 1.8605845947265625, 1.8630675048828125]",tokens/s,33.82634759550416,kWh,5.470010627666928e-05,6.032732448828807e-06,3.625929289629987e-05,9.699213162179796e-05,tokens/kWh,649537.224789082,,s,630,18.621239629745485,0.029557523221818226,0.0002972871238932349,0.029545472145080565,0.029829120635986327,0.029975936222076418,0.03106435123443604,"[0.03123980712890625, 0.030093664169311522, 0.029759103775024415, 0.02931350326538086, 0.029312448501586916, 0.02931052780151367, 0.029238176345825196, 0.0293021125793457, 0.02966793632507324, 0.029489120483398436, 0.029528160095214844, 0.029437952041625977, 0.02928438377380371, 0.029335519790649415, 0.029214719772338867, 0.029234560012817382, 0.029180543899536133, 0.02935603141784668, 0.029238847732543944, 0.029530559539794922, 0.029437952041625977, 0.029650943756103516, 0.030299264907836913, 0.029453184127807615, 0.02954649543762207, 0.029711648941040038, 0.029430496215820313, 0.02941043281555176, 0.029365119934082032, 0.029321216583251954, 0.029754560470581056, 0.02950227165222168, 0.029672639846801758, 0.029654848098754884, 0.02979532814025879, 0.029829120635986327, 0.02959116744995117, 0.029962623596191406, 0.029722528457641603, 0.02966102409362793, 0.0296646728515625, 0.02979702377319336, 0.029825183868408205, 0.02982048034667969, 0.02979478454589844, 0.029757312774658203, 0.029504928588867187, 0.029434207916259766, 0.029677919387817383, 0.029751327514648436, 0.029405183792114258, 0.029521791458129883, 0.029572544097900392, 0.029698751449584962, 0.029912288665771485, 0.029696352005004884, 0.029651039123535155, 0.030071136474609374, 0.030113792419433592, 0.029861888885498046, 0.03012403106689453, 0.029782112121582032, 0.030015487670898438, 0.031291391372680666, 0.0302838077545166, 0.029650911331176758, 0.02939289665222168, 0.029255680084228516, 0.029275264739990235, 0.029143936157226564, 0.029147136688232423, 0.029216768264770508, 0.029492639541625978, 0.02929315185546875, 0.029251071929931642, 0.02932793617248535, 0.029304447174072264, 0.029268287658691407, 0.02928963279724121, 0.029251840591430663, 0.029573728561401367, 0.02946832084655762, 0.029384384155273436, 0.029442720413208008, 0.029511295318603515, 0.02957548713684082, 0.029345855712890626, 0.029420576095581054, 0.02939798355102539, 0.029380640029907226, 0.02952739143371582, 
0.029395360946655274, 0.02963587188720703, 0.029402048110961913, 0.02954982376098633, 0.029772544860839845, 0.030074880599975585, 0.02993312072753906, 0.029774080276489256, 0.029765312194824218, 0.029778144836425782, 0.029794591903686524, 0.02962985610961914, 0.029843488693237306, 0.029688095092773436, 0.029591840744018556, 0.029642751693725586, 0.029638656616210936, 0.0297205753326416, 0.029634559631347656, 0.02953798484802246, 0.029428031921386717, 0.02968780708312988, 0.029706239700317383, 0.02970419120788574, 0.029681024551391603, 0.02964134407043457, 0.029714431762695313, 0.02998886489868164, 0.02958745574951172, 0.029742975234985352, 0.029710464477539063, 0.029814687728881836, 0.02962441635131836, 0.029609983444213867, 0.029659135818481445, 0.031364992141723634, 0.030175071716308594, 0.029565088272094725, 0.029237119674682618, 0.029116416931152345, 0.02909401512145996, 0.029038591384887694, 0.029128704071044922, 0.02931711959838867, 0.029304800033569337, 0.02952400016784668, 0.029266176223754884, 0.02921651268005371, 0.029190143585205077, 0.029216768264770508, 0.029212671279907225, 0.0291778564453125, 0.029427711486816405, 0.029310976028442383, 0.029279392242431642, 0.029344608306884765, 0.029425664901733397, 0.02947465515136719, 0.029384864807128906, 0.029349279403686524, 0.02930134391784668, 0.02930073547363281, 0.029294591903686523, 0.029398752212524415, 0.029543872833251952, 0.02957542419433594, 0.029645248413085936, 0.029735071182250977, 0.029857791900634766, 0.029857696533203124, 0.02988607978820801, 0.029718175888061523, 0.029610815048217772, 0.02951795196533203, 0.029519359588623048, 0.029373823165893556, 0.029500640869140626, 0.02950918388366699, 0.0295218563079834, 0.02955606460571289, 0.029524927139282228, 0.029665279388427734, 0.0294946231842041, 0.02952668762207031, 0.029593599319458007, 0.029605024337768553, 0.029560928344726563, 0.02965376091003418, 0.02953011131286621, 0.029550592422485353, 0.029478368759155275, 0.02959414482116699, 0.02960588836669922, 0.029568960189819336, 0.02961414337158203, 0.029752960205078127, 0.029723007202148436, 0.02967331123352051, 0.03100652885437012, 0.029876415252685546, 0.029519744873046875, 0.02930086326599121, 0.029236831665039063, 0.029284448623657228, 0.029288768768310547, 0.029230464935302736, 0.029237184524536133, 0.02929475212097168, 0.029342239379882812, 0.029247488021850586, 0.029351936340332032, 0.02918956756591797, 0.0291777286529541, 0.029239999771118165, 0.029250783920288084, 0.029338399887084962, 0.029269983291625976, 0.029337631225585938, 0.02918364715576172, 0.029255647659301758, 0.029260160446166993, 0.029254976272583007, 0.0292872314453125, 0.02935385513305664, 0.029417152404785155, 0.02935753631591797, 0.029416288375854492, 0.0294168643951416, 0.02959823989868164, 0.030027999877929687, 0.029759328842163087, 0.029698047637939453, 0.029797792434692383, 0.029878879547119142, 0.029638656616210936, 0.029663232803344725, 0.02955264091491699, 0.02958950424194336, 0.029490976333618163, 0.029546911239624024, 0.02951968002319336, 0.029634176254272462, 0.029423999786376952, 0.029462528228759766, 0.029378559112548826, 0.029365951538085938, 0.029335872650146484, 0.029408607482910156, 0.029678239822387695, 0.029817951202392577, 0.029723072052001955, 0.029770111083984373, 0.029704288482666017, 0.029566719055175782, 0.029573312759399416, 0.02957948875427246, 0.029576448440551757, 0.02954204750061035, 0.02964371109008789, 0.02959974479675293, 0.029824607849121092, 0.030799072265625, 0.02980944061279297, 0.029378400802612305, 
0.02924995231628418, 0.02919808006286621, 0.029224960327148438, 0.029270015716552734, 0.0293703670501709, 0.0293253116607666, 0.029347328186035155, 0.02927462387084961, 0.029212671279907225, 0.02940835189819336, 0.029295520782470705, 0.029220767974853516, 0.029173856735229493, 0.029187999725341796, 0.02925168037414551, 0.029181951522827147, 0.02933977508544922, 0.029207904815673827, 0.029323808670043944, 0.029344768524169923, 0.029274560928344726, 0.02928611183166504, 0.029478944778442383, 0.029307104110717772, 0.02960223960876465, 0.029628576278686522, 0.029541824340820314, 0.029673120498657227, 0.02952272033691406, 0.02971558380126953, 0.029692928314208986, 0.02971238327026367, 0.029676992416381835, 0.029681440353393554, 0.029755807876586913, 0.029501823425292967, 0.029607936859130858, 0.02959062385559082, 0.029547168731689454, 0.029645055770874024, 0.029792255401611328, 0.029620223999023438, 0.029585535049438477, 0.029525888442993163, 0.029518848419189454, 0.029502464294433595, 0.029687007904052733, 0.02963462448120117, 0.029596000671386718, 0.029440383911132812, 0.029632448196411133, 0.029568544387817385, 0.029694528579711915, 0.029650911331176758, 0.02976723289489746, 0.029716447830200197, 0.029723104476928712, 0.029822975158691405, 0.02978611183166504, 0.029779136657714842, 0.03121183967590332, 0.03016694450378418, 0.029497440338134766, 0.02939388847351074, 0.02922300720214844, 0.02927299118041992, 0.02936832046508789, 0.029469919204711915, 0.029307680130004884, 0.029218368530273438, 0.02916806411743164, 0.0291409912109375, 0.029237247467041014, 0.029373472213745117, 0.02939523124694824, 0.029214719772338867, 0.02924799919128418, 0.029243167877197267, 0.029301151275634766, 0.029332735061645507, 0.029295295715332032, 0.029194303512573242, 0.029262943267822264, 0.029403968811035155, 0.029406848907470702, 0.029581632614135742, 0.029376640319824218, 0.029392927169799805, 0.029533599853515623, 0.029549152374267577, 0.029582815170288088, 0.02957776069641113, 0.029577215194702147, 0.029904064178466798, 0.02993235206604004, 0.029859071731567384, 0.02971468734741211, 0.02971820831298828, 0.02969385528564453, 0.029622623443603516, 0.029655616760253908, 0.029677536010742186, 0.029589536666870118, 0.02955673599243164, 0.029671424865722655, 0.02967046356201172, 0.029623231887817382, 0.029676959991455077, 0.029589151382446287, 0.029673759460449218, 0.02955740737915039, 0.029548479080200196, 0.029523359298706055, 0.02970419120788574, 0.02959552001953125, 0.02959974479675293, 0.029591487884521483, 0.029707103729248046, 0.029650943756103516, 0.029663232803344725, 0.029640703201293944, 0.029665279388427734, 0.02994790458679199, 0.031087968826293947, 0.03007535934448242, 0.029534591674804687, 0.02938012886047363, 0.02923356819152832, 0.029173824310302736, 0.029220224380493164, 0.02917849540710449, 0.029082944869995117, 0.029181663513183593, 0.029395744323730467, 0.029335264205932618, 0.029310976028442383, 0.029612096786499023, 0.029304479598999022, 0.029360895156860353, 0.029370431900024415, 0.029519807815551757, 0.029470720291137696, 0.02940880012512207, 0.029353696823120116, 0.02947052764892578, 0.02948601531982422, 0.029476863861083984, 0.02946227264404297, 0.02934982490539551, 0.02941276741027832, 0.029491167068481445, 0.029496063232421876, 0.029532384872436524, 0.02962784004211426, 0.02964739227294922, 0.02977984046936035, 0.029792383193969728, 0.029949951171875, 0.030045824050903322, 0.029994464874267577, 0.029977088928222657, 0.029831584930419923, 0.029569055557250975, 0.029677536010742186, 
0.029697439193725587, 0.029565311431884764, 0.02966927909851074, 0.029509248733520507, 0.02956972885131836, 0.029517824172973633, 0.029587039947509764, 0.029468095779418946, 0.029522911071777343, 0.029605728149414062, 0.02965315246582031, 0.02958896064758301, 0.029678112030029298, 0.029775808334350586, 0.029788127899169924, 0.02968502426147461, 0.029637439727783203, 0.029717632293701172, 0.029637504577636718, 0.02994790458679199, 0.02997452735900879, 0.029928831100463866, 0.031112192153930664, 0.02998159980773926, 0.029485151290893553, 0.02937651252746582, 0.029343744277954102, 0.029326944351196288, 0.029262208938598634, 0.029322496414184572, 0.029220863342285155, 0.029161663055419923, 0.02926838493347168, 0.029343936920166017, 0.029327199935913085, 0.0292475528717041, 0.029377759933471678, 0.029299455642700194, 0.029251712799072266, 0.02929840087890625, 0.029220863342285155, 0.029360416412353516, 0.030070783615112305, 0.0293703670501709, 0.02963852882385254, 0.029519775390625, 0.0296080322265625, 0.02950752067565918, 0.029534400939941405, 0.029593088150024413, 0.029485504150390626, 0.029362239837646485, 0.029406368255615236, 0.02982793617248535, 0.029873695373535156, 0.02983500862121582, 0.02966912078857422, 0.029721567153930664, 0.029515104293823244, 0.02961193656921387, 0.02956159973144531, 0.02953011131286621, 0.029519872665405275, 0.029568864822387696, 0.029436063766479493, 0.029437856674194338, 0.02967955207824707, 0.029578592300415037, 0.02971321678161621, 0.029633983612060547, 0.029636703491210937, 0.029701663970947267, 0.029619136810302735, 0.029679616928100585, 0.02962403106689453, 0.029741344451904298, 0.029716480255126954, 0.029670495986938477, 0.029696800231933593, 0.029702207565307618, 0.029652223587036133, 0.029772480010986327, 0.029691520690917968, 0.029858047485351563, 0.029806848526000976, 0.031124223709106447, 0.030045759201049804, 0.02960633659362793, 0.0293621768951416, 0.029328895568847657, 0.029204992294311522, 0.029104320526123047, 0.029138751983642578, 0.029261823654174804, 0.029239295959472656, 0.02934377670288086, 0.02923491287231445, 0.029149440765380858, 0.029267967224121092, 0.029310400009155274, 0.029254207611083983, 0.02921062469482422, 0.02933964729309082, 0.029272064208984375, 0.029214208602905273, 0.02922694396972656, 0.029364479064941405, 0.02941574478149414, 0.02934988784790039, 0.0294071044921875, 0.029491327285766603, 0.02954444885253906, 0.02952351951599121, 0.029676095962524414, 0.029495168685913085, 0.02955459213256836, 0.029669471740722656, 0.029718528747558592, 0.029829120635986327, 0.029702144622802733, 0.029898719787597658, 0.02986358451843262, 0.029700128555297852, 0.02950793647766113, 0.029493247985839844, 0.029435808181762696, 0.029511775970458985, 0.029441696166992187, 0.029618528366088866, 0.029526016235351563, 0.029620031356811523, 0.029521663665771483, 0.029565376281738283, 0.029572544097900392, 0.029684127807617186, 0.02950364875793457, 0.02961750411987305, 0.029524639129638673, 0.02953763198852539, 0.029554847717285157, 0.029608448028564452, 0.029613567352294923, 0.02961609649658203, 0.029682207107543945, 0.029634559631347656, 0.029569023132324217, 0.02993561553955078, 0.02993270492553711, 0.030819936752319334, 0.02989481544494629, 0.029397247314453125, 0.029251552581787108, 0.02933660888671875, 0.02938982391357422, 0.029271263122558594, 0.029170272827148437, 0.02920822334289551, 0.02923980712890625, 0.02923936080932617, 0.029282272338867186, 0.029395040512084962, 0.029267871856689453, 0.029279935836791993, 0.02921504020690918, 
0.02913443183898926, 0.029301151275634766, 0.02936835289001465, 0.029393983840942384, 0.029416351318359374, 0.029451583862304686, 0.029457088470458984, 0.029353599548339843, 0.029342079162597658, 0.029433088302612306, 0.029470624923706053, 0.029588287353515624, 0.029618207931518554, 0.029620223999023438, 0.029693151473999025, 0.02974799919128418, 0.029954048156738283, 0.030062591552734375, 0.030099456787109374, 0.029988447189331056, 0.0298721923828125, 0.029661184310913087, 0.02955094337463379, 0.029414432525634766, 0.02942460823059082, 0.029503328323364258, 0.029670751571655274, 0.029608768463134767, 0.029612031936645508, 0.029589056015014648, 0.029685983657836913, 0.029621631622314452, 0.029530975341796876, 0.029726240158081056, 0.029606367111206056, 0.02965456008911133, 0.02953878402709961, 0.02976095962524414, 0.029674047470092772, 0.02977382469177246, 0.029826271057128907, 0.02976028823852539, 0.02957107162475586, 0.029609983444213867, 0.029665279388427734, 0.029892608642578124, 0.02977382469177246]",tokens/s,33.832334072627525,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen2-beta-72B,Qwen/Qwen2-beta-72B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 1087, in __init__ self.model = Qwen2Model(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in __init__ [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 870, in [Qwen2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 659, in __init__ self.mlp = Qwen2MLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/qwen2/modeling_qwen2.py"", line 271, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 54.12 MiB is free. Process 88322 has 14.69 GiB memory in use. Of the allocated memory 14.57 GiB is allocated by PyTorch, and 1.40 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-0.5B,Qwen/Qwen1.5-0.5B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.345856,1903.099904,0.0,1507.852288,1469.840384,s,1,7.574193359375,7.574193359375,0.0,7.574193359375,7.574193359375,7.574193359375,7.574193359375,[7.574193359375],,kWh,9.962895483340617e-06,1.0918203991698297e-06,4.525003619984536e-06,1.5579719502494982e-05,,MB,1136.771072,1945.042944,0.0,1539.309568,1426.272256,s,10,0.8855196151733398,0.08855196151733398,0.0016090846363628177,0.08803173065185546,0.08919951705932616,0.09123092689514159,0.09285605476379395,"[0.09326233673095703, 0.08814620971679688, 0.08739705657958985, 0.08817842864990234, 0.08787843322753906, 0.08791725158691406, 0.0877531509399414, 0.08840086364746094, 0.08874809265136718, 0.0878377914428711]",tokens/s,2890.9579823354698,kWh,2.9496639118685594e-06,3.252961394549608e-07,1.9088932218383453e-06,5.183853273161865e-06,tokens/kWh,49384113.80688136,MB,1157.967872,1953.431552,0.0,1547.698176,1426.274816,s,10,14.043275268554689,1.404327526855469,0.012428507222118449,1.4076810913085938,1.4179456909179688,1.4183011535644532,1.4185855236816407,"[1.393094970703125, 1.39147119140625, 
1.38444921875, 1.4186566162109375, 1.41786669921875, 1.41556591796875, 1.408018310546875, 1.4160279541015626, 1.4073438720703124, 1.390780517578125]",tokens/s,44.86132956538127,kWh,4.052118441813273e-05,4.469144923210777e-06,2.0030318773961722e-05,6.502064811530524e-05,tokens/kWh,968922.9779481451,,s,630,14.040806724548343,0.02228699480087038,0.00039382153057359665,0.022294223785400392,0.022652210426330564,0.022793475914001465,0.023854936161041264,"[0.02276905632019043, 0.022347488403320313, 0.022135679244995116, 0.02208131217956543, 0.021897472381591798, 0.022273632049560548, 0.022090112686157227, 0.02188697624206543, 0.02225766372680664, 0.022239231109619142, 0.022173696517944336, 0.021958656311035156, 0.02175103950500488, 0.021725631713867186, 0.021708959579467772, 0.021919904708862306, 0.022388736724853517, 0.022035743713378905, 0.021906400680541994, 0.0221910400390625, 0.021979135513305666, 0.022092607498168944, 0.022089279174804688, 0.02203487968444824, 0.022585344314575196, 0.021990560531616212, 0.02237696075439453, 0.022356063842773437, 0.022300928115844727, 0.022046720504760742, 0.02191708755493164, 0.022030208587646486, 0.02173103904724121, 0.021926912307739257, 0.02194755172729492, 0.021863264083862306, 0.021915456771850587, 0.02210361671447754, 0.02218422317504883, 0.02212284851074219, 0.022164928436279298, 0.02228486442565918, 0.022417407989501953, 0.022269952774047853, 0.02232729530334473, 0.02270412826538086, 0.02224947166442871, 0.022744287490844728, 0.02202908706665039, 0.02183932876586914, 0.022123039245605467, 0.022337440490722657, 0.02191574478149414, 0.022032384872436524, 0.02206924819946289, 0.021914655685424805, 0.021926816940307618, 0.022188095092773436, 0.021849119186401367, 0.021767135620117187, 0.02208358383178711, 0.022188032150268554, 0.022099967956542968, 0.02285206413269043, 0.022132736206054687, 0.02199510383605957, 0.022054399490356445, 0.02241219139099121, 0.023459487915039063, 0.02306083106994629, 0.022795808792114257, 0.022185951232910155, 0.022089759826660157, 0.0220164794921875, 0.021870559692382812, 0.022064735412597656, 0.02198512077331543, 0.021995391845703125, 0.022073663711547852, 0.022385055541992188, 0.02206924819946289, 0.021943935394287108, 0.021918079376220704, 0.021815296173095702, 0.02389606475830078, 0.022386688232421875, 0.021947872161865233, 0.022007904052734374, 0.021940671920776367, 0.02218943977355957, 0.02193471908569336, 0.022420608520507812, 0.022192640304565428, 0.021930368423461914, 0.021805055618286134, 0.02172313690185547, 0.02189107131958008, 0.022336736679077148, 0.02225027275085449, 0.02271353530883789, 0.02220524787902832, 0.022034431457519533, 0.0221276798248291, 0.021735647201538085, 0.021674463272094727, 0.021745759963989256, 0.02195804786682129, 0.021891359329223634, 0.02196726417541504, 0.021741632461547852, 0.021600255966186522, 0.022179840087890625, 0.02168828773498535, 0.021672128677368164, 0.022060895919799806, 0.022136831283569337, 0.021991424560546875, 0.02185215950012207, 0.02182963180541992, 0.02215116882324219, 0.02192915153503418, 0.021862432479858397, 0.021592031478881835, 0.02157651138305664, 0.02167807960510254, 0.02163711929321289, 0.022790624618530275, 0.021755903244018555, 0.021681184768676757, 0.02170159912109375, 0.021684415817260744, 0.02221558380126953, 0.021738399505615236, 0.022054912567138672, 0.021784000396728516, 0.0218670711517334, 0.021940223693847655, 0.021851680755615235, 0.02163145637512207, 0.02167398452758789, 0.02165551948547363, 0.021503679275512694, 0.02154924774169922, 
0.021536928176879883, 0.021546464920043945, 0.021635488510131837, 0.02168025588989258, 0.021616544723510742, 0.021865760803222656, 0.021982303619384767, 0.02193142318725586, 0.02173129653930664, 0.021635168075561522, 0.021795360565185547, 0.021614240646362304, 0.021717023849487305, 0.021874208450317383, 0.02236400032043457, 0.022362783432006837, 0.022111583709716796, 0.021967519760131837, 0.022132736206054687, 0.02184601593017578, 0.021690336227416993, 0.021731359481811523, 0.02166281509399414, 0.021746591567993166, 0.021621824264526367, 0.021638015747070312, 0.02162486457824707, 0.021712703704833983, 0.021663808822631837, 0.02174991989135742, 0.021768192291259765, 0.021819263458251952, 0.02205299186706543, 0.02209382438659668, 0.022302047729492187, 0.022604448318481445, 0.02250102424621582, 0.023230815887451173, 0.02395132827758789, 0.022502559661865235, 0.02249817657470703, 0.022576223373413085, 0.022393280029296875, 0.022394336700439454, 0.022252544403076172, 0.022376447677612304, 0.023058080673217775, 0.022436511993408202, 0.022483104705810546, 0.022625600814819336, 0.022198368072509765, 0.022518367767333985, 0.022394880294799805, 0.022455808639526367, 0.022469120025634767, 0.022617984771728515, 0.022524032592773437, 0.022269952774047853, 0.02224127960205078, 0.02234163284301758, 0.022216512680053712, 0.022230239868164064, 0.022352863311767578, 0.022339584350585938, 0.02227609634399414, 0.02229862403869629, 0.02225152015686035, 0.02217532730102539, 0.022220544815063477, 0.022163103103637696, 0.02253926467895508, 0.022458368301391602, 0.023175071716308594, 0.023226335525512697, 0.022761152267456054, 0.022294111251831054, 0.02233942413330078, 0.02265190315246582, 0.02249728012084961, 0.022640159606933594, 0.022714847564697264, 0.025616384506225585, 0.02323420715332031, 0.022290239334106444, 0.02211484718322754, 0.022394880294799805, 0.022388736724853517, 0.022412960052490234, 0.022149471282958983, 0.022260896682739256, 0.022500160217285157, 0.02245430374145508, 0.022490432739257812, 0.023119647979736327, 0.022823423385620118, 0.022329727172851564, 0.02251094436645508, 0.022325952529907225, 0.022376447677612304, 0.02256889533996582, 0.02236422348022461, 0.022237184524536133, 0.02232294464111328, 0.022538496017456056, 0.02231648063659668, 0.0224835205078125, 0.02234761619567871, 0.022263967514038085, 0.022730752944946288, 0.022818559646606444, 0.022527711868286133, 0.022376991271972655, 0.022614015579223632, 0.022361471176147462, 0.022319616317749022, 0.02242777633666992, 0.024313343048095702, 0.024013311386108398, 0.02243315124511719, 0.0225798397064209, 0.022568960189819336, 0.022339040756225587, 0.022268447875976562, 0.022709760665893555, 0.022370336532592773, 0.022385120391845703, 0.022476383209228516, 0.022256032943725586, 0.022621728897094726, 0.022294336318969727, 0.022172319412231446, 0.022203744888305663, 0.022608543395996095, 0.022318304061889647, 0.022270751953125, 0.022501375198364256, 0.022623584747314452, 0.02252457618713379, 0.022332704544067383, 0.02242972755432129, 0.02259424018859863, 0.02265497589111328, 0.022255903244018556, 0.02257891273498535, 0.02246451187133789, 0.022420896530151366, 0.022642463684082032, 0.022350175857543945, 0.022372800827026366, 0.022321056365966797, 0.02235331153869629, 0.022323776245117187, 0.022300479888916015, 0.022298976898193358, 0.022301984786987306, 0.022430431365966796, 0.02242870330810547, 0.023782367706298827, 0.022384639739990234, 0.022468608856201173, 0.022405120849609376, 0.022693727493286135, 0.02232048034667969, 
0.022401599884033202, 0.022456575393676757, 0.02232953643798828, 0.022341407775878907, 0.0222761287689209, 0.022481983184814452, 0.0222379207611084, 0.022356191635131837, 0.022468128204345704, 0.02289449691772461, 0.022697216033935548, 0.022674272537231446, 0.02261724853515625, 0.022661983489990236, 0.02253824043273926, 0.022392704010009767, 0.022351200103759766, 0.022364959716796876, 0.02268569564819336, 0.022662559509277345, 0.022364160537719727, 0.022413375854492188, 0.022536447525024414, 0.022421728134155272, 0.022339136123657226, 0.022247936248779295, 0.022237184524536133, 0.022495231628417968, 0.022433792114257813, 0.022585344314575196, 0.023002111434936523, 0.022559743881225586, 0.022480863571166992, 0.022325279235839844, 0.022558719635009765, 0.022511615753173828, 0.02247270393371582, 0.02236947250366211, 0.02255135917663574, 0.022509567260742186, 0.022329696655273436, 0.0224880313873291, 0.022534847259521484, 0.022196224212646484, 0.022179840087890625, 0.0224399356842041, 0.02272051239013672, 0.022430816650390626, 0.022321279525756837, 0.022450624465942384, 0.022301023483276367, 0.022478143692016603, 0.0224385929107666, 0.02243174362182617, 0.02243174362182617, 0.022579200744628908, 0.022388256072998047, 0.022551008224487305, 0.02246784019470215, 0.022472991943359374, 0.022351551055908202, 0.022077600479125978, 0.02275596809387207, 0.022779903411865234, 0.023166015625, 0.022340543746948244, 0.022200511932373046, 0.022136640548706055, 0.022357440948486327, 0.02224799919128418, 0.022161312103271484, 0.022155359268188478, 0.023228960037231447, 0.02249772834777832, 0.02221776008605957, 0.022274208068847657, 0.022690624237060548, 0.022362112045288086, 0.02231430435180664, 0.022206207275390626, 0.02275833511352539, 0.0224682559967041, 0.0221146240234375, 0.022351743698120118, 0.02217795181274414, 0.02223209571838379, 0.022152191162109376, 0.022208480834960936, 0.022726655960083008, 0.02239481544494629, 0.022226848602294923, 0.022134944915771483, 0.022402624130249023, 0.022571456909179687, 0.022457759857177736, 0.022146656036376954, 0.0221246395111084, 0.022731327056884767, 0.022401376724243163, 0.02246985626220703, 0.02228630447387695, 0.022487871170043944, 0.02231500816345215, 0.02240246391296387, 0.022377056121826173, 0.02216534423828125, 0.022248640060424804, 0.02211859130859375, 0.021860511779785156, 0.02221939277648926, 0.022689184188842772, 0.02243235206604004, 0.022142303466796874, 0.021970720291137696, 0.022243392944335937, 0.022143808364868164, 0.02215705680847168, 0.022161376953125, 0.022280479431152345, 0.02239897537231445, 0.022326656341552734, 0.02226598358154297, 0.022306880950927734, 0.02246905517578125, 0.02248908805847168, 0.02245631980895996, 0.02255820846557617, 0.022430208206176756, 0.022372352600097657, 0.022390783309936522, 0.022220800399780274, 0.022231039047241212, 0.022228031158447265, 0.022412223815917967, 0.02248089599609375, 0.0228985595703125, 0.022280736923217772, 0.022221183776855467, 0.022495231628417968, 0.022648832321166993, 0.02262835121154785, 0.022403072357177735, 0.022374399185180666, 0.022321151733398437, 0.02251753616333008, 0.0225118408203125, 0.022304479598999023, 0.022325088500976562, 0.02233184051513672, 0.02218707275390625, 0.022277055740356447, 0.022450176239013672, 0.022352991104125978, 0.02260188865661621, 0.02215769577026367, 0.02207974433898926, 0.02229055976867676, 0.022681600570678712, 0.022431232452392577, 0.02226777648925781, 0.022497215270996095, 0.022332096099853517, 0.024602624893188478, 0.022839296340942384, 0.02269932746887207, 
0.022942399978637694, 0.022514944076538087, 0.02251238441467285, 0.02243174362182617, 0.02228223991394043, 0.022262815475463868, 0.022369247436523437, 0.02216134452819824, 0.022052928924560546, 0.022062431335449217, 0.02266339111328125, 0.02261235237121582, 0.022491199493408203, 0.0221265926361084, 0.022196224212646484, 0.022079488754272462, 0.02225971221923828, 0.02238198471069336, 0.022532928466796876, 0.022451583862304687, 0.022587263107299804, 0.023884576797485353, 0.02358246421813965, 0.022570016860961915, 0.022388927459716795, 0.02206800079345703, 0.022353536605834962, 0.02200739288330078, 0.022249568939208986, 0.02269046401977539, 0.022493215560913087, 0.022248992919921873, 0.022251264572143555, 0.022806144714355468, 0.022405664443969728, 0.022208063125610352, 0.022696224212646485, 0.022165216445922852, 0.022367807388305665, 0.022266592025756836, 0.022443296432495118, 0.022090463638305663, 0.022334943771362303, 0.022641023635864257, 0.022451616287231444, 0.02231068801879883, 0.022376895904541016, 0.02237289619445801, 0.022471904754638672, 0.022299423217773437, 0.022335487365722655, 0.022421503067016603, 0.022495391845703126, 0.022363391876220703, 0.022194784164428712, 0.022278144836425783, 0.022220800399780274, 0.02221820831298828, 0.022380224227905275, 0.02215407943725586, 0.02246806335449219, 0.022080032348632813, 0.022640640258789063, 0.022515071868896484, 0.02261225509643555, 0.022210912704467775, 0.022328575134277343, 0.02219843292236328, 0.022366815567016602, 0.022392831802368163, 0.022273887634277345, 0.02215888023376465, 0.02235228729248047, 0.022188255310058594, 0.02225971221923828, 0.02200134468078613, 0.022090047836303712, 0.022314399719238282, 0.022200927734375, 0.022147071838378905, 0.022050111770629884, 0.02229097557067871, 0.022304256439208983, 0.022566688537597655, 0.02243673515319824, 0.02241472053527832, 0.022432384490966798, 0.022421375274658203, 0.02226380729675293, 0.02237225532531738, 0.022184160232543944, 0.022525056838989258, 0.02232963180541992, 0.022243040084838867, 0.0224899845123291, 0.022179840087890625, 0.02281046485900879, 0.022216512680053712, 0.02213055992126465, 0.02256697654724121, 0.022438304901123047, 0.022362112045288086, 0.022208511352539064, 0.02233263969421387, 0.022172447204589843, 0.02232048034667969, 0.02208835220336914, 0.022054399490356445, 0.022006240844726563, 0.022118431091308594, 0.02178483200073242, 0.021734495162963868, 0.022206655502319338, 0.022274431228637696, 0.02213692855834961, 0.022119775772094726, 0.02189788818359375, 0.02198080062866211, 0.022495359420776368, 0.02198758316040039, 0.022261728286743165, 0.022109472274780273, 0.02275542449951172, 0.021857152938842772, 0.02172496032714844, 0.021753408432006835, 0.022812255859375, 0.02186262321472168, 0.02197977638244629, 0.023086336135864256, 0.021996288299560546, 0.02190745544433594, 0.021820703506469728, 0.02175424003601074, 0.022784351348876953, 0.02182931137084961, 0.021760095596313478, 0.021586143493652343, 0.021700607299804688, 0.0216944637298584, 0.021934080123901366, 0.02190336036682129, 0.021998783111572266, 0.02204345512390137, 0.021993471145629884, 0.02205081558227539, 0.022046367645263673, 0.021946016311645507, 0.021914304733276366, 0.02186614418029785, 0.02183612823486328, 0.02255023956298828, 0.02195484733581543, 0.021850208282470703, 0.021946144104003907, 0.021823616027832032, 0.021792768478393554, 0.021712896347045898, 0.021914976119995117]",tokens/s,44.869216730868835,, 
bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,qwen2,Qwen/Qwen1.5-1.8B,Qwen/Qwen1.5-1.8B,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,836.452352,4675.534848,0.0,4280.287232,4115.121152,s,1,7.65303857421875,7.65303857421875,0.0,7.65303857421875,7.65303857421875,7.65303857421875,7.65303857421875,[7.65303857421875],,kWh,1.0351801950006727e-05,1.1340980008129043e-06,4.7408371259966e-06,1.622673707681623e-05,,MB,1142.31296,4981.71904,0.0,4575.985664,4408.408064,s,10,3.030411315917968,0.3030411315917969,0.002580261041039593,0.3030240173339844,0.3054149444580078,0.3060226119995117,0.30650874603271483,"[0.2980228271484375, 0.29910711669921874, 0.30268963623046874, 0.30315701293945313, 0.3052799072265625, 0.3048192138671875, 0.3028910217285156, 0.30517254638671876, 0.30264175415039063, 0.3066302795410156]",tokens/s,844.7698127818426,kWh,9.061008412627508e-06,9.98694762992155e-07,5.996208500666343e-06,1.6055911676286006e-05,tokens/kWh,15944283.025553923,MB,1163.50976,4981.71904,0.0,4575.985664,4408.410624,s,10,14.811874389648438,1.4811874389648438,0.010192541895537985,1.4821577758789062,1.491000354003906,1.492011590576172,1.4928205798339844,"[1.458283935546875, 1.4831822509765624, 1.4930228271484376, 1.48113330078125, 1.489197265625, 1.478267822265625, 1.4705389404296876, 1.490775634765625, 1.4900311279296874, 1.4774412841796876]",tokens/s,42.533441982217155,kWh,4.301731266653926e-05,4.745208429294184e-06,2.843334682073334e-05,7.619586791656678e-05,tokens/kWh,826816.4891695172,,s,630,14.80945721435548,0.02350707494342138,0.0003652562417825801,0.02352883243560791,0.023837715148925784,0.024032551956176755,0.024826040382385256,"[0.024179264068603514, 0.02332838439941406, 0.02325542449951172, 0.023571935653686524, 0.023126623153686524, 0.022810495376586915, 0.02295327949523926, 0.022874719619750978, 0.02343731117248535, 0.023152799606323243, 0.023086463928222656, 0.023060895919799804, 0.022964096069335936, 0.022962528228759764, 0.022966272354125978, 0.022945535659790038, 0.02320518493652344, 0.02302867126464844, 0.02311577606201172, 0.023164735794067384, 0.02349056053161621, 0.02337606430053711, 0.02351286315917969, 0.023074464797973634, 0.023984703063964843, 0.022965503692626954, 0.0229117431640625, 0.022939104080200196, 0.022780384063720703, 0.02317888069152832, 0.022918912887573244, 0.02284547233581543, 0.022947872161865234, 0.02283955192565918, 0.022862207412719725, 0.022763519287109374, 0.022816064834594727, 0.02278223991394043, 0.023113536834716796, 0.022927967071533203, 0.02285971260070801, 0.022884416580200195, 0.024978752136230468, 0.024314559936523438, 0.023011007308959962, 0.023246335983276366, 0.02304083251953125, 0.02294169616699219, 0.02297622489929199, 0.02327516746520996, 0.02326927947998047, 0.0232138557434082, 0.023081920623779298, 0.02295199966430664, 0.023014495849609375, 0.02323747253417969, 0.023005247116088867, 0.022987775802612305, 0.02320649528503418, 0.023129600524902344, 0.023249759674072265, 0.023013376235961915, 0.022957727432250975, 
0.02403536033630371, 0.023434751510620116, 0.023245279312133788, 0.0235479679107666, 0.024457151412963868, 0.02391766357421875, 0.02341731262207031, 0.023945087432861327, 0.023502399444580078, 0.02369843292236328, 0.023676288604736327, 0.02342732810974121, 0.02323289680480957, 0.023358463287353515, 0.023931327819824218, 0.02350761604309082, 0.023482271194458008, 0.02353561592102051, 0.023469823837280274, 0.023475456237792968, 0.02337436866760254, 0.023605728149414064, 0.02333286476135254, 0.023357376098632813, 0.023224384307861327, 0.02347804832458496, 0.02313852882385254, 0.023324544906616212, 0.023433055877685547, 0.02327071952819824, 0.02337276840209961, 0.023993728637695312, 0.023318656921386717, 0.023343616485595704, 0.023050111770629884, 0.0231200008392334, 0.023093183517456053, 0.023076927185058594, 0.023128320693969726, 0.02336105537414551, 0.023158496856689453, 0.024087039947509766, 0.02369740867614746, 0.02366854476928711, 0.023543231964111327, 0.023788032531738282, 0.023597055435180665, 0.023731584548950194, 0.023657344818115236, 0.023812095642089845, 0.023607295989990236, 0.023692607879638672, 0.02377801513671875, 0.023883520126342775, 0.023617759704589843, 0.023711103439331055, 0.023794303894042967, 0.02367283248901367, 0.023450944900512697, 0.023521280288696288, 0.023560895919799804, 0.023623392105102538, 0.023568672180175783, 0.02430156707763672, 0.02373222351074219, 0.023830528259277343, 0.023736320495605468, 0.023558143615722657, 0.02396272087097168, 0.023570655822753906, 0.023548608779907228, 0.02373129653930664, 0.02364009666442871, 0.02370035171508789, 0.02362892723083496, 0.02355289649963379, 0.023558143615722657, 0.023578624725341796, 0.023613439559936524, 0.02361657524108887, 0.02374732780456543, 0.02379385566711426, 0.02372812843322754, 0.023512895584106446, 0.023521472930908203, 0.023777280807495117, 0.025438207626342774, 0.023703136444091798, 0.02349507141113281, 0.02356153678894043, 0.023567039489746092, 0.023493824005126954, 0.023429759979248045, 0.023705791473388672, 0.023538976669311523, 0.02402911949157715, 0.023810527801513673, 0.023572799682617187, 0.02347417640686035, 0.02349056053161621, 0.02355948829650879, 0.02335136032104492, 0.023652416229248047, 0.0238209285736084, 0.02361529541015625, 0.02406617546081543, 0.024272159576416017, 0.024326879501342772, 0.023631872177124022, 0.023412704467773438, 0.023238687515258788, 0.023769088745117187, 0.023404544830322265, 0.02347007942199707, 0.02389401626586914, 0.023455743789672853, 0.023642112731933593, 0.02343731117248535, 0.023690591812133788, 0.02358937644958496, 0.023570816040039064, 0.023779104232788086, 0.02357596778869629, 0.024078943252563476, 0.023686336517333983, 0.023548736572265624, 0.024302879333496095, 0.0236430721282959, 0.023540767669677734, 0.023364416122436525, 0.023197696685791015, 0.023150047302246093, 0.02304368019104004, 0.023753664016723634, 0.023439359664916993, 0.02346931266784668, 0.023785280227661132, 0.02355295944213867, 0.023442880630493164, 0.023685440063476563, 0.024180992126464844, 0.02349875259399414, 0.02360425567626953, 0.023598047256469728, 0.023395999908447266, 0.02345404815673828, 0.023414783477783203, 0.023502975463867187, 0.023393728256225585, 0.02328767967224121, 0.02324127960205078, 0.02355606460571289, 0.023672447204589844, 0.023694847106933595, 0.023565216064453123, 0.023793664932250977, 0.02368694305419922, 0.023747840881347657, 0.02361356735229492, 0.023458656311035157, 0.023431167602539063, 0.023473567962646484, 0.02335804748535156, 0.023377311706542968, 
0.023306848526000977, 0.023453567504882814, 0.023160959243774416, 0.023787071228027343, 0.023583168029785155, 0.023513088226318358, 0.023525375366210938, 0.023814079284667968, 0.02359225654602051, 0.023369951248168944, 0.023286304473876952, 0.023134208679199218, 0.02326323127746582, 0.023321983337402343, 0.023325311660766603, 0.02327347183227539, 0.02329395294189453, 0.023178752899169923, 0.02317568016052246, 0.023928831100463867, 0.02393907165527344, 0.023721055984497072, 0.023638015747070314, 0.02356425666809082, 0.023342016220092774, 0.025831424713134765, 0.023721887588500978, 0.0236046085357666, 0.023611839294433595, 0.023316768646240233, 0.023382015228271484, 0.023488512039184572, 0.02386124801635742, 0.023400447845458985, 0.023513088226318358, 0.023562175750732422, 0.02355558395385742, 0.023674720764160155, 0.023710399627685546, 0.02361142349243164, 0.023628992080688478, 0.023724863052368164, 0.023851007461547852, 0.02351513671875, 0.023566335678100587, 0.023569728851318358, 0.023458175659179688, 0.023562496185302734, 0.02359916877746582, 0.023357440948486328, 0.023615039825439454, 0.02359856033325195, 0.023700447082519532, 0.023582719802856447, 0.02348646354675293, 0.02349260711669922, 0.023721120834350587, 0.023495616912841796, 0.02343107223510742, 0.023459840774536132, 0.023379968643188476, 0.02353936004638672, 0.023435615539550782, 0.023619583129882812, 0.023319679260253905, 0.023900159835815428, 0.02398044776916504, 0.023576320648193358, 0.02442313575744629, 0.02351628875732422, 0.023556991577148436, 0.023638015747070314, 0.023642112731933593, 0.02405116844177246, 0.023553823471069334, 0.02335001564025879, 0.023465120315551757, 0.023391103744506835, 0.02346544075012207, 0.023545503616333008, 0.023515775680541993, 0.023990495681762695, 0.02370150375366211, 0.02369254493713379, 0.023599872589111327, 0.023695232391357422, 0.02352681541442871, 0.023613216400146485, 0.02434454345703125, 0.023666336059570314, 0.023396703720092775, 0.023341056823730468, 0.023412736892700195, 0.023330816268920897, 0.023379968643188476, 0.02332467269897461, 0.023430624008178712, 0.023685663223266602, 0.023452991485595702, 0.023552032470703126, 0.02374518394470215, 0.023538848876953126, 0.023513023376464843, 0.023753440856933594, 0.023427263259887695, 0.023392255783081056, 0.023533344268798828, 0.02328335952758789, 0.02318547248840332, 0.023097856521606445, 0.023435327529907228, 0.02374239921569824, 0.023386112213134767, 0.02349260711669922, 0.02345952033996582, 0.023737855911254883, 0.023669567108154297, 0.023530879974365235, 0.02348451232910156, 0.023444000244140624, 0.02351923179626465, 0.02361100769042969, 0.023677312850952148, 0.023586816787719726, 0.023395519256591796, 0.023919424057006835, 0.023500320434570312, 0.023755231857299806, 0.023317792892456054, 0.02369340705871582, 0.025008031845092774, 0.023364320755004882, 0.023582719802856447, 0.02351103973388672, 0.023239967346191406, 0.023140703201293945, 0.023365535736083985, 0.024134111404418946, 0.023357440948486328, 0.023257087707519532, 0.023230464935302734, 0.023150592803955077, 0.023173120498657225, 0.023085216522216796, 0.023066463470458983, 0.022986751556396484, 0.02304204750061035, 0.023078399658203123, 0.02300979232788086, 0.02308095932006836, 0.023035903930664063, 0.024086143493652342, 0.02361587142944336, 0.02334867286682129, 0.023151168823242186, 0.02297225570678711, 0.022929567337036133, 0.022986560821533202, 0.02316716766357422, 0.023166976928710937, 0.022994047164916993, 0.022838144302368163, 0.023007232666015624, 
0.023027711868286133, 0.02325299263000488, 0.024681631088256835, 0.022922079086303712, 0.02310553550720215, 0.02289651107788086, 0.02310544013977051, 0.02300499153137207, 0.02368889617919922, 0.023000799179077148, 0.02292799949645996, 0.02284172821044922, 0.022769472122192384, 0.022839679718017578, 0.02281667137145996, 0.02285763168334961, 0.02290483283996582, 0.02292857551574707, 0.02302239990234375, 0.02296118354797363, 0.022874143600463866, 0.022854591369628908, 0.022844831466674806, 0.02282966423034668, 0.022916479110717772, 0.022880064010620118, 0.02298476791381836, 0.023325439453125, 0.023514528274536133, 0.023757024765014647, 0.02361382484436035, 0.02384022331237793, 0.023582752227783204, 0.023685632705688478, 0.023541759490966797, 0.02352742385864258, 0.023564287185668945, 0.025159616470336914, 0.024561376571655275, 0.02389027214050293, 0.02356617546081543, 0.02382035255432129, 0.02366454315185547, 0.023980224609375, 0.0235284481048584, 0.02371686363220215, 0.023527584075927734, 0.023736127853393553, 0.02367695999145508, 0.023696895599365234, 0.023783071517944336, 0.024363231658935548, 0.023821823120117186, 0.02373023986816406, 0.023821023941040038, 0.02371753692626953, 0.02376406478881836, 0.023622207641601563, 0.02389139175415039, 0.023638111114501953, 0.023595903396606447, 0.02348646354675293, 0.023637088775634765, 0.023548416137695313, 0.023691680908203124, 0.02383635139465332, 0.023651775360107423, 0.02378432083129883, 0.023602848052978517, 0.023853471755981445, 0.02371945571899414, 0.02358844757080078, 0.023946048736572266, 0.023836288452148437, 0.02375433540344238, 0.02369820785522461, 0.02404159927368164, 0.023909631729125976, 0.023734912872314454, 0.023791616439819335, 0.02362739181518555, 0.023517568588256835, 0.023578624725341796, 0.023517183303833008, 0.023740415573120118, 0.023608608245849608, 0.02367148780822754, 0.023701536178588868, 0.023754751205444336, 0.023582719802856447, 0.02360099220275879, 0.023549823760986327, 0.023707935333251953, 0.023563840866088866, 0.02372243118286133, 0.023609344482421874, 0.02352921676635742, 0.023581247329711914, 0.023511903762817383, 0.023356256484985353, 0.023278656005859374, 0.02328428840637207, 0.0238022403717041, 0.02360438346862793, 0.02350166320800781, 0.023476224899291992, 0.023459840774536132, 0.023385087966918947, 0.023770111083984375, 0.02351420783996582, 0.023833471298217772, 0.023539743423461913, 0.023586240768432618, 0.023394271850585936, 0.02433932876586914, 0.02374358367919922, 0.02349910354614258, 0.023532096862792968, 0.023459840774536132, 0.02337295913696289, 0.023837535858154298, 0.02351513671875, 0.023503936767578126, 0.023350208282470704, 0.023502559661865235, 0.024280416488647462, 0.024840192794799806, 0.024897823333740233, 0.023560863494873047, 0.02346700859069824, 0.023286367416381838, 0.023815967559814452, 0.023839328765869142, 0.023596832275390625, 0.02350105667114258, 0.023760255813598634, 0.02357062339782715, 0.023536064147949218, 0.023443456649780273, 0.023582719802856447, 0.023640064239501952, 0.023649503707885742, 0.02366761589050293, 0.02366041564941406, 0.02368297576904297, 0.023451648712158202, 0.023681119918823244, 0.024008319854736327, 0.02358310317993164, 0.023379648208618164, 0.023437631607055663, 0.023465375900268554, 0.023538272857666017, 0.023570432662963867, 0.023584800720214842, 0.02341036796569824, 0.023509151458740236, 0.02374790382385254, 0.023690048217773436, 0.02360051155090332, 0.0235784969329834, 0.02354979133605957, 0.023499616622924803, 0.023737951278686522, 0.023480800628662108, 
0.02360121536254883, 0.02350796890258789, 0.02357734489440918, 0.023474367141723632, 0.023621631622314454, 0.023601152420043944, 0.023551616668701172, 0.02374835205078125, 0.023620223999023436, 0.02377654457092285, 0.023673599243164062, 0.023571807861328124, 0.0242587833404541, 0.02387353515625, 0.02352742385864258, 0.02353984069824219, 0.02375433540344238, 0.02374684715270996, 0.02353971290588379, 0.023556095123291015, 0.023582176208496095, 0.023566047668457032, 0.02340108871459961, 0.02341231918334961, 0.02359328079223633, 0.023549280166625976, 0.023749696731567384, 0.02352921676635742, 0.023634048461914064, 0.023614463806152345, 0.023577600479125976, 0.023496511459350587, 0.023660127639770507, 0.023634271621704103, 0.023431615829467775, 0.023521087646484376, 0.02326937675476074, 0.023581823348999022, 0.02360204887390137, 0.023846912384033202, 0.023967744827270508, 0.023348608016967774, 0.02336319923400879, 0.023318656921386717, 0.022993183135986327, 0.022948448181152343, 0.023076864242553712, 0.023250944137573244, 0.023510208129882814, 0.023992671966552734, 0.023560672760009765, 0.023645503997802735, 0.023485088348388673, 0.023425056457519532, 0.02351820755004883, 0.02350979232788086, 0.02392460823059082, 0.024791391372680664, 0.02327756881713867, 0.02331222343444824, 0.023272991180419922, 0.023020160675048827, 0.022982656478881838, 0.022880159378051757, 0.022990943908691407, 0.022978143692016603, 0.023241119384765627, 0.02336128044128418, 0.023355648040771483, 0.023216127395629883, 0.023003135681152344, 0.022994623184204102, 0.022875808715820314, 0.02285430335998535, 0.022939647674560547]",tokens/s,42.54038422078784,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-70m,EleutherAI/pythia-70m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,814.370816,569.311232,0.0,174.063616,172.57984,s,1,7.39778955078125,7.39778955078125,0.0,7.39778955078125,7.39778955078125,7.39778955078125,7.39778955078125,[7.39778955078125],,kWh,4.7222717499986785e-06,5.062745464826148e-07,2.0172238360122163e-06,7.245770132493509e-06,,MB,1102.393344,640.6144,0.0,234.881024,215.589888,s,28,0.2848372488021851,0.010172758885792324,0.00013117854918736572,0.010129647731781006,0.010273417854309084,0.010319713973999023,0.010636543121337891,"[0.010750176429748536, 0.010159199714660644, 0.010094880104064941, 0.010211104393005372, 0.010301888465881348, 0.010120927810668945, 0.010138175964355468, 0.010130271911621094, 0.010129023551940917, 0.010106335639953614, 0.010083200454711914, 0.010213151931762695, 0.01010598373413086, 0.010100319862365722, 0.010065631866455078, 0.010045984268188476, 0.01007094383239746, 0.010248096466064453, 0.010171072006225586, 0.010164031982421875, 0.010261216163635255, 0.010215295791625976, 0.010104384422302246, 0.010108320236206055, 0.010329312324523925, 0.010110719680786133, 0.010171232223510742, 
0.010126367568969726]",tokens/s,25165.247979831674,kWh,3.3376456913332124e-07,3.6808116085848184e-08,2.205903352022912e-07,5.911630204214606e-07,tokens/kWh,433044678.29785556,MB,1124.192256,644.808704,0.0,239.075328,215.592448,s,28,9.718639923095703,0.3470942829677037,0.026224458851421606,0.3441082611083984,0.34980752868652343,0.35327261810302735,0.44566918975830083,"[0.35386810302734373, 0.4796230163574219, 0.34065325927734375, 0.3382584533691406, 0.34457293701171876, 0.3441163024902344, 0.34413015747070314, 0.34559478759765627, 0.340366455078125, 0.3450955505371094, 0.34164190673828126, 0.3381908264160156, 0.3309373474121094, 0.3296451416015625, 0.32842724609375, 0.33047039794921873, 0.346295166015625, 0.3521667175292969, 0.34715267944335937, 0.3487964477539062, 0.3454385986328125, 0.3441002197265625, 0.3431165466308594, 0.3409849548339844, 0.3409042663574219, 0.34522457885742186, 0.3434397583007813, 0.3454281005859375]",tokens/s,181.50687894177156,kWh,9.682858715171649e-06,1.0678501069536257e-06,4.092357559597695e-06,1.4843066381722967e-05,tokens/kWh,4244405.999394785,,s,1764,9.704967070102688,0.005501682012529869,0.0031853742987004437,0.005389279842376709,0.0056641695499420166,0.005749015855789184,0.00590723310947418,"[0.005378176212310791, 0.005633984088897705, 0.0066518402099609375, 0.0055623679161071774, 0.005505152225494385, 0.005490752220153808, 0.0054225921630859375, 0.005332320213317871, 0.00535427188873291, 0.005353663921356201, 0.005321023941040039, 0.005414271831512451, 0.005429056167602539, 0.0056276159286499025, 0.005731103897094726, 0.005851136207580566, 0.005887936115264893, 0.005803359985351563, 0.00578223991394043, 0.005713823795318603, 0.005698751926422119, 0.0057448320388793946, 0.005591968059539795, 0.006127327919006348, 0.007182432174682617, 0.007171648025512695, 0.007236159801483154, 0.006718624114990235, 0.005348320007324219, 0.0054720001220703125, 0.005443456172943115, 0.005308703899383545, 0.0053290238380432125, 0.005260447978973389, 0.005302879810333252, 0.00533296012878418, 0.005325024127960205, 0.005318496227264404, 0.005530687808990478, 0.005355519771575928, 0.005427328109741211, 0.005481215953826904, 0.005541855812072754, 0.0054354238510131835, 0.0054534077644348145, 0.005450143814086914, 0.005568511962890625, 0.0054570879936218265, 0.0053821120262146, 0.0054026880264282226, 0.005423903942108154, 0.005339136123657226, 0.0054778561592102055, 0.005446176052093506, 0.005436768054962158, 0.005599135875701904, 0.005528319835662842, 0.005575967788696289, 0.0053277120590209965, 0.005435135841369629, 0.005412992000579834, 0.005360799789428711, 0.005388224124908447, 0.00535811185836792, 0.00546230411529541, 0.005336927890777588, 0.005271071910858154, 0.005275712013244629, 0.005300543785095215, 0.005441535949707031, 0.005554080009460449, 0.0056622719764709475, 0.005759039878845215, 0.005617152214050293, 0.005502175807952881, 0.0057853121757507325, 0.0055066561698913575, 0.13898793029785156, 0.005687424182891846, 0.005420032024383545, 0.0054050559997558595, 0.005675519943237305, 0.005770559787750244, 0.0055764479637145994, 0.005662784099578857, 0.00575167989730835, 0.0056128320693969724, 0.005519999980926514, 0.005432767868041992, 0.005298175811767578, 0.005251743793487549, 0.005273791790008545, 0.005321824073791504, 0.005331679821014405, 0.005369855880737305, 0.005406720161437988, 0.005362688064575195, 0.005276735782623291, 0.005328703880310059, 0.005359744071960449, 0.005459775924682617, 0.005511360168457031, 0.005332863807678222, 0.005263423919677735, 
0.005430560111999511, 0.0052623038291931155, 0.005242688179016113, 0.0053309440612792965, 0.005343232154846191, 0.005413055896759033, 0.005608575820922852, 0.005612095832824707, 0.005635551929473877, 0.0054811201095581056, 0.0055623679161071774, 0.005621024131774902, 0.005639039993286133, 0.005795072078704834, 0.005729119777679443, 0.005610527992248535, 0.005790624141693115, 0.005574463844299316, 0.005572832107543945, 0.005576191902160644, 0.005474336147308349, 0.00535920000076294, 0.005068895816802979, 0.005327104091644287, 0.005312128067016602, 0.005338848114013672, 0.005344064235687256, 0.005338624000549317, 0.005392288208007813, 0.005374239921569824, 0.005339263916015625, 0.005425087928771973, 0.005422336101531982, 0.005490975856781006, 0.005361567974090576, 0.005292543888092041, 0.005378208160400391, 0.005658207893371582, 0.005756735801696777, 0.005593855857849121, 0.005472095966339111, 0.00532480001449585, 0.0053433279991149905, 0.005289408206939697, 0.0054074878692626956, 0.005340896129608154, 0.005329055786132813, 0.005309855937957764, 0.005351071834564209, 0.005429183959960938, 0.005285920143127442, 0.005305151939392089, 0.005309919834136963, 0.005290527820587158, 0.005320576190948487, 0.005277535915374756, 0.005269792079925537, 0.0053920321464538574, 0.005368159770965576, 0.0054148478507995606, 0.005303423881530761, 0.0052973442077636716, 0.005244671821594238, 0.005277696132659912, 0.005253344058990478, 0.005240608215332032, 0.005270592212677002, 0.0052952318191528324, 0.005242688179016113, 0.005309887886047363, 0.0052271361351013184, 0.005231935977935791, 0.005292672157287598, 0.005343232154846191, 0.005462016105651855, 0.005594336032867432, 0.005765791893005371, 0.005807360172271728, 0.005733248233795166, 0.005727776050567627, 0.005752480030059815, 0.005575104236602783, 0.005610911846160888, 0.005672959804534912, 0.005542367935180664, 0.00521830415725708, 0.005402624130249023, 0.005409088134765625, 0.005375679969787598, 0.005371903896331787, 0.005281792163848877, 0.005269152164459228, 0.005302624225616455, 0.005264927864074707, 0.005231167793273926, 0.005488160133361816, 0.005222784042358398, 0.0053383359909057616, 0.005546207904815674, 0.005372032165527344, 0.005406400203704834, 0.005461984157562256, 0.005421855926513672, 0.005380064010620117, 0.0053080000877380375, 0.005230944156646728, 0.00530953598022461, 0.005556896209716797, 0.005435743808746338, 0.005816383838653565, 0.005511104106903076, 0.005392608165740967, 0.005389791965484619, 0.005269408226013184, 0.005357984066009521, 0.005326144218444824, 0.005290688037872315, 0.005296351909637451, 0.005381919860839844, 0.005318655967712403, 0.005351424217224121, 0.0055519680976867675, 0.00541212797164917, 0.005440095901489258, 0.0053619518280029295, 0.00532480001449585, 0.0053309440612792965, 0.005302271842956543, 0.005310463905334473, 0.005289472103118896, 0.005282303810119629, 0.005263199806213379, 0.005265567779541016, 0.005281599998474121, 0.005303616046905517, 0.005230815887451172, 0.0052681279182434084, 0.005255392074584961, 0.005246751785278321, 0.005390336036682129, 0.005337088108062744, 0.005269408226013184, 0.005267744064331055, 0.005246784210205078, 0.0052449598312377926, 0.005396480083465576, 0.005713888168334961, 0.005781504154205322, 0.005538527965545654, 0.005690752029418945, 0.005656288146972656, 0.005675360202789307, 0.005472832202911377, 0.005832704067230224, 0.0057588801383972165, 0.005496479988098144, 0.006107200145721436, 0.006009215831756592, 0.005462016105651855, 0.005294079780578613, 0.005320703983306885, 
0.005304671764373779, 0.005467552185058594, 0.00540496015548706, 0.005365952014923096, 0.005340544223785401, 0.005360032081604004, 0.005434847831726074, 0.00538643217086792, 0.0054848318099975585, 0.005491903781890869, 0.0054908480644226074, 0.005515232086181641, 0.005409471988677979, 0.005442624092102051, 0.005352223873138428, 0.0053086400032043455, 0.005387584209442139, 0.005307040214538574, 0.0054570560455322265, 0.005337344169616699, 0.005367584228515625, 0.005270080089569092, 0.005333216190338135, 0.005369855880737305, 0.005388351917266845, 0.00551091194152832, 0.005633503913879395, 0.005518112182617187, 0.005492735862731934, 0.0053678078651428224, 0.005439616203308105, 0.005547904014587403, 0.005434912204742431, 0.005564896106719971, 0.005505023956298828, 0.005347583770751953, 0.005451519966125488, 0.005514463901519775, 0.005446303844451905, 0.005367936134338379, 0.005392608165740967, 0.005594912052154541, 0.00555344009399414, 0.005490784168243408, 0.0053909759521484375, 0.005305791854858398, 0.005332896232604981, 0.005329567909240723, 0.005256703853607178, 0.00524124813079834, 0.005526368141174316, 0.005921951770782471, 0.005810688018798828, 0.005886112213134766, 0.005806975841522217, 0.005666048049926758, 0.005688159942626953, 0.005590911865234375, 0.005550687789916992, 0.005392799854278565, 0.005404672145843506, 0.005369855880737305, 0.005322751998901368, 0.005306111812591553, 0.005367712020874023, 0.005265888214111328, 0.005279327869415283, 0.005340767860412598, 0.005349599838256836, 0.005403103828430175, 0.005331264019012451, 0.005340864181518554, 0.0053944320678710935, 0.005309887886047363, 0.0055277438163757325, 0.005498784065246582, 0.005364192008972168, 0.005250688076019287, 0.005273536205291748, 0.005276095867156982, 0.005295328140258789, 0.005274400234222412, 0.005273215770721436, 0.005263199806213379, 0.005396671772003174, 0.005368159770965576, 0.005261312007904053, 0.005273600101470947, 0.005285920143127442, 0.005311872005462646, 0.005298783779144287, 0.005586944103240967, 0.005760704040527344, 0.005683008193969726, 0.005568416118621826, 0.005773920059204102, 0.005560319900512695, 0.005451488018035889, 0.005357312202453613, 0.005304863929748535, 0.005320703983306885, 0.005483615875244141, 0.005403264045715332, 0.005568672180175782, 0.005611936092376709, 0.005553855895996094, 0.005567840099334717, 0.005594079971313477, 0.005526591777801513, 0.005443327903747559, 0.005384640216827392, 0.005511199951171875, 0.0055016641616821285, 0.005202239990234375, 0.005316160202026367, 0.005249824047088623, 0.005286911964416504, 0.005236671924591064, 0.005290719985961914, 0.0055268478393554685, 0.0057413439750671385, 0.005802976131439209, 0.005730624198913574, 0.005795519828796386, 0.005804512023925781, 0.005767648220062256, 0.005739776134490967, 0.005603295803070068, 0.005574624061584473, 0.0055642881393432615, 0.005499423980712891, 0.005550496101379395, 0.005480447769165039, 0.005539423942565918, 0.005405407905578613, 0.00534281587600708, 0.00537011194229126, 0.005443424224853516, 0.005476319789886474, 0.005423136234283447, 0.005400000095367431, 0.005296703815460205, 0.005311520099639893, 0.005340288162231446, 0.00534665584564209, 0.005382656097412109, 0.00536678409576416, 0.005385216236114502, 0.00548796796798706, 0.0054913277626037595, 0.005451807975769043, 0.005383264064788818, 0.005282688140869141, 0.005387872219085693, 0.005474751949310303, 0.005594399929046631, 0.00563705587387085, 0.005541855812072754, 0.0055001602172851565, 0.00539244794845581, 0.0053396477699279785, 
0.005317728042602539, 0.005352352142333984, 0.005346496105194092, 0.005307199954986572, 0.005366943836212158, 0.005296512126922608, 0.005286399841308594, 0.005304287910461425, 0.005299583911895752, 0.005614463806152344, 0.005578656196594239, 0.005472383975982666, 0.0054633598327636715, 0.0054150080680847165, 0.005630496025085449, 0.005165440082550049, 0.005356575965881348, 0.00526639986038208, 0.0052527360916137696, 0.005282112121582031, 0.005341119766235351, 0.00539247989654541, 0.005685535907745362, 0.005811903953552246, 0.005803264141082763, 0.005734367847442627, 0.00583897590637207, 0.005826816082000732, 0.005702047824859619, 0.005752831935882568, 0.00571289587020874, 0.005614143848419189, 0.0055812478065490724, 0.005599232196807862, 0.005539455890655518, 0.005427584171295166, 0.00545798397064209, 0.005330880165100097, 0.005279232025146485, 0.005310976028442383, 0.005449567794799805, 0.0053517122268676755, 0.005365344047546386, 0.005306848049163818, 0.0053812160491943355, 0.005521471977233887, 0.0054217281341552735, 0.005291840076446533, 0.005421120166778565, 0.0054148159027099605, 0.005351647853851318, 0.005601344108581543, 0.005294015884399414, 0.005290272235870361, 0.005371615886688232, 0.005323935985565185, 0.005325407981872559, 0.005435200214385987, 0.005605823993682861, 0.005917920112609863, 0.005901088237762451, 0.0056761598587036135, 0.005596223831176758, 0.005486400127410889, 0.005455872058868408, 0.005433343887329102, 0.005492032051086426, 0.0055569281578063965, 0.005410816192626953, 0.005443776130676269, 0.005422912120819092, 0.005347583770751953, 0.005426943778991699, 0.005484543800354004, 0.00540499210357666, 0.005387968063354493, 0.005355519771575928, 0.0053350400924682614, 0.005181151866912842, 0.005290463924407959, 0.005240447998046875, 0.005209216117858887, 0.005266496181488037, 0.005335008144378662, 0.005355423927307129, 0.005326720237731933, 0.005258016109466553, 0.005236832141876221, 0.005240928173065185, 0.005281792163848877, 0.005209856033325196, 0.005236800193786621, 0.005252895832061767, 0.005208479881286621, 0.005242400169372559, 0.005273824214935303, 0.005395872116088867, 0.005614431858062744, 0.005890304088592529, 0.00583244800567627, 0.005791679859161377, 0.005686528205871582, 0.005611936092376709, 0.005642623901367188, 0.005592639923095703, 0.005513696193695068, 0.005351776123046875, 0.005400224208831787, 0.005304448127746582, 0.005351295948028564, 0.0052899842262268066, 0.005302527904510498, 0.005271615982055664, 0.005275167942047119, 0.005331103801727295, 0.005273600101470947, 0.005269504070281982, 0.005316800117492676, 0.005283648014068603, 0.005376319885253906, 0.0055047359466552735, 0.005543776035308838, 0.005496640205383301, 0.0054513921737670895, 0.005371808052062988, 0.005398719787597656, 0.005612480163574219, 0.005496479988098144, 0.005425151824951172, 0.005402624130249023, 0.005320703983306885, 0.005591040134429932, 0.005627711772918701, 0.0056274237632751465, 0.005378047943115235, 0.005319327831268311, 0.005281824111938477, 0.005272543907165527, 0.005268479824066162, 0.00543068790435791, 0.005451712131500244, 0.004999551773071289, 0.005257376194000244, 0.005280992031097412, 0.005440288066864014, 0.005332992076873779, 0.005283840179443359, 0.005238143920898437, 0.005415584087371826, 0.005429215908050537, 0.005433343887329102, 0.005259263992309571, 0.005292031764984131, 0.005340896129608154, 0.005253344058990478, 0.005330687999725342, 0.0053656001091003415, 0.005489120006561279, 0.005781311988830567, 0.0057079682350158695, 0.005606688022613525, 
0.005571296215057373, 0.00565228796005249, 0.005763264179229737, 0.005723199844360352, 0.005710783958435058, 0.005598400115966797, 0.005547071933746338, 0.0056911039352416995, 0.005503007888793945, 0.005470208168029785, 0.005885216236114502, 0.005559008121490478, 0.00556166410446167, 0.005391039848327637, 0.005361663818359375, 0.0053309440612792965, 0.0052856321334838864, 0.005337344169616699, 0.005285855770111084, 0.005253151893615722, 0.005318655967712403, 0.005468255996704101, 0.005403679847717285, 0.005370304107666015, 0.00535920000076294, 0.0055409598350524905, 0.005597951889038086, 0.005573631763458252, 0.005423359870910645, 0.005391935825347901, 0.0053396477699279785, 0.0057560958862304685, 0.005466239929199219, 0.005472640037536621, 0.0055808000564575196, 0.005496096134185791, 0.005374112129211426, 0.005650752067565918, 0.006821248054504394, 0.005309247970581054, 0.0053023362159729005, 0.005296127796173096, 0.0053043198585510255, 0.005077375888824463, 0.005404319763183594, 0.005408768177032471, 0.005445600032806397, 0.005482880115509034, 0.005502655982971192, 0.005460256099700927, 0.00537824010848999, 0.0054898238182067875, 0.005333695888519287, 0.005311967849731446, 0.0054132800102233885, 0.005439743995666504, 0.005568416118621826, 0.005824480056762695, 0.005603456020355225, 0.005533408164978027, 0.005464352130889892, 0.005431392192840576, 0.005539391994476318, 0.005552351951599121, 0.00555622386932373, 0.0055582718849182125, 0.005433343887329102, 0.005575776100158691, 0.005616543769836425, 0.005609439849853516, 0.005599071979522705, 0.005853343963623047, 0.0055636482238769535, 0.0054477438926696774, 0.005331295967102051, 0.005398655891418457, 0.005279551982879638, 0.005256703853607178, 0.005305344104766845, 0.005315936088562011, 0.005304831981658936, 0.0053348479270935055, 0.005341087818145752, 0.00529856014251709, 0.005310463905334473, 0.005315616130828858, 0.005297376155853272, 0.005270400047302246, 0.005494944095611572, 0.0052453441619873045, 0.005327167987823486, 0.005253215789794922, 0.005355423927307129, 0.005453375816345215, 0.005355775833129883, 0.0052978239059448245, 0.0054198079109191895, 0.005316351890563965, 0.005314559936523438, 0.005344607830047607, 0.005275775909423828, 0.005343776226043701, 0.0054332160949707034, 0.005309855937957764, 0.005280543804168701, 0.005466047763824463, 0.00512937593460083, 0.005375616073608399, 0.005290976047515869, 0.005322080135345459, 0.005329567909240723, 0.005303775787353516, 0.005351359844207764, 0.005382688045501709, 0.005617248058319092, 0.005663424015045166, 0.005533184051513672, 0.0055418238639831545, 0.005617472171783447, 0.0055559039115905765, 0.005392352104187012, 0.005286303997039795, 0.005228576183319092, 0.00521670389175415, 0.005298431873321533, 0.0052139520645141605, 0.005230591773986816, 0.0052139520645141605, 0.005252863883972168, 0.005229023933410645, 0.005230751991271973, 0.005202112197875977, 0.005278687953948974, 0.005236544132232666, 0.005208992004394532, 0.005277696132659912, 0.005279263973236084, 0.005273888111114502, 0.0052778878211975095, 0.005283840179443359, 0.005268479824066162, 0.005398943901062012, 0.005375999927520752, 0.005277760028839111, 0.005212704181671143, 0.005308127880096435, 0.0052206401824951175, 0.005232480049133301, 0.005238944053649902, 0.005228544235229492, 0.0052408638000488285, 0.005246943950653076, 0.005304224014282226, 0.005230368137359619, 0.005599552154541015, 0.005486591815948487, 0.005773503780364991, 0.005395648002624512, 0.005311103820800781, 0.00522649621963501, 
0.005406720161437988, 0.007766016006469726, 0.0053096961975097655, 0.005245728015899658, 0.00527180814743042, 0.005295839786529541, 0.005220384120941162, 0.005299903869628906, 0.005226655960083008, 0.004976640224456787, 0.0051970877647399905, 0.0052128958702087405, 0.005285888195037842, 0.005249023914337158, 0.005248000144958496, 0.0052715520858764645, 0.005233312129974365, 0.005207647800445556, 0.005250879764556885, 0.005237696170806885, 0.005224448204040527, 0.005181439876556396, 0.005240543842315674, 0.005212448120117188, 0.005224063873291016, 0.005310783863067627, 0.005218368053436279, 0.005232287883758545, 0.005248447895050049, 0.005247903823852539, 0.005236832141876221, 0.005330848217010498, 0.005312704086303711, 0.005246784210205078, 0.005274655818939209, 0.005253536224365234, 0.0052147841453552245, 0.005240384101867676, 0.00526310396194458, 0.005227200031280518, 0.005252255916595459, 0.005317599773406983, 0.005262400150299072, 0.005246880054473877, 0.005263936042785644, 0.005259488105773926, 0.005234816074371338, 0.005251071929931641, 0.005224480152130127, 0.0052326078414916995, 0.005250432014465332, 0.0052397122383117676, 0.005256703853607178, 0.0052144317626953126, 0.005251071929931641, 0.005197824001312256, 0.005221824169158936, 0.005327263832092285, 0.005251232147216797, 0.005228544235229492, 0.005254752159118652, 0.005242623805999756, 0.0052202558517456055, 0.005233151912689209, 0.0053127679824829105, 0.005238783836364746, 0.0053916797637939455, 0.005241536140441895, 0.005244607925415039, 0.005259583950042725, 0.0052796158790588375, 0.00527510404586792, 0.005088511943817139, 0.00527350378036499, 0.005271999835968017, 0.005260896205902099, 0.005222400188446045, 0.005268159866333008, 0.00525324821472168, 0.005214528083801269, 0.005248703956604004, 0.005226784229278565, 0.005215968132019043, 0.005230080127716064, 0.005251584053039551, 0.005238783836364746, 0.005220352172851562, 0.0052899842262268066, 0.005228384017944336, 0.005189792156219482, 0.005332992076873779, 0.005212160110473632, 0.005212160110473632, 0.00520911979675293, 0.005231584072113037, 0.005212160110473632, 0.005197824001312256, 0.005222400188446045, 0.005191967964172363, 0.005207776069641113, 0.005218495845794677, 0.005210080146789551, 0.005203999996185303, 0.0052260799407958984, 0.005193471908569336, 0.005199391841888428, 0.005278816223144531, 0.005220191955566406, 0.005212287902832031, 0.005203904151916504, 0.0053268160820007324, 0.0052013759613037105, 0.005208384037017822, 0.005271743774414062, 0.005228064060211182, 0.005199520111083984, 0.005235680103302002, 0.005223840236663818, 0.005200191974639893, 0.005218431949615478, 0.005210112094879151, 0.005195648193359375, 0.005214335918426513, 0.0052302079200744625, 0.005251455783843994, 0.005222623825073242, 0.0052631359100341795, 0.005242879867553711, 0.005185823917388916, 0.005249760150909424, 0.005186560153961181, 0.005183487892150879, 0.005264992237091064, 0.005220767974853516, 0.005183072090148926, 0.004925631999969483, 0.005192895889282226, 0.005238751888275147, 0.00518828821182251, 0.005187744140625, 0.0051866240501403805, 0.00522054386138916, 0.005200640201568603, 0.0052193598747253414, 0.005250016212463379, 0.0051948800086975095, 0.005206912040710449, 0.005230591773986816, 0.005180704116821289, 0.0051799359321594235, 0.005159327983856201, 0.00520579195022583, 0.005179391860961914, 0.005162720203399658, 0.005210400104522705, 0.005210112094879151, 0.005208000183105469, 0.005234752178192139, 0.005232480049133301, 0.005197984218597412, 0.005183680057525635, 
0.005275455951690674, 0.005227871894836426, 0.005252863883972168, 0.005263743877410888, 0.005194399833679199, 0.005214079856872559, 0.0052408318519592285, 0.005236415863037109, 0.005179711818695068, 0.0052204480171203615, 0.005197343826293946, 0.0051849279403686525, 0.005196288108825684, 0.00520854377746582, 0.005205152034759522, 0.00517795181274414, 0.005218560218811035, 0.005205215930938721, 0.005191584110260009, 0.005239456176757812, 0.005232863903045655, 0.005223680019378662, 0.005216383934020996, 0.005259903907775879, 0.005177152156829834, 0.005197728157043457, 0.005248640060424805, 0.0052269759178161625, 0.005204031944274903, 0.0051979517936706545, 0.0051996479034423825, 0.0051833920478820805, 0.005202239990234375, 0.005214208126068115, 0.005197824001312256, 0.005217599868774414, 0.005253119945526123, 0.005162752151489257, 0.0052247681617736816, 0.005201695919036865, 0.005257472038269043, 0.0052427840232849125, 0.005235968112945556, 0.005248127937316894, 0.005237823963165284, 0.00520249605178833, 0.005218624114990234, 0.005228544235229492, 0.005193535804748535, 0.005195871829986572, 0.005207839965820312, 0.005232704162597656, 0.005203904151916504, 0.0052573118209838865, 0.005221536159515381, 0.0052269439697265625, 0.005212224006652832, 0.0052962880134582516, 0.005265471935272217, 0.0052432317733764645, 0.0052854719161987305, 0.005242208003997803, 0.0052473278045654295, 0.0052555837631225585, 0.005256383895874023, 0.005216447830200195, 0.0052271361351013184, 0.005259168148040771, 0.00520966386795044, 0.005237343788146972, 0.0052501440048217775, 0.005225279808044434, 0.005222432136535645, 0.00531660795211792, 0.005214208126068115, 0.005232639789581299, 0.005254655838012695, 0.005243199825286865, 0.005255167961120606, 0.005254816055297851, 0.005220928192138672, 0.0052399678230285645, 0.005241663932800293, 0.005252768039703369, 0.005216447830200195, 0.0052000322341918944, 0.0052408318519592285, 0.005221856117248535, 0.005253664016723633, 0.005216256141662597, 0.005240575790405273, 0.005226111888885498, 0.005245567798614502, 0.0052420802116394044, 0.005227295875549316, 0.005305535793304443, 0.005294911861419678, 0.005216256141662597, 0.005251232147216797, 0.005348896026611328, 0.005604864120483399, 0.005728767871856689, 0.005339136123657226, 0.006174719810485839, 0.005289599895477295, 0.005269599914550781, 0.005248799800872803, 0.005241343975067139, 0.005298175811767578, 0.005226624011993408, 0.005250944137573242, 0.005310719966888428, 0.005293248176574707, 0.005388031959533692, 0.0053209919929504395, 0.005282112121582031, 0.005320032119750976, 0.005798783779144287, 0.005392672061920166, 0.0054759359359741215, 0.005408063888549804, 0.0054421119689941405, 0.005431295871734619, 0.005436736106872559, 0.005401567935943603, 0.00540169620513916, 0.0054198079109191895, 0.005435488224029541, 0.00550707197189331, 0.00556387186050415, 0.005749087810516357, 0.00565993595123291, 0.0055708479881286625, 0.005675903797149658, 0.005707520008087158, 0.005700704097747803, 0.0055981121063232425, 0.005539999961853027, 0.005560383796691894, 0.005593952178955078, 0.005702591896057129, 0.005627007961273194, 0.005606272220611572, 0.005533696174621582, 0.0055380802154541015, 0.005522367954254151, 0.005548831939697266, 0.005569536209106446, 0.005645311832427978, 0.00566374397277832, 0.005610847949981689, 0.005437376022338868, 0.0053736639022827146, 0.005426591873168946, 0.005395040035247802, 0.005326848030090332, 0.00537395191192627, 0.005383840084075928, 0.005468480110168457, 0.005496863842010498, 
0.005484543800354004, 0.0054906878471374515, 0.005484543800354004, 0.005518367767333984, 0.005724991798400879, 0.00569155216217041, 0.005462207794189453, 0.005486400127410889, 0.005445375919342041, 0.005543488025665283, 0.00569209623336792, 0.005556511878967285, 0.005652192115783691, 0.005482048034667969, 0.005448128223419189, 0.005453504085540771, 0.005513023853302002, 0.0056232957839965824, 0.005665760040283203, 0.0056044158935546875, 0.005661952018737793, 0.00562556791305542, 0.005561984062194824, 0.005500448226928711, 0.005627744197845459, 0.005917695999145508, 0.005674560070037842, 0.005596640110015869, 0.005516255855560303, 0.0053814082145690915, 0.005462751865386963, 0.005355584144592285, 0.0053450241088867185, 0.005416160106658936, 0.005639135837554932, 0.0057402877807617185, 0.0055567359924316405, 0.005443615913391113, 0.005477791786193848, 0.00555244779586792, 0.0056277761459350584, 0.0056423678398132325, 0.005641439914703369, 0.0056228160858154295, 0.0055577921867370605, 0.005554399967193604, 0.0057497601509094234, 0.005626880168914795, 0.005640543937683105, 0.005664351940155029, 0.005789023876190185, 0.005722911834716797, 0.0056191678047180175, 0.005606080055236816, 0.005684288024902344, 0.005619999885559082, 0.005630271911621094, 0.005586143970489502, 0.005583360195159912, 0.0055668802261352535, 0.005541888236999512, 0.005482336044311523, 0.005463488101959228, 0.005474944114685059, 0.005499008178710937, 0.005830944061279297, 0.0053056960105896, 0.005496479988098144, 0.005422080039978027, 0.005404672145843506, 0.005392064094543457, 0.005337408065795898, 0.005377439975738525, 0.005362271785736084, 0.005446720123291016, 0.005681375980377198, 0.005630688190460205, 0.00568342399597168, 0.005532608032226562, 0.005475168228149414, 0.005434976100921631, 0.005424863815307617, 0.005419936180114746, 0.0054878082275390625, 0.005521599769592285, 0.0054952001571655276, 0.005430335998535156, 0.005434304237365723, 0.005449728012084961, 0.005422880172729492, 0.005365119934082031, 0.005412000179290772, 0.005649824142456055, 0.005703328132629394, 0.0056267518997192385, 0.00551529598236084, 0.005459936141967774, 0.005442463874816895, 0.005399456024169922, 0.005410463809967041, 0.005502687931060791, 0.005513792037963867, 0.005478047847747803, 0.00540499210357666, 0.00555625581741333, 0.005732351779937744, 0.005436704158782959, 0.005614048004150391, 0.0055032958984375, 0.005623744010925293, 0.005784992218017578, 0.005710432052612305, 0.005737792015075684, 0.00557535982131958, 0.005709824085235596, 0.005525440216064453, 0.005477920055389405, 0.005519904136657715, 0.005490176200866699, 0.005538144111633301, 0.005533823966979981, 0.005490880012512207, 0.0055103998184204105, 0.005476831912994385, 0.005520927906036377, 0.005498784065246582, 0.005364511966705322, 0.0054107198715209965, 0.005371903896331787, 0.005285151958465576, 0.005688096046447754, 0.005588992118835449, 0.005592639923095703, 0.005468607902526856, 0.005525440216064453, 0.005451839923858642, 0.005455872058868408, 0.00571398401260376, 0.005529088020324707, 0.005572480201721191, 0.005821280002593994, 0.0060403838157653805, 0.00577785587310791, 0.005677536010742188, 0.005631392002105713, 0.005624127864837646, 0.005697824001312256, 0.005711775779724121, 0.005779551982879639, 0.00578547191619873, 0.005885600090026855, 0.005789152145385742, 0.005808703899383545, 0.005624256134033203, 0.005570559978485107, 0.005687295913696289, 0.005611199855804444, 0.005517632007598877, 0.0054243202209472655, 0.0053703680038452144, 0.005337247848510742, 
0.005327072143554688, 0.0053861761093139645, 0.00555622386932373, 0.005566463947296142, 0.005538847923278809, 0.005415296077728272, 0.005361695766448974, 0.005362239837646485, 0.005447360038757324, 0.005349696159362793, 0.005309760093688965, 0.00531657600402832, 0.005264095783233643, 0.0052408318519592285, 0.005450816154479981, 0.005534656047821045, 0.0054941439628601076, 0.0053684477806091304, 0.005308671951293946, 0.005352640151977539, 0.005372479915618896, 0.005410367965698242, 0.005562816143035889, 0.005493792057037353, 0.005399519920349121, 0.005451807975769043, 0.005361184120178223, 0.005347775936126709, 0.005390143871307373, 0.0055809922218322755, 0.005825952053070068, 0.005263904094696045, 0.005511168003082275, 0.0054988799095153805, 0.005541888236999512, 0.005471360206604004, 0.0054767999649047855, 0.005482175827026367, 0.0055016961097717285, 0.005541888236999512, 0.005490719795227051, 0.005435008049011231, 0.005509471893310547, 0.005458943843841553, 0.005501440048217773, 0.0054563841819763184, 0.00538431978225708, 0.005452864170074463, 0.0053870720863342285, 0.0053283519744873045, 0.005314112186431885, 0.005314911842346191, 0.0052865281105041505, 0.00542080020904541, 0.005564671993255615, 0.005592576026916504, 0.00558735990524292, 0.005447711944580078, 0.005423423767089844, 0.0054204797744750975, 0.005386559963226318, 0.005389535903930664, 0.005389023780822754, 0.005519423961639404, 0.005634272098541259, 0.005537568092346191, 0.005502336025238037, 0.005610112190246582, 0.005505311965942383, 0.005487520217895508, 0.005463871955871582, 0.005438464164733887, 0.005418591976165772, 0.005404416084289551, 0.005333631992340088, 0.0053719358444213865, 0.005339136123657226, 0.00533894395828247, 0.005411007881164551, 0.005482272148132324, 0.005494368076324463, 0.005536384105682373, 0.005531775951385498, 0.005558144092559814, 0.005553567886352539, 0.0054462399482727054, 0.005535776138305664, 0.005467552185058594, 0.0054332160949707034, 0.005562719821929932, 0.005671103954315185, 0.0056934719085693355, 0.005641759872436524, 0.005732031822204589, 0.005318304061889648, 0.005591296195983887, 0.005571839809417725, 0.005614431858062744, 0.0055400958061218265, 0.005603072166442871, 0.005578752040863037, 0.005601024150848389, 0.00554531192779541, 0.005465216159820556, 0.005381919860839844, 0.005421055793762207, 0.005400415897369385, 0.005387455940246582, 0.0055075201988220215, 0.00529257583618164, 0.005347583770751953, 0.005404384136199951, 0.005566239833831787, 0.005705344200134277, 0.005734111785888672, 0.005567391872406006, 0.0053678078651428224, 0.0053137922286987304, 0.005336095809936523, 0.005276800155639649, 0.005247776031494141, 0.005303679943084717, 0.005304768085479737, 0.005286240100860596, 0.00538588809967041, 0.0053658242225646975, 0.005375264167785644, 0.005402560234069824, 0.005441472053527832, 0.005563519954681397, 0.005666463851928711, 0.00543120002746582, 0.005387968063354493, 0.0053844799995422365, 0.005400703907012939, 0.005537792205810547, 0.005482495784759522, 0.005416959762573242, 0.005445087909698486, 0.005423744201660156, 0.005482272148132324, 0.00556982421875, 0.005486688137054443, 0.005471007823944092, 0.005639135837554932, 0.005635072231292724, 0.0056113600730896, 0.005513567924499512, 0.005424960136413574, 0.005619967937469482, 0.0053942399024963375, 0.005428768157958984, 0.005386367797851562, 0.005331232070922852, 0.005334943771362305, 0.005324895858764648, 0.005301951885223389, 0.005069920063018799, 0.005314879894256592, 0.005540703773498535, 0.005608607769012451, 
0.005528384208679199, 0.005474080085754394, 0.00542310380935669, 0.005393695831298828, 0.005314367771148682, 0.005271679878234863, 0.0052947840690612796, 0.0052952318191528324, 0.005307360172271729, 0.005419072151184082, 0.0055008001327514645, 0.005521471977233887, 0.005597184181213379, 0.0055328960418701174, 0.005595935821533203, 0.005476480007171631, 0.005515359878540039, 0.00556828784942627, 0.005486591815948487, 0.005426432132720947, 0.0054709758758544925, 0.005359615802764893, 0.005302239894866943, 0.005374239921569824, 0.005324543952941895, 0.0053637118339538575, 0.005394144058227539, 0.005372032165527344, 0.005407040119171143, 0.005287295818328858, 0.005272031784057617, 0.005322751998901368, 0.005285888195037842, 0.005283616065979004, 0.005280255794525146, 0.005348576068878174, 0.005296639919281006, 0.0053034558296203614, 0.00539737606048584, 0.00536572790145874, 0.0054291200637817385, 0.005400191783905029, 0.005394944190979004, 0.005560319900512695, 0.005602496147155762, 0.00560211181640625, 0.005695680141448975, 0.005558080196380615, 0.005596799850463867, 0.0056590080261230465, 0.00559497594833374, 0.005595295906066894, 0.005424352169036866, 0.005425951957702637, 0.005554175853729248, 0.0056501121520996095, 0.005589119911193848, 0.005474495887756348, 0.00555017614364624, 0.00501475191116333, 0.005307007789611817, 0.005322944164276123, 0.005328864097595215, 0.0053043198585510255, 0.005266687870025635, 0.005282432079315186, 0.00527782392501831, 0.0053203201293945315, 0.005292416095733643, 0.0053012480735778805, 0.005286911964416504, 0.005327936172485352, 0.005403327941894531, 0.005370272159576416, 0.005329792022705078, 0.00532374382019043, 0.00532480001449585, 0.0054028801918029785, 0.005366752147674561, 0.0053194561004638674, 0.005334752082824707, 0.005406847953796387, 0.0054150080680847165, 0.005520864009857178, 0.005522016048431396, 0.0053647680282592775, 0.005336351871490478, 0.0053060479164123535, 0.0053678078651428224, 0.005488639831542969, 0.005432511806488037, 0.005348512172698975, 0.005375648021697998, 0.005293375968933105, 0.005268159866333008, 0.005359936237335205, 0.00544326400756836, 0.005414912223815918, 0.005441120147705078, 0.005458335876464843, 0.005660128116607666, 0.0056977920532226565, 0.005727903842926025, 0.005786240100860596, 0.0057077760696411135, 0.005668352127075196, 0.00548038387298584, 0.005464191913604736, 0.005527999877929688, 0.005474239826202393, 0.005443583965301513, 0.005468416213989258, 0.005414720058441162, 0.005347487926483155, 0.005322207927703857, 0.0052986559867858884, 0.0053144640922546385, 0.0053678078651428224, 0.005310175895690918, 0.00537663984298706, 0.005631775856018067, 0.005568384170532226, 0.0055214080810546875, 0.005724319934844971, 0.005629792213439942, 0.005629216194152832, 0.0057494077682495115, 0.0055929598808288575, 0.005554368019104004, 0.005402624130249023, 0.005398528099060058, 0.005337088108062744, 0.0054448962211608885, 0.00552623987197876, 0.005389696121215821, 0.005328735828399658, 0.005276480197906494, 0.005248991966247558, 0.005264544010162354, 0.0053277120590209965, 0.005354976177215576, 0.005306335926055908, 0.005352000236511231, 0.005308351993560791, 0.005287487983703613, 0.005326560020446777, 0.005333087921142578, 0.005500703811645508, 0.005460256099700927, 0.0053536958694458, 0.005320223808288574, 0.005356416225433349, 0.005326496124267578, 0.0053005762100219725, 0.0053077759742736815, 0.005330624103546143, 0.005253056049346924, 0.005263904094696045, 0.005267199993133545, 0.005374752044677735, 
0.005581952095031738, 0.005599552154541015, 0.005429247856140137, 0.0055157761573791505, 0.005314752101898194, 0.0052856321334838864, 0.005307839870452881, 0.0054358081817626955, 0.005390560150146485, 0.005498303890228272, 0.005425024032592774, 0.005444223880767822, 0.005435103893280029, 0.005507423877716065, 0.005478432178497314, 0.005367455959320068, 0.005327167987823486, 0.005320191860198975, 0.005449632167816162, 0.005322336196899414, 0.005485856056213379, 0.0053901119232177734, 0.005326720237731933, 0.005349696159362793, 0.005432479858398438, 0.0050178241729736325, 0.005277696132659912, 0.005289663791656494, 0.0053517441749572755, 0.00536575984954834, 0.005322815895080567, 0.005273344039916993, 0.005247168064117432, 0.0052899842262268066, 0.005337088108062744, 0.005449440002441406, 0.005685535907745362, 0.0059985918998718265, 0.005867519855499268, 0.005826560020446778, 0.00578172779083252, 0.0057710399627685545, 0.005748608112335205, 0.005746816158294678, 0.0056835198402404785, 0.0056761598587036135, 0.005699584007263184, 0.005796031951904297, 0.005676928043365478, 0.005626336097717285, 0.005672863960266113, 0.005572383880615235, 0.005527999877929688, 0.00545577621459961, 0.0055214080810546875, 0.005361663818359375, 0.005310400009155274, 0.005254816055297851, 0.005291808128356934, 0.005278048038482666, 0.0052165441513061525, 0.005492512226104736, 0.00551142406463623, 0.005343167781829834, 0.00535964822769165, 0.005297408103942871, 0.005296895980834961, 0.005325119972229004, 0.005265183925628662, 0.0052854399681091305, 0.005297535896301269, 0.005495935916900635, 0.005672575950622559, 0.005738272190093994, 0.005566336154937744, 0.005630527973175049, 0.005587135791778565, 0.005408448219299316, 0.005414559841156006, 0.005397151947021484, 0.0053860158920288085, 0.005349408149719239, 0.005315648078918457, 0.00528275203704834, 0.005418399810791016, 0.005460031986236572, 0.005444191932678223, 0.005426623821258545, 0.005340256214141846, 0.00556387186050415, 0.005488863945007324, 0.005500927925109863, 0.005345280170440674, 0.005418752193450928, 0.00542464017868042, 0.005534048080444336, 0.0057358717918396, 0.005736800193786621, 0.005816095829010009, 0.005698560237884521, 0.005691232204437256, 0.0056193599700927735, 0.00564086389541626, 0.005473983764648437, 0.005380095958709717, 0.0053268160820007324, 0.005341216087341309, 0.005326687812805176, 0.0053597760200500486, 0.005506400108337402, 0.005333663940429688, 0.0053455038070678714, 0.005314176082611084, 0.005314720153808594, 0.0053143038749694825, 0.005275904178619385, 0.005253024101257324, 0.005275296211242676, 0.005398975849151611, 0.005367008209228516, 0.005306784152984619, 0.005275296211242676, 0.005241856098175048, 0.005261023998260498, 0.005282911777496338, 0.005302207946777344, 0.0053155522346496585, 0.005453504085540771, 0.005680863857269287, 0.005891776084899903, 0.0058520641326904295, 0.005741727828979492, 0.005641056060791016, 0.005574656009674072, 0.005638144016265869, 0.005578495979309082, 0.00550870418548584, 0.005355552196502685, 0.00528659200668335, 0.005281727790832519, 0.005322751998901368, 0.005342656135559082, 0.005333568096160889, 0.005344768047332763, 0.005302432060241699, 0.005351967811584472, 0.005465919971466064, 0.005303743839263916, 0.005349760055541992, 0.005454304218292236, 0.0054802241325378415, 0.005073184013366699, 0.005455743789672851, 0.005568640232086182, 0.005574431896209717, 0.005541600227355957, 0.005525951862335205, 0.005571936130523682, 0.00550601577758789, 0.005465439796447754, 0.005462431907653808, 
0.005544032096862793, 0.005397984027862549, 0.00543993616104126, 0.00539024019241333, 0.005515007972717285, 0.005378399848937988, 0.0052973442077636716, 0.005288767814636231, 0.005285888195037842, 0.0052715520858764645, 0.0052367358207702636, 0.005336927890777588, 0.005451295852661133, 0.005819136142730713, 0.005812255859375, 0.005937280178070069, 0.005826623916625977, 0.005701280117034912, 0.00569536018371582, 0.00558406400680542, 0.00560591983795166, 0.0055586881637573245, 0.00541868782043457, 0.005742623805999756, 0.005367904186248779, 0.005406911849975586, 0.005342720031738281, 0.00530847978591919, 0.005339583873748779, 0.0053136320114135745, 0.005258143901824951, 0.00525497579574585, 0.005311999797821045, 0.005245471954345703, 0.005255328178405762, 0.005348896026611328, 0.0052731199264526366, 0.005337791919708252, 0.005435328006744385, 0.005411136150360108, 0.005629119873046875, 0.005626688003540039, 0.005475999832153321, 0.0054926080703735355, 0.0054635519981384275, 0.005616511821746826, 0.0058635520935058595, 0.00572822380065918, 0.00564415979385376, 0.005487071990966797, 0.005461664199829102, 0.005510496139526367, 0.005443935871124268]",tokens/s,181.76259509774255,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-2b,google/gemma-2b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.223-212.873.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.0,,,,1.21.4,,,,0.12.0,,,MB,937.55392,6533.61152,0.0,6138.363904,6060.931072,s,1,7.04798779296875,7.04798779296875,0.0,7.04798779296875,7.04798779296875,7.04798779296875,7.04798779296875,[7.04798779296875],,kWh,5.2098881333222375e-06,5.626032814737453e-07,2.771391105993448e-06,8.543882520789431e-06,,MB,1266.315264,6556.680192,0.0,6150.946816,5419.87328,s,10,4.7371629638671875,0.47371629638671875,0.004476699079665267,0.4723685760498047,0.47963414916992186,0.4801933258056641,0.48064066711425785,"[0.478864501953125, 0.472303955078125, 0.47571908569335936, 0.4724331970214844, 0.4686256408691406, 0.47137091064453124, 0.4675152893066406, 0.4700679931640625, 0.4795098876953125, 0.48075250244140627]",tokens/s,540.4078389378739,kWh,1.3981853269446116e-05,1.5419492338972889e-06,9.305999508285457e-06,2.482980201162886e-05,tokens/kWh,10310190.950379074,MB,1314.455552,6556.680192,0.0,6150.946816,5419.87584,s,10,15.723749145507812,1.5723749145507813,0.005431990629563022,1.5700950317382811,1.578046875,1.5826051025390624,1.5862516845703125,"[1.5701103515625, 1.5695615234375, 1.5698424072265624, 1.5700797119140626, 1.587163330078125, 1.570642333984375, 1.5691669921875, 1.5683338623046874, 1.571814697265625, 1.577033935546875]",tokens/s,40.06678014066305,kWh,4.585031890888735e-05,5.0572313042450124e-06,3.0434060061514248e-05,8.13416102746466e-05,tokens/kWh,774511.3452669929,,s,630,15.721630178451525,0.02495496853722466,0.0005131725795462734,0.024830032348632812,0.025127049827575684,0.025381708717346192,0.02711325262069703,"[0.026511871337890625, 0.025247104644775392, 0.024973535537719728, 0.02480988883972168, 0.024764415740966796, 0.024729600906372072, 0.02481510353088379, 0.02480588722229004, 
0.02476851272583008, 0.024821760177612305, 0.02490313529968262, 0.024819488525390624, 0.0248035831451416, 0.024856096267700196, 0.024773088455200196, 0.024805919647216797, 0.024835296630859375, 0.02483635139465332, 0.024773120880126953, 0.024930303573608398, 0.02491801643371582, 0.024852479934692383, 0.02475212860107422, 0.024674016952514647, 0.0247237434387207, 0.024739328384399413, 0.024746496200561522, 0.024885248184204102, 0.024731647491455077, 0.025380863189697265, 0.02478489685058594, 0.02510211181640625, 0.026751039505004882, 0.024944448471069337, 0.02479952049255371, 0.024698944091796876, 0.024936447143554686, 0.024993471145629883, 0.02494905662536621, 0.02490572738647461, 0.024930303573608398, 0.025268224716186522, 0.024894784927368165, 0.02480384063720703, 0.02477280044555664, 0.02490598487854004, 0.02489727973937988, 0.02484223937988281, 0.02477027130126953, 0.024883487701416015, 0.02485862350463867, 0.024841632843017578, 0.024773216247558592, 0.02512076759338379, 0.024887296676635744, 0.024786304473876954, 0.024807680130004884, 0.02481376075744629, 0.024899776458740235, 0.024860671997070313, 0.02483795166015625, 0.02478713607788086, 0.024811519622802734, 0.026420736312866212, 0.02518627166748047, 0.024943136215209962, 0.025980928421020507, 0.02473084831237793, 0.02465430450439453, 0.024874303817749025, 0.02502134323120117, 0.024809568405151368, 0.02507980728149414, 0.024715263366699217, 0.02511440086364746, 0.024717056274414062, 0.024793567657470702, 0.024771808624267578, 0.02476688003540039, 0.02479142379760742, 0.024803327560424804, 0.024653823852539062, 0.024774368286132813, 0.024833663940429688, 0.02474777603149414, 0.024738719940185547, 0.024805376052856445, 0.024799232482910157, 0.02480963134765625, 0.02488876724243164, 0.025104000091552735, 0.024797407150268555, 0.02473353576660156, 0.02482863998413086, 0.024741216659545897, 0.024762592315673827, 0.02534979248046875, 0.024746240615844725, 0.024817216873168946, 0.02505958366394043, 0.025047775268554687, 0.025038848876953124, 0.024999935150146483, 0.02518134307861328, 0.025051456451416015, 0.02494921684265137, 0.024891456604003905, 0.024960447311401367, 0.02489401626586914, 0.02480668830871582, 0.02482044792175293, 0.024786720275878905, 0.024817695617675783, 0.024772960662841795, 0.024715103149414063, 0.024765600204467775, 0.024867679595947264, 0.02487500762939453, 0.025149440765380858, 0.024788991928100586, 0.025161727905273438, 0.024827903747558593, 0.024750080108642578, 0.024723455429077147, 0.02471481513977051, 0.024836544036865235, 0.026320831298828125, 0.025217599868774414, 0.024973119735717773, 0.024823551177978517, 0.024680992126464844, 0.024773632049560547, 0.024742496490478515, 0.024780960083007814, 0.024723360061645508, 0.024755935668945312, 0.024758367538452147, 0.02473423957824707, 0.0247126407623291, 0.02512544059753418, 0.024872575759887695, 0.025110015869140623, 0.024699039459228515, 0.02473846435546875, 0.024847488403320312, 0.024779712677001953, 0.024749536514282227, 0.024756767272949218, 0.02486188888549805, 0.024802047729492186, 0.02471743965148926, 0.02478463935852051, 0.025382400512695313, 0.024787647247314453, 0.024759552001953126, 0.02478976058959961, 0.0247825927734375, 0.026972320556640624, 0.024827072143554688, 0.02478326416015625, 0.02475833511352539, 0.02491334342956543, 0.025279199600219727, 0.02505958366394043, 0.025096000671386717, 0.02517628860473633, 0.02507081604003906, 0.024971967697143556, 0.02488534355163574, 0.024922111511230468, 0.024920352935791014, 0.024870111465454103, 
0.024807167053222657, 0.024735679626464845, 0.024741920471191406, 0.024772960662841795, 0.024772287368774414, 0.024762943267822267, 0.02483340835571289, 0.024795488357543947, 0.024812000274658203, 0.024816768646240234, 0.0247869758605957, 0.025086816787719728, 0.024774528503417968, 0.024797311782836916, 0.02480121612548828, 0.024856288909912108, 0.024807775497436523, 0.02648678398132324, 0.025312288284301758, 0.024957920074462892, 0.024856576919555663, 0.024792320251464845, 0.02478761672973633, 0.024711231231689453, 0.024697055816650392, 0.024694591522216796, 0.02477670478820801, 0.02471116828918457, 0.024754175186157225, 0.024721408843994142, 0.024846336364746095, 0.02490719985961914, 0.02485702323913574, 0.025407615661621093, 0.025269472122192382, 0.02475254440307617, 0.02475257682800293, 0.024698816299438476, 0.024849599838256835, 0.02482851219177246, 0.024840415954589842, 0.025183456420898438, 0.024984352111816405, 0.024938432693481446, 0.024846399307250976, 0.024762367248535155, 0.024788223266601562, 0.024834463119506836, 0.024843807220458983, 0.024883935928344727, 0.024803199768066407, 0.024883424758911133, 0.02489753532409668, 0.02492755126953125, 0.024953535079956055, 0.025007295608520507, 0.02508678436279297, 0.02504640007019043, 0.024998176574707032, 0.024865119934082032, 0.024933887481689454, 0.024904191970825194, 0.02489334487915039, 0.02483737564086914, 0.024918880462646485, 0.0249036808013916, 0.02513920021057129, 0.024915712356567383, 0.024928512573242186, 0.024936447143554686, 0.024899072647094726, 0.024871456146240235, 0.02484003257751465, 0.024935808181762695, 0.024906496047973632, 0.02488115119934082, 0.024886688232421874, 0.024799840927124023, 0.02483404731750488, 0.02489068794250488, 0.026496864318847655, 0.025480768203735352, 0.02512886428833008, 0.024951263427734374, 0.02487055969238281, 0.025016544342041015, 0.02478316879272461, 0.02481990432739258, 0.024792896270751954, 0.02489958381652832, 0.024879104614257814, 0.02476470375061035, 0.025194208145141603, 0.02476201629638672, 0.0247459831237793, 0.024940160751342772, 0.024969343185424805, 0.024922719955444338, 0.02488115119934082, 0.02487295913696289, 0.0248026237487793, 0.024867584228515625, 0.024837568283081056, 0.02483456039428711, 0.02476851272583008, 0.02468659210205078, 0.024766464233398438, 0.02478489685058594, 0.024782432556152343, 0.02476278305053711, 0.02485043144226074, 0.02483184051513672, 0.024778911590576172, 0.024731647491455077, 0.02477670478820801, 0.02482585525512695, 0.02490572738647461, 0.024965152740478516, 0.024997791290283202, 0.025106496810913086, 0.025067520141601563, 0.025051136016845704, 0.025087711334228515, 0.025051424026489258, 0.024963071823120117, 0.024921119689941405, 0.02487295913696289, 0.024930879592895507, 0.024928159713745117, 0.026630399703979492, 0.02582143974304199, 0.02493440055847168, 0.02533171272277832, 0.02492620849609375, 0.027625471115112304, 0.024797183990478516, 0.024819711685180663, 0.02500383949279785, 0.02479532814025879, 0.024860671997070313, 0.028368896484375, 0.03222073745727539, 0.02508336067199707, 0.026275840759277344, 0.025219072341918947, 0.02495078468322754, 0.024815616607666017, 0.024719295501708986, 0.024713279724121094, 0.024823808670043947, 0.025126848220825195, 0.024694847106933593, 0.024758399963378905, 0.024850240707397463, 0.024857824325561523, 0.024728063583374024, 0.024815967559814453, 0.024819711685180663, 0.02481705665588379, 0.024819999694824218, 0.024831647872924804, 0.024820383071899415, 0.024788991928100586, 0.024846336364746095, 
0.024736831665039063, 0.02536134338378906, 0.02717081642150879, 0.024759328842163086, 0.02474492835998535, 0.02477670478820801, 0.025792255401611328, 0.024741920471191406, 0.024688831329345705, 0.024790592193603515, 0.02477507209777832, 0.024792448043823242, 0.024770303726196288, 0.02468550491333008, 0.02479017639160156, 0.024879968643188477, 0.024977407455444335, 0.02501593589782715, 0.025045120239257812, 0.025045120239257812, 0.024995967864990233, 0.024911359786987306, 0.024955392837524414, 0.024921472549438477, 0.024881023406982422, 0.024840959548950194, 0.024806400299072266, 0.02476255989074707, 0.024750911712646484, 0.024782848358154298, 0.024766368865966795, 0.024858720779418947, 0.024829376220703126, 0.02480508804321289, 0.02505561637878418, 0.024927776336669923, 0.024905887603759766, 0.024859424591064452, 0.024827711105346678, 0.024832191467285155, 0.02488118362426758, 0.024833375930786133, 0.02630463981628418, 0.025261152267456056, 0.02492860794067383, 0.024837888717651368, 0.024789663314819337, 0.024817440032958986, 0.024738016128540038, 0.024694400787353514, 0.024697216033935546, 0.024790687561035155, 0.024772960662841795, 0.024764415740966796, 0.02472915267944336, 0.024670656204223634, 0.02474563217163086, 0.02473119926452637, 0.024746431350708007, 0.0260380802154541, 0.025131263732910157, 0.024922399520874022, 0.024792255401611327, 0.025768415451049805, 0.02473206329345703, 0.02482912063598633, 0.024756767272949218, 0.02474732780456543, 0.02487388801574707, 0.024848543167114257, 0.024854272842407227, 0.025032800674438478, 0.02477791976928711, 0.024765247344970702, 0.024778751373291014, 0.024786943435668944, 0.024698879241943358, 0.024866783142089843, 0.024972543716430665, 0.025036735534667967, 0.025026655197143553, 0.025066240310668945, 0.02509004783630371, 0.02508732795715332, 0.024960800170898436, 0.024953664779663084, 0.02497952079772949, 0.024917280197143555, 0.0248306884765625, 0.0248789119720459, 0.024879199981689453, 0.024801599502563478, 0.024759967803955077, 0.024694911956787108, 0.02482585525512695, 0.024823007583618165, 0.024838943481445313, 0.024848384857177733, 0.024761375427246095, 0.02486739158630371, 0.02480169677734375, 0.024832000732421877, 0.024782848358154298, 0.024778751373291014, 0.02484592056274414, 0.02619171142578125, 0.025360736846923828, 0.02499564743041992, 0.02483308792114258, 0.024732608795166016, 0.024838048934936522, 0.024775936126708985, 0.024733983993530273, 0.024640064239501953, 0.024778751373291014, 0.02479033660888672, 0.024814271926879884, 0.024795135498046874, 0.024666112899780275, 0.024729280471801757, 0.024700511932373048, 0.024709856033325196, 0.024989696502685548, 0.024726943969726564, 0.02481622314453125, 0.02482784080505371, 0.024762432098388673, 0.024758272171020508, 0.02476054382324219, 0.024796031951904298, 0.024757152557373048, 0.025095487594604494, 0.024799936294555663, 0.024764415740966796, 0.024756223678588866, 0.024778751373291014, 0.024868864059448242, 0.024854528427124024, 0.025179967880249024, 0.024794815063476562, 0.024991327285766602, 0.02511350440979004, 0.025002080917358397, 0.024975263595581054, 0.02505891227722168, 0.025076128005981444, 0.024971263885498047, 0.02487612724304199, 0.024816287994384765, 0.024996095657348633, 0.024836095809936523, 0.02480476760864258, 0.024783456802368164, 0.02489139175415039, 0.02490163230895996, 0.02483171272277832, 0.02479158401489258, 0.024880895614624022, 0.024782848358154298, 0.024754400253295897, 0.024763551712036133, 0.02476915168762207, 0.025782272338867186, 
0.02481942367553711, 0.024799167633056642, 0.02504243278503418, 0.02499260711669922, 0.024854528427124024, 0.0265031681060791, 0.02537433624267578, 0.024932607650756836, 0.024808736801147462, 0.025052000045776367, 0.02517967987060547, 0.02468502426147461, 0.024758272171020508, 0.02478816032409668, 0.024732479095458983, 0.024763519287109376, 0.02472435188293457, 0.024728927612304687, 0.02474665641784668, 0.024649471282958985, 0.024735391616821287, 0.024775264739990234, 0.02479705619812012, 0.025040639877319335, 0.02826278305053711, 0.02463871955871582, 0.024678176879882812, 0.024706016540527342, 0.024684576034545897, 0.02475619125366211, 0.02478489685058594, 0.024780799865722656, 0.024795135498046874, 0.024762048721313476, 0.024717248916625977, 0.024737951278686523, 0.0247743034362793, 0.024793439865112305, 0.02493462371826172, 0.024845792770385743, 0.024818208694458006, 0.024901248931884765, 0.02504105567932129, 0.025058591842651367, 0.025009088516235352, 0.02509619140625, 0.024913183212280275, 0.02492201614379883, 0.02492089653015137, 0.024799232482910157, 0.02483404731750488, 0.02493235206604004, 0.024871999740600587, 0.02482067108154297, 0.024770624160766603, 0.02481123161315918, 0.024815391540527344, 0.02476812744140625, 0.024998687744140626, 0.024737823486328126, 0.024907167434692384, 0.024803936004638674, 0.024814687728881835, 0.024715200424194336, 0.024844415664672853, 0.026590047836303712, 0.02488319969177246, 0.024778751373291014, 0.02622502326965332, 0.025255039215087892, 0.02494963264465332, 0.024844287872314453, 0.024718719482421873, 0.02480191993713379, 0.024805376052856445, 0.024754079818725586, 0.024707168579101563, 0.024690303802490234, 0.024712799072265625, 0.024783584594726564, 0.02471903991699219, 0.02483238410949707, 0.02470911979675293, 0.024726688385009767, 0.024740703582763673, 0.024786943435668944, 0.027172864913940428, 0.026746879577636717, 0.024756223678588866, 0.02530713653564453, 0.024762367248535155, 0.02467635154724121, 0.02469478416442871, 0.024791040420532227, 0.02495033645629883, 0.02492460823059082, 0.024848447799682618, 0.02481862449645996, 0.024826047897338867, 0.0248384952545166, 0.024784671783447267, 0.024803264617919922, 0.02479520034790039, 0.02523103904724121, 0.02497817611694336, 0.024965375900268556, 0.024946464538574218, 0.025161951065063477, 0.02512227249145508, 0.025318975448608397, 0.0249149112701416, 0.02495078468322754, 0.0248209285736084, 0.024819999694824218, 0.024832191467285155, 0.02489788818359375, 0.02485641670227051, 0.02481782341003418, 0.024765535354614256, 0.02472809600830078, 0.024803712844848634, 0.028482656478881836, 0.024695552825927735, 0.02481939125061035, 0.02480748748779297, 0.024835744857788087, 0.02481158447265625, 0.024740543365478516, 0.024983552932739257, 0.026660064697265624, 0.024795679092407225]",tokens/s,40.07218035592097,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,falcon,tiiuae/falcon-40b,tiiuae/falcon-40b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): 
File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: FalconForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. 
Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gemma,google/gemma-7b,google/gemma-7b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 990, in __init__ self.model = GemmaModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in __init__ [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 775, in [GemmaDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 565, in __init__ self.mlp = GemmaMLP(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gemma/modeling_gemma.py"", line 140, in __init__ self.up_proj = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File 
""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 144.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 34.12 MiB is free. Process 138954 has 14.71 GiB memory in use. Of the allocated memory 14.59 GiB is allocated by PyTorch, and 1.69 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-125m,facebook/opt-125m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,dbrx,databricks/dbrx-base,databricks/dbrx-base,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1234, in __init__ self.transformer = DbrxModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in __init__ self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 1009, in self.blocks = nn.ModuleList([DbrxBlock(config, block_idx) for block_idx in range(config.n_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 788, in __init__ self.ffn = DbrxFFN(config=config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 765, in __init__ self.experts = DbrxExperts( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 704, in __init__ self.mlp = DbrxExpertGLU( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/dbrx/modeling_dbrx.py"", line 682, in __init__ self.w1 = nn.Parameter(torch.empty(moe_num_experts * ffn_hidden_size, hidden_size)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.97 GiB. GPU 0 has a total capacity of 14.74 GiB of which 1.17 GiB is free. Process 110623 has 13.57 GiB memory in use. Of the allocated memory 13.45 GiB is allocated by PyTorch, and 1.36 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,opt,facebook/opt-350m,facebook/opt-350m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: OPTForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,stablelm,stabilityai/stablelm-2-12b,stabilityai/stablelm-2-12b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 1133, in __init__ self.model = StableLmModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in __init__ [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 914, in [StableLmDecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 688, in __init__ self.self_attn = ATTENTION_CLASSES[config._attn_implementation](config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/stablelm/modeling_stablelm.py"", line 339, in __init__ self.k_proj = nn.Linear(self.hidden_size, self.num_key_value_heads * self.head_dim, bias=config.use_qkv_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 14.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 190362 has 14.73 GiB memory in use. Of the allocated memory 14.54 GiB is allocated by PyTorch, and 78.97 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/polyglot-ko-12.8b,EleutherAI/polyglot-ko-12.8b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3886, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 1120, in __init__ self.gpt_neox = GPTNeoXModel(config) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in __init__ self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 882, in self.layers = nn.ModuleList([GPTNeoXLayer(config, i) for i in range(config.num_hidden_layers)]) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 743, in __init__ self.attention = GPT_NEOX_ATTENTION_CLASSES[config._attn_implementation](config, layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", 
line 471, in __init__ super().__init__(config, layer_idx=layer_idx) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt_neox/modeling_gpt_neox.py"", line 175, in __init__ self.dense = nn.Linear(config.hidden_size, config.hidden_size, bias=config.attention_bias) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 50.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 8.12 MiB is free. Process 41762 has 14.73 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 20.86 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,,MB,789.172224,763.232256,0.0,360.710144,345.493504,s,1,7.6105244140625,7.6105244140625,0.0,7.6105244140625,7.6105244140625,7.6105244140625,7.6105244140625,[7.6105244140625],,kWh,2.5832863291801306e-06,2.776496179829498e-07,8.777784799851585e-07,3.7387144271482386e-06,,MB,1154.752512,775.815168,0.0,362.807296,319.011328,s,19,0.44184643745422364,0.023255075655485458,0.0003493080803328801,0.023152128219604492,0.023347532272338867,0.02351442203521728,0.024452081794738772,"[0.024686496734619142, 0.023384191513061522, 0.023152128219604492, 0.02309708786010742, 0.023126880645751954, 0.023157951354980468, 0.023204416275024415, 0.02304502487182617, 0.023150592803955077, 0.023211456298828124, 0.023079200744628905, 0.0232108154296875, 0.02311884880065918, 0.02328214454650879, 0.023091104507446288, 0.023295936584472657, 0.023098304748535158, 0.023338367462158203, 0.023115488052368165]",tokens/s,11008.349480024768,kWh,7.956104997730181e-07,8.77414730085392e-08,5.268956682615272e-07,1.4102476410430847e-06,tokens/kWh,181528401.50163308,MB,1193.828352,798.88384,0.0,385.875968,319.013888,s,19,9.716356506347656,0.5113871845446135,0.009614222632072221,0.5108898315429687,0.5229644287109375,0.5240051391601562,0.5302006372070313,"[0.5231446533203125, 0.53174951171875, 0.5174469604492188, 0.51473681640625, 0.5182813110351563, 0.5113012084960937, 0.520412841796875, 0.5027524108886718, 0.5034724426269531, 0.5005283508300781, 0.49448150634765625, 0.5038409729003906, 0.5075123596191407, 0.5229193725585938, 0.5108898315429687, 0.4967735900878906, 0.5091379699707032, 0.5083453674316406, 
0.5186290283203125]",tokens/s,123.19432692882405,kWh,1.437201849043555e-05,1.5849636465697255e-06,5.9121372607394155e-06,2.186911939774469e-05,tokens/kWh,2880774.4314797167,,s,1197,9.704699440479262,0.008107518329556623,0.000263398318828182,0.008112992286682129,0.008345254516601562,0.008433657836914063,0.008859627265930176,"[0.00819660758972168, 0.008466431617736817, 0.008217920303344726, 0.008261759757995606, 0.008258111953735351, 0.008372223854064942, 0.008239104270935058, 0.008251392364501953, 0.008243200302124023, 0.008286208152770995, 0.008222271919250488, 0.008221280097961426, 0.008224608421325684, 0.008206111907958984, 0.008239168167114258, 0.008402079582214355, 0.008290719985961915, 0.008385024070739745, 0.008181856155395508, 0.008409088134765624, 0.008204287528991699, 0.008083456039428711, 0.008208383560180664, 0.00820348834991455, 0.008243680000305176, 0.008234784126281739, 0.008182304382324219, 0.008219936370849609, 0.008329471588134766, 0.008298975944519043, 0.008252479553222657, 0.008203200340270996, 0.008240511894226074, 0.008311519622802735, 0.008312735557556152, 0.008229984283447265, 0.00815401554107666, 0.008205792427062989, 0.008511072158813476, 0.008251680374145507, 0.008348416328430176, 0.00837606430053711, 0.008352992057800292, 0.008451007843017578, 0.008282112121582032, 0.008319071769714356, 0.0084683837890625, 0.00836739158630371, 0.008340224266052246, 0.008428959846496583, 0.0083088960647583, 0.008468895912170411, 0.008447584152221679, 0.008335776329040527, 0.008291680335998535, 0.008239551544189453, 0.008392352104187012, 0.00830726432800293, 0.008409088134765624, 0.008249343872070313, 0.008345600128173827, 0.008286080360412597, 0.008204416275024414, 0.00838105583190918, 0.008341504096984862, 0.00832921600341797, 0.00854377555847168, 0.00845241641998291, 0.008423295974731446, 0.00828262424468994, 0.008239232063293456, 0.008277664184570313, 0.008515007972717285, 0.008395327568054198, 0.008509440422058106, 0.00830463981628418, 0.008280192375183106, 0.008466143608093262, 0.008292511940002441, 0.008302847862243652, 0.008345024108886719, 0.008271807670593262, 0.008369759559631347, 0.008372415542602539, 0.008317536354064941, 0.00834182357788086, 0.008545984268188476, 0.008267775535583496, 0.008230912208557128, 0.00820796775817871, 0.008291775703430176, 0.00843273639678955, 0.008396575927734375, 0.008310527801513671, 0.008245247840881348, 0.008229536056518555, 0.008368000030517578, 0.008323007583618165, 0.008513471603393555, 0.00834937572479248, 0.008866047859191895, 0.008658559799194335, 0.008379903793334961, 0.008333184242248536, 0.008395392417907715, 0.00855078411102295, 0.00843734359741211, 0.00830508804321289, 0.008310976028442383, 0.00841436767578125, 0.008458880424499512, 0.008842656135559082, 0.009976351737976074, 0.009744447708129883, 0.008357312202453613, 0.00874550437927246, 0.008261664390563965, 0.0082227201461792, 0.008623519897460937, 0.008309344291687011, 0.008310688018798829, 0.008278400421142577, 0.008339167594909668, 0.008358943939208984, 0.008281311988830566, 0.008263487815856934, 0.008099871635437012, 0.008167391777038574, 0.00819814395904541, 0.00817535972595215, 0.008138912200927734, 0.008298591613769531, 0.008140416145324706, 0.008350079536437988, 0.008163328170776368, 0.008148159980773926, 0.008156160354614257, 0.008354687690734863, 0.008163616180419922, 0.008136768341064454, 0.008170080184936524, 0.008247296333312988, 0.008224767684936523, 0.008251232147216797, 0.008135135650634766, 0.008164671897888184, 0.008157567977905273, 0.008079039573669433, 
0.008296735763549805, 0.00828444766998291, 0.008165151596069335, 0.008209759712219239, 0.008225376129150391, 0.008181792259216308, 0.00819200038909912, 0.008185567855834961, 0.008181471824645997, 0.008202816009521484, 0.010534208297729492, 0.008587039947509766, 0.008108896255493164, 0.008155200004577636, 0.008178943634033204, 0.008106752395629883, 0.008125599861145019, 0.008112992286682129, 0.008150176048278808, 0.008255616188049316, 0.008133472442626953, 0.008515456199645995, 0.008128512382507324, 0.008216575622558593, 0.008240544319152832, 0.00814675235748291, 0.008381216049194336, 0.008088607788085938, 0.008079487800598145, 0.008165920257568359, 0.008142656326293945, 0.008249024391174316, 0.007928703784942627, 0.007954368114471436, 0.008173567771911621, 0.008133983612060547, 0.007750304222106934, 0.007907296180725098, 0.007850175857543945, 0.007990623950958252, 0.008102399826049805, 0.008157983779907226, 0.00819711971282959, 0.008166367530822755, 0.008159263610839844, 0.008036352157592774, 0.008046624183654785, 0.00828822422027588, 0.008214655876159669, 0.008621952056884766, 0.008151007652282714, 0.008074943542480468, 0.008083392143249511, 0.008189727783203125, 0.008313471794128419, 0.008308768272399902, 0.008336480140686036, 0.008264575958251953, 0.008388607978820802, 0.008093695640563964, 0.008050687789916992, 0.008160832405090333, 0.008151136398315429, 0.008187583923339844, 0.008155808448791504, 0.008056096076965332, 0.008231040000915527, 0.008073823928833008, 0.008121824264526368, 0.008129088401794434, 0.008034272193908691, 0.008098848342895508, 0.008095999717712403, 0.008128512382507324, 0.008329312324523925, 0.008026752471923828, 0.008030207633972167, 0.007974912166595459, 0.008089407920837403, 0.00814515209197998, 0.008095199584960938, 0.008271360397338867, 0.008240447998046876, 0.00810972785949707, 0.008185248374938964, 0.008178367614746093, 0.008085408210754395, 0.008267775535583496, 0.00818995189666748, 0.008110367774963378, 0.008175328254699707, 0.008073216438293456, 0.007944255828857423, 0.008014016151428223, 0.00813644790649414, 0.00820633602142334, 0.008167424201965333, 0.008112223625183105, 0.008175519943237305, 0.008051008224487304, 0.008304320335388183, 0.008231072425842286, 0.008200032234191895, 0.00821225643157959, 0.008163328170776368, 0.008202239990234375, 0.008216575622558593, 0.008268927574157715, 0.00817801570892334, 0.008182271957397461, 0.008151071548461914, 0.008212736129760742, 0.008160608291625977, 0.008158847808837891, 0.008108832359313966, 0.008165632247924805, 0.008083200454711914, 0.010094592094421387, 0.009353055953979492, 0.008345760345458985, 0.008322175979614257, 0.008186752319335938, 0.008116255760192871, 0.008150176048278808, 0.00814572811126709, 0.008077376365661621, 0.008221664428710938, 0.008162272453308106, 0.007984320163726806, 0.008135680198669434, 0.008056768417358398, 0.008179807662963867, 0.00820201587677002, 0.008214655876159669, 0.008297727584838868, 0.008075679779052734, 0.008361856460571289, 0.008079551696777343, 0.008179871559143067, 0.008050687789916992, 0.008103936195373536, 0.00810374355316162, 0.008155327796936035, 0.00803603172302246, 0.007996928215026856, 0.008071840286254883, 0.008107839584350586, 0.008055104255676269, 0.008196127891540528, 0.008168864250183105, 0.00829635238647461, 0.008186816215515137, 0.008582912445068359, 0.008214431762695313, 0.00824124813079834, 0.008279295921325683, 0.008087583541870117, 0.008165184020996094, 0.008240032196044921, 0.008029791831970215, 0.007910048007965088, 0.00832476806640625, 
0.008044032096862793, 0.008491359710693359, 0.008144767761230468, 0.008016256332397461, 0.008146688461303711, 0.008185536384582519, 0.008193759918212891, 0.008270432472229004, 0.008253439903259278, 0.008174688339233398, 0.008356767654418945, 0.008296640396118164, 0.008209728240966796, 0.008373984336853027, 0.008262432098388672, 0.008182784080505372, 0.008179840087890624, 0.008372096061706543, 0.008236031532287597, 0.008282112121582032, 0.008427136421203614, 0.008293888092041016, 0.008340576171875, 0.008167200088500977, 0.008142720222473145, 0.00805008029937744, 0.009013983726501465, 0.008160287857055665, 0.008215040206909179, 0.007930560111999512, 0.007910655975341798, 0.008002143859863281, 0.008034111976623536, 0.007952032089233398, 0.007979487895965576, 0.007919616222381591, 0.007909535884857178, 0.007933792114257812, 0.00789299201965332, 0.00806816005706787, 0.007998559951782227, 0.007839583873748779, 0.007917888164520263, 0.007873631954193116, 0.00802672004699707, 0.007989247798919678, 0.007967936038970948, 0.007981215953826904, 0.007770783901214599, 0.0078050241470336915, 0.008051872253417969, 0.007843999862670899, 0.007887680053710938, 0.007911295890808105, 0.007985055923461914, 0.008042240142822265, 0.0079651198387146, 0.007984960079193115, 0.008027744293212891, 0.00825385570526123, 0.008531968116760253, 0.008196096420288086, 0.008146240234375, 0.008073920249938965, 0.008085503578186035, 0.008151040077209473, 0.0081081600189209, 0.008107135772705078, 0.008251520156860351, 0.008347583770751952, 0.008167488098144531, 0.008242752075195313, 0.008253888130187988, 0.0081428804397583, 0.008140992164611817, 0.008458144187927246, 0.008339136123657227, 0.008245344161987305, 0.008189248085021974, 0.008147135734558105, 0.008208992004394532, 0.008539520263671874, 0.008309375762939454, 0.008198431968688965, 0.008132320404052734, 0.008238207817077637, 0.008258432388305663, 0.008214783668518067, 0.008267807960510253, 0.008213248252868652, 0.008198911666870118, 0.008275679588317871, 0.00828985595703125, 0.008497632026672364, 0.008274399757385253, 0.008441375732421876, 0.008235648155212402, 0.008180928230285645, 0.008163935661315918, 0.008197471618652344, 0.008159104347229005, 0.008266336441040039, 0.008245792388916016, 0.008306207656860352, 0.008253631591796875, 0.008238207817077637, 0.008477727890014648, 0.008273759841918945, 0.008369471549987793, 0.008264384269714355, 0.008342880249023438, 0.008088352203369141, 0.00825331211090088, 0.008120320320129394, 0.008201696395874024, 0.008274144172668457, 0.008859359741210937, 0.008172320365905763, 0.008256959915161132, 0.0084584321975708, 0.008043807983398437, 0.008069120407104492, 0.008177984237670898, 0.00841379165649414, 0.008110400199890136, 0.008068639755249024, 0.008087008476257325, 0.008127103805541993, 0.008166815757751465, 0.008169792175292968, 0.008247039794921874, 0.00802406406402588, 0.008005536079406739, 0.007983071804046631, 0.008130687713623047, 0.00801587200164795, 0.00807423973083496, 0.007982079982757568, 0.008390720367431641, 0.00842124843597412, 0.008261695861816406, 0.008134688377380371, 0.008078880310058593, 0.008020031929016113, 0.007956960201263429, 0.007948416233062744, 0.00820406436920166, 0.008033535957336425, 0.008112895965576171, 0.007931903839111328, 0.008042495727539062, 0.00786464023590088, 0.007874239921569824, 0.007806975841522217, 0.007824960231781006, 0.007897535800933837, 0.007811071872711181, 0.007834784030914307, 0.007797599792480469, 0.00786358404159546, 0.007936287879943847, 0.008098336219787597, 
0.007867392063140868, 0.007777184009552002, 0.007817215919494629, 0.007899072170257568, 0.007903007984161377, 0.00787017583847046, 0.007792799949645996, 0.007842432022094727, 0.007902527809143066, 0.00782374382019043, 0.007891039848327636, 0.007902847766876222, 0.007920191764831544, 0.007808383941650391, 0.008104384422302246, 0.008005632400512695, 0.007983104228973388, 0.007968959808349609, 0.007984960079193115, 0.0079683837890625, 0.007930240154266358, 0.00794649600982666, 0.007927167892456054, 0.007870143890380859, 0.007894976139068604, 0.007969215869903564, 0.007997471809387207, 0.007962495803833007, 0.008290399551391601, 0.007999807834625243, 0.00806009578704834, 0.007928607940673828, 0.008063136100769043, 0.008124128341674804, 0.008104448318481445, 0.008016096115112305, 0.007997504234313965, 0.007981056213378907, 0.008036383628845215, 0.008114368438720702, 0.007863296031951903, 0.007808864116668701, 0.007760447978973389, 0.007756159782409668, 0.007937920093536378, 0.00789686393737793, 0.007807104110717773, 0.00783404779434204, 0.007723008155822754, 0.0077454719543457035, 0.007754720211029052, 0.007709663867950439, 0.007685088157653809, 0.0076390719413757325, 0.007652128219604492, 0.007755904197692871, 0.007997632026672363, 0.008019231796264648, 0.00804419231414795, 0.008018303871154785, 0.007979199886322022, 0.00787443208694458, 0.007856128215789794, 0.007972511768341064, 0.008237855911254883, 0.00809382438659668, 0.007980576038360596, 0.008011808395385742, 0.008138336181640626, 0.008045472145080567, 0.00798905611038208, 0.008042495727539062, 0.008030207633972167, 0.00801587200164795, 0.008011775970458984, 0.008026111602783203, 0.008052736282348634, 0.007929855823516846, 0.007941184043884277, 0.008168383598327637, 0.008060928344726562, 0.00805679988861084, 0.008097824096679687, 0.008118335723876952, 0.008146880149841308, 0.008122719764709473, 0.008142623901367187, 0.008117152214050292, 0.008119263648986817, 0.00814236831665039, 0.008397343635559083, 0.008164799690246582, 0.008013792037963867, 0.007928351879119873, 0.0079967041015625, 0.008234399795532227, 0.008280447959899902, 0.008158528327941894, 0.00807795238494873, 0.008055168151855469, 0.007958432197570801, 0.007866079807281495, 0.007884223937988282, 0.007836512088775635, 0.007945759773254395, 0.007876992225646973, 0.00785584020614624, 0.007843200206756593, 0.007853216171264648, 0.00779260778427124, 0.007783552169799804, 0.007757791996002197, 0.007758592128753662, 0.00782972812652588, 0.008107839584350586, 0.008017184257507325, 0.007926112174987792, 0.007870783805847168, 0.00797215986251831, 0.007917759895324707, 0.00792633581161499, 0.00791756820678711, 0.00790553617477417, 0.007939839839935302, 0.007788544178009033, 0.007804704189300537, 0.007809247970581055, 0.0078087677955627445, 0.00776204776763916, 0.007728288173675537, 0.007726047992706299, 0.007743648052215576, 0.007851840019226074, 0.008089632034301757, 0.008126399993896484, 0.00786028814315796, 0.007897088050842285, 0.00790127992630005, 0.0078065600395202635, 0.007838016033172607, 0.007874559879302979, 0.007915520191192627, 0.007939775943756104, 0.007931327819824219, 0.008062944412231446, 0.008022944450378417, 0.007962368011474609, 0.008052096366882325, 0.008018815994262695, 0.008021120071411134, 0.008100031852722168, 0.008089599609375, 0.008165151596069335, 0.008070048332214355, 0.008043871879577637, 0.008047264099121093, 0.00799129581451416, 0.007970816135406494, 0.007945312023162841, 0.007885727882385254, 0.007818463802337647, 0.007791552066802978, 
0.007714655876159668, 0.007705920219421387, 0.00770524787902832, 0.007739136219024658, 0.007694975852966308, 0.007834784030914307, 0.007692512035369873, 0.007764256000518799, 0.007726111888885498, 0.00784278392791748, 0.008533856391906739, 0.00927519989013672, 0.007962975978851318, 0.007785600185394287, 0.00774015998840332, 0.007808320045471192, 0.007832575798034667, 0.007933599948883057, 0.007966432094573974, 0.007809855937957764, 0.00788419198989868, 0.007718463897705078, 0.007879136085510254, 0.007803071975708007, 0.007761568069458008, 0.007764319896697998, 0.007632480144500732, 0.007659904003143311, 0.00798076820373535, 0.007858496189117431, 0.0077619199752807615, 0.007899392127990722, 0.007866112232208252, 0.0076861119270324706, 0.007725088119506836, 0.00771615982055664, 0.007696512222290039, 0.007608255863189697, 0.007802720069885254, 0.0076409921646118165, 0.00751910400390625, 0.007565311908721924, 0.0076137280464172365, 0.007794591903686523, 0.007658368110656739, 0.0076054720878601075, 0.007655295848846435, 0.007666528224945068, 0.007919136047363282, 0.008012255668640137, 0.007888895988464355, 0.007860223770141601, 0.007869887828826904, 0.007965248107910156, 0.007921919822692872, 0.008289312362670899, 0.007912000179290772, 0.007975071907043456, 0.008316927909851075, 0.008118847846984863, 0.008080896377563476, 0.008118144035339355, 0.007965216159820557, 0.008130975723266601, 0.008064224243164062, 0.008067872047424316, 0.008177663803100586, 0.008130559921264649, 0.008112128257751466, 0.008087039947509766, 0.00809011173248291, 0.008069375991821288, 0.008292096138000488, 0.008252896308898926, 0.00805942440032959, 0.008014880180358886, 0.007932415962219238, 0.007829343795776367, 0.00790387201309204, 0.00831283187866211, 0.008325119972229005, 0.007880703926086426, 0.0079137601852417, 0.00798076820373535, 0.008026111602783203, 0.00828825569152832, 0.008079680442810058, 0.007994976043701172, 0.007968704223632812, 0.007878431797027588, 0.00786243200302124, 0.007815392017364501, 0.007728447914123535, 0.00795308780670166, 0.00783564805984497, 0.007809023857116699, 0.00782099199295044, 0.007901504039764404, 0.008036352157592774, 0.007816383838653565, 0.0077504639625549316, 0.007768064022064209, 0.007792352199554443, 0.007807263851165772, 0.007849984169006348, 0.007726111888885498, 0.007793856143951416, 0.007742303848266602, 0.007874911785125732, 0.007903103828430175, 0.00817535972595215, 0.00803324794769287, 0.00791865587234497, 0.007963583946228028, 0.00799129581451416, 0.007964672088623047, 0.007927552223205566, 0.00812435245513916, 0.008096063613891601, 0.008000736236572265, 0.00804691219329834, 0.008221152305603028, 0.008112128257751466, 0.008250207901000976, 0.008070367813110352, 0.007969567775726319, 0.00791756820678711, 0.007958528041839599, 0.00789254379272461, 0.007948544025421143, 0.007907296180725098, 0.007962495803833007, 0.008083807945251464, 0.008044544219970704, 0.008107359886169434, 0.008174528121948242, 0.00794595193862915, 0.007925024032592773, 0.008064895629882813, 0.007969632148742675, 0.008052800178527832, 0.008241087913513183, 0.008317952156066894, 0.008010751724243164, 0.00800160026550293, 0.007863711833953858, 0.00779318380355835, 0.00793724822998047, 0.007829951763153075, 0.007886528015136719, 0.0077892160415649414, 0.0078113598823547365, 0.007868127822875976, 0.0077859840393066405, 0.007696896076202392, 0.00783510398864746, 0.007875103950500489, 0.007785888195037842, 0.007733856201171875, 0.007822976112365723, 0.007802624225616455, 0.007981919765472412, 
0.008029696464538574, 0.008001824378967286, 0.007986432075500488, 0.00801420783996582, 0.008087264060974122, 0.008193728446960449, 0.008094688415527344, 0.00816857624053955, 0.008335328102111817, 0.008229791641235351, 0.008122367858886719, 0.008235008239746093, 0.008238271713256836, 0.008290911674499512, 0.008337632179260254, 0.008333344459533692, 0.00818172836303711, 0.008523776054382324, 0.008266847610473632, 0.008328096389770508, 0.008194047927856446, 0.008269824028015137, 0.008359935760498047, 0.008279264450073242, 0.00826857566833496, 0.008255488395690918, 0.008330944061279297, 0.008288576126098632, 0.008244799613952638, 0.008228416442871095, 0.008338303565979003, 0.008447423934936523, 0.00849567985534668, 0.008294400215148925, 0.008343423843383789, 0.008339584350585938, 0.008237055778503418, 0.008130623817443847, 0.008140735626220703, 0.008390591621398926, 0.008426624298095703, 0.008257984161376953, 0.008151616096496582, 0.008185312271118165, 0.00824163246154785, 0.008199199676513672, 0.008375264167785645, 0.008138751983642578, 0.008206463813781739, 0.008361408233642578, 0.00817523193359375, 0.008176416397094726, 0.008406975746154784, 0.008319071769714356, 0.008263680458068847, 0.008179264068603516, 0.008209024429321289, 0.008475456237792969, 0.0091146240234375, 0.008327487945556641, 0.008321855545043945, 0.008301152229309081, 0.008208992004394532, 0.008201151847839356, 0.008392704010009766, 0.008217344284057617, 0.008299584388732911, 0.008270400047302246, 0.008309184074401855, 0.008203295707702637, 0.008304896354675293, 0.008218688011169433, 0.008302751541137696, 0.008245823860168457, 0.008185728073120117, 0.00830668830871582, 0.00822268772125244, 0.008330464363098145, 0.008387392044067382, 0.008458016395568847, 0.008270048141479492, 0.008186911582946777, 0.008244192123413086, 0.008241151809692383, 0.008182944297790528, 0.008208255767822265, 0.008181759834289551, 0.00819916820526123, 0.008154111862182617, 0.008118016242980957, 0.008129887580871582, 0.008190272331237793, 0.008153632164001464, 0.008130720138549805, 0.00803388786315918, 0.008063296318054199, 0.008185855865478516, 0.008132608413696289, 0.008150176048278808, 0.008215488433837891, 0.008370079994201661, 0.008281248092651367, 0.008153951644897461, 0.008286208152770995, 0.008159232139587403, 0.008150336265563964, 0.008085599899291992, 0.0081496000289917, 0.008065024375915527, 0.00808291244506836, 0.008107551574707031, 0.008110176086425782, 0.00837724781036377, 0.00813043212890625, 0.008327296257019043, 0.00820412826538086, 0.008077471733093262, 0.008097663879394531, 0.00806924819946289, 0.008010080337524414, 0.007999135971069335, 0.008049663543701171, 0.008094176292419434, 0.007965216159820557, 0.008118016242980957, 0.008138336181640626, 0.008104191780090333, 0.008259360313415526, 0.007943903923034668, 0.007823391914367676, 0.007879487991333008, 0.00792409610748291, 0.007939775943756104, 0.007979135990142822, 0.00799295997619629, 0.008018143653869629, 0.008052288055419922, 0.007929471969604493, 0.0079552001953125, 0.00831702423095703, 0.008535136222839355, 0.008061856269836425, 0.007994559764862061, 0.008047136306762695, 0.008085151672363282, 0.008071295738220215, 0.007858784198760986, 0.007873983860015869, 0.007968480110168458, 0.007970655918121338, 0.008070207595825196, 0.007965951919555665, 0.007886655807495117, 0.007821728229522705, 0.007874207973480225, 0.007834015846252441, 0.007813375949859619, 0.0078067841529846195, 0.007906976222991944, 0.008096287727355957, 0.009002335548400879, 0.008319647789001464, 
0.010919455528259278, 0.00809932804107666, 0.007987936019897461, 0.008009984016418456, 0.008187647819519042, 0.00803270435333252, 0.008011360168457032, 0.008032480239868163, 0.00830668830871582, 0.007933343887329102, 0.007914144039154053, 0.007903327941894531, 0.007995232105255127, 0.007880576133728027, 0.007802879810333252, 0.007815296173095704, 0.007835423946380616, 0.007880288124084473, 0.007972479820251465, 0.007805056095123291, 0.007701375961303711, 0.007737343788146973, 0.0077127041816711425, 0.007638239860534668, 0.007582272052764892, 0.007583456039428711, 0.0076109437942504885, 0.0076574721336364745, 0.007579648017883301, 0.0077842559814453125, 0.007688384056091308, 0.007686240196228027, 0.007662975788116455, 0.007632895946502686, 0.007549280166625976, 0.007542975902557373, 0.007786496162414551, 0.007788544178009033, 0.007657216072082519, 0.007632607936859131, 0.00759657621383667, 0.007558784008026123, 0.0075411200523376465, 0.00750601577758789, 0.007519231796264648, 0.007662015914916992, 0.007629216194152832, 0.007720416069030762, 0.007788608074188233, 0.007811615943908692, 0.008153087615966797, 0.008132608413696289, 0.00805078411102295, 0.008138272285461426, 0.008026495933532714, 0.007997439861297608, 0.008093695640563964, 0.008146944046020508, 0.008079360008239746, 0.008228863716125488, 0.00821452808380127, 0.00815897560119629, 0.008093952178955078, 0.008201760292053223, 0.008165696144104003, 0.008103103637695312, 0.008055520057678223, 0.00828752040863037, 0.008219615936279297, 0.008174880027770996, 0.008042464256286621, 0.008051487922668457, 0.00816534423828125, 0.008048543930053711, 0.007953567981719971, 0.008017919540405273, 0.007936960220336913, 0.00799948787689209, 0.007896768093109132, 0.007932223796844483, 0.007875616073608399, 0.00784835195541382, 0.007940671920776366, 0.007995391845703125, 0.008164480209350585, 0.008174464225769043, 0.008168831825256347, 0.0081845121383667, 0.00799235200881958, 0.00786729621887207, 0.0077844481468200685, 0.007872511863708496, 0.00789414405822754, 0.00802195167541504, 0.007887807846069336, 0.007833600044250488, 0.007792640209197998, 0.007814655780792237, 0.00793446397781372, 0.007958528041839599, 0.008245247840881348, 0.008130559921264649, 0.008019968032836914, 0.008013952255249023, 0.008001055717468262, 0.008240832328796386, 0.008424063682556153, 0.008220704078674316, 0.008201472282409668, 0.008343584060668945, 0.008415424346923828, 0.008284543991088867, 0.008202400207519531, 0.008096351623535156, 0.008214655876159669, 0.008195648193359375, 0.008255935668945313, 0.008159232139587403, 0.008220512390136719, 0.008188063621520997, 0.008144895553588867, 0.008072223663330078, 0.008192255973815918, 0.008198880195617675, 0.008151328086853028, 0.008056544303894042, 0.00812019157409668, 0.008165599822998047, 0.008021439552307129, 0.007946656227111817, 0.008061247825622559, 0.007961631774902344, 0.007899199962615967, 0.007754496097564697, 0.007823423862457275, 0.007819104194641113, 0.007925824165344238, 0.007935935974121094, 0.008015551567077638, 0.007849472045898438, 0.007908160209655762, 0.008158656120300293, 0.008022144317626953, 0.008015583992004395, 0.00794108819961548, 0.00787660789489746, 0.00802175998687744, 0.008007200241088867, 0.008068608283996583, 0.00804758358001709, 0.008140480041503906, 0.008091008186340332, 0.008112992286682129, 0.0080098237991333, 0.008066143989562988, 0.007905248165130616, 0.007969823837280274, 0.008160832405090333, 0.008148320198059082, 0.008023039817810058, 0.007944191932678223, 0.007832704067230224, 
0.007715295791625976, 0.007707039833068848, 0.007747231960296631, 0.007820767879486084, 0.007857024192810058, 0.008040384292602539, 0.007913055896759034, 0.00792419195175171, 0.008546303749084473, 0.008369855880737305, 0.008392543792724609, 0.008286687850952148, 0.008732607841491699, 0.00873692798614502, 0.00811580753326416, 0.008149791717529296, 0.008406399726867676, 0.008247743606567383, 0.008229023933410644, 0.008281920433044434, 0.008278240203857421, 0.008234208106994628, 0.008208959579467774, 0.008273823738098145, 0.008111712455749511, 0.008713855743408203, 0.008239616394042968, 0.008350303649902344, 0.008167008399963378, 0.008107359886169434, 0.008090751647949219, 0.008029919624328613, 0.007944191932678223, 0.007950335979461669, 0.008275808334350586, 0.00842467212677002, 0.008163328170776368, 0.008075360298156739, 0.008043359756469727, 0.00819206428527832, 0.008200127601623535, 0.008234720230102539, 0.00821664047241211, 0.008142271995544433, 0.008114975929260254, 0.008099840164184571, 0.008091648101806641, 0.008031328201293946, 0.008010656356811523, 0.008032256126403809, 0.008058879852294922, 0.00812060832977295, 0.008656607627868653, 0.00850438404083252, 0.008929344177246093, 0.008329440116882324, 0.008320672035217285, 0.008219903945922851, 0.008172320365905763, 0.008159520149230957, 0.008512191772460938, 0.008269311904907227, 0.008322784423828125, 0.0082128324508667, 0.008257823944091797, 0.008466367721557616, 0.008380191802978516, 0.008272319793701173, 0.008127519607543945, 0.008188384056091309, 0.008150943756103516, 0.008065759658813476, 0.00817692756652832, 0.008147551536560058, 0.008238207817077637, 0.008010144233703614, 0.008280223846435546]",tokens/s,123.34230517301675,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,codegen,Salesforce/codegen-6B-nl,Salesforce/codegen-6B-nl,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/update_llm_perf_cuda_pytorch.py"", line 153, in benchmark_cuda_pytorch benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File 
""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 564, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3880, in from_pretrained config = cls._autoset_attn_implementation( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1581, in _autoset_attn_implementation config = cls._check_and_enable_sdpa( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 1776, in _check_and_enable_sdpa raise ValueError( ValueError: CodeGenForCausalLM does not support an attention implementation through torch.nn.functional.scaled_dot_product_attention yet. Please request the support for this architecture: https://github.com/huggingface/transformers/issues/28005. If you believe this error is a bug, please open an issue in Transformers GitHub repository and load your model with the argument `attn_implementation=""eager""` meanwhile. Example: `model = AutoModel.from_pretrained(""openai/whisper-tiny"", attn_implementation=""eager"")` ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.3.1+cu121,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt_neox,EleutherAI/pythia-410m,EleutherAI/pythia-410m,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.75552,Linux,x86_64,Linux-5.10.220-209.869.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.43.4,,0.33.0,,,,1.21.2,,,,0.12.0,,,MB,815.054848,1326.383104,0.0,931.135488,917.648384,s,1,7.24853369140625,7.24853369140625,0.0,7.24853369140625,7.24853369140625,7.24853369140625,7.24853369140625,[7.24853369140625],,kWh,9.548473929161597e-06,1.0427212493425665e-06,4.293058989995879e-06,1.4884254168500042e-05,,MB,1105.32608,1456.406528,0.0,1050.673152,1018.330112,s,10,0.6607934722900392,0.06607934722900391,0.001316728408823133,0.06577188873291015,0.06745689849853516,0.06821476974487305,0.06882106674194335,"[0.06897264099121093, 0.0657655029296875, 0.06409379577636719, 0.06572882843017579, 0.06587734222412109, 0.06715283203125, 0.06728848266601563, 0.064994140625, 0.06514163208007813, 0.06577827453613282]",tokens/s,3874.1302802645587,kWh,2.2091043958334123e-06,2.4347515192221887e-07,1.460139012721776e-06,3.912718560477407e-06,tokens/kWh,65427654.97776165,MB,1127.124992,1473.183744,0.0,1067.450368,1033.282048,s,10,11.373588623046876,1.1373588623046875,0.018928872592497294,1.143686767578125,1.158686865234375,1.1600309326171876,1.1611061865234376,"[1.161375, 1.15838818359375, 1.14709521484375, 1.1516285400390625, 1.153206298828125, 1.1402783203125, 1.10854150390625, 1.1196385498046875, 1.11889013671875, 
1.114546875]",tokens/s,55.391488199546735,kWh,3.215545112375109e-05,3.5464884643910856e-06,1.5936513793477674e-05,5.163845338161984e-05,tokens/kWh,1220021.0477725924,,s,630,11.3675133228302,0.01804367194100032,0.0004886498748275712,0.018087935447692872,0.018502759170532224,0.01859877986907959,0.01939240159988404,"[0.018602048873901367, 0.01836595153808594, 0.018436063766479494, 0.018344032287597657, 0.018258527755737306, 0.01824291229248047, 0.01822313690185547, 0.01821571159362793, 0.018474176406860353, 0.01845030403137207, 0.018693023681640625, 0.018503679275512695, 0.01834156799316406, 0.018278112411499025, 0.018426464080810546, 0.018339136123657226, 0.018713279724121092, 0.018345951080322265, 0.018307167053222655, 0.01839052772521973, 0.01839689636230469, 0.018373600006103517, 0.01833679962158203, 0.01827299118041992, 0.01840127944946289, 0.018490720748901367, 0.018332319259643556, 0.018313472747802734, 0.01859584045410156, 0.018231039047241212, 0.018224512100219727, 0.018334335327148437, 0.018231296539306642, 0.018925695419311522, 0.018544511795043947, 0.01851571273803711, 0.01840563201904297, 0.018528127670288087, 0.01841574478149414, 0.01842585563659668, 0.018329599380493163, 0.018364416122436524, 0.019517696380615235, 0.01843120002746582, 0.018443967819213865, 0.0187544002532959, 0.018328960418701173, 0.018360960006713868, 0.018145280838012694, 0.018983104705810546, 0.018695775985717773, 0.018256095886230467, 0.018345983505249023, 0.018284543991088868, 0.01828659248352051, 0.018501632690429686, 0.0181711368560791, 0.018123519897460937, 0.018309120178222657, 0.018300928115844727, 0.018783584594726562, 0.018365087509155272, 0.018380416870117187, 0.018537567138671874, 0.018504159927368164, 0.018549280166625978, 0.018623712539672852, 0.01839583969116211, 0.019228128433227538, 0.020404767990112305, 0.018491392135620118, 0.018616031646728516, 0.018491680145263673, 0.018509952545166016, 0.01833888053894043, 0.018274112701416014, 0.01834623908996582, 0.01821891212463379, 0.01858236885070801, 0.01846681594848633, 0.01826201629638672, 0.018351776123046875, 0.018395456314086914, 0.018472991943359374, 0.018345504760742187, 0.018198335647583008, 0.018132768630981445, 0.017873056411743166, 0.018178335189819338, 0.01828316879272461, 0.018507551193237305, 0.018448640823364258, 0.01831500816345215, 0.0190928955078125, 0.018149215698242186, 0.018524448394775392, 0.018030656814575195, 0.018165151596069337, 0.018405567169189452, 0.018248512268066407, 0.018253311157226563, 0.018196672439575196, 0.01806982421875, 0.01813043212890625, 0.017992191314697266, 0.0184237117767334, 0.018450527191162108, 0.01842492866516113, 0.01839606475830078, 0.018404832839965822, 0.018065471649169922, 0.018008544921875, 0.018055200576782227, 0.01806035232543945, 0.01815750312805176, 0.018205280303955077, 0.01822774314880371, 0.01811622428894043, 0.01825404739379883, 0.018325504302978517, 0.018348031997680665, 0.018665184020996095, 0.018236928939819336, 0.01837148857116699, 0.018499456405639648, 0.018391040802001952, 0.01832476806640625, 0.018405664443969728, 0.018299232482910155, 0.01825391960144043, 0.01830611228942871, 0.018119232177734375, 0.017961536407470703, 0.01797715187072754, 0.01803264045715332, 0.01816761589050293, 0.018270399093627928, 0.01835811233520508, 0.01821126365661621, 0.018275360107421874, 0.01820128059387207, 0.018135040283203126, 0.018426048278808595, 0.0184237117767334, 0.019324832916259766, 0.018457599639892578, 0.018535423278808593, 0.018289920806884765, 0.017920896530151366, 0.018113920211791992, 
0.01800595283508301, 0.0184102725982666, 0.018552608489990234, 0.018280448913574218, 0.01841971206665039, 0.018200128555297852, 0.018384992599487306, 0.018127199172973632, 0.01810371208190918, 0.017928319931030272, 0.018102975845336915, 0.018106271743774414, 0.017757728576660158, 0.017502080917358397, 0.018143711090087892, 0.01843731117248535, 0.018502656936645507, 0.018238271713256836, 0.018144287109375, 0.018056991577148438, 0.01793667221069336, 0.018529535293579102, 0.017931135177612304, 0.017847808837890625, 0.018087711334228516, 0.017950687408447265, 0.018184736251831056, 0.01821286392211914, 0.01818828773498535, 0.018397184371948243, 0.018085887908935547, 0.0179814395904541, 0.01852822494506836, 0.018233375549316408, 0.018313215255737304, 0.017970848083496093, 0.018266815185546875, 0.0178449592590332, 0.017841184616088867, 0.018251775741577148, 0.018103647232055663, 0.018148000717163087, 0.017978944778442384, 0.018205120086669923, 0.018142847061157225, 0.0183855037689209, 0.019420000076293947, 0.01832441520690918, 0.018442176818847657, 0.018222463607788085, 0.018020832061767578, 0.018223072052001955, 0.018020511627197266, 0.018534496307373048, 0.018228607177734377, 0.01834409523010254, 0.018298847198486328, 0.018164319992065428, 0.018437536239624023, 0.01828096008300781, 0.018295936584472657, 0.01809702491760254, 0.018120447158813478, 0.018286144256591797, 0.018243839263916015, 0.018163839340209962, 0.01805958366394043, 0.01799734306335449, 0.01784009552001953, 0.018194944381713866, 0.01821392059326172, 0.018659872055053713, 0.018347904205322264, 0.018702911376953124, 0.018316640853881835, 0.01809270477294922, 0.018299104690551758, 0.018269983291625977, 0.018524160385131837, 0.018271392822265625, 0.018383808135986328, 0.018132896423339845, 0.017944639205932617, 0.01784419250488281, 0.018128543853759765, 0.018186208724975585, 0.01803228759765625, 0.018426080703735352, 0.01831164741516113, 0.018203935623168944, 0.018208511352539064, 0.018209184646606445, 0.018217536926269533, 0.017903615951538086, 0.01846428871154785, 0.01873673629760742, 0.018449407577514648, 0.018370431900024416, 0.018280031204223633, 0.018104736328125, 0.018309120178222657, 0.018927104949951173, 0.0182457275390625, 0.018450464248657227, 0.018698400497436523, 0.018311071395874023, 0.01810188865661621, 0.017799360275268555, 0.017690303802490235, 0.018329919815063475, 0.018507680892944335, 0.01839321517944336, 0.018307039260864258, 0.018308448791503906, 0.018172576904296876, 0.018300384521484376, 0.018313760757446288, 0.018118656158447266, 0.018096128463745118, 0.018062976837158202, 0.018320991516113282, 0.018471616744995117, 0.018317407608032226, 0.018436031341552736, 0.018286048889160158, 0.01812950325012207, 0.018257919311523436, 0.018544544219970704, 0.018512256622314455, 0.018417375564575195, 0.018251136779785158, 0.018045215606689452, 0.018035295486450196, 0.01819241523742676, 0.018130176544189452, 0.01831545639038086, 0.018499872207641602, 0.018318912506103516, 0.018329599380493163, 0.018298431396484374, 0.01827315139770508, 0.018126848220825196, 0.018591360092163087, 0.018057760238647462, 0.017848031997680664, 0.018159263610839842, 0.01857174491882324, 0.018435903549194336, 0.018415456771850587, 0.01839344024658203, 0.018255168914794923, 0.01822585678100586, 0.01922649574279785, 0.018708608627319337, 0.018305023193359374, 0.018187711715698242, 0.01836031913757324, 0.018277952194213867, 0.018109439849853515, 0.018284799575805664, 0.018140224456787108, 0.01817865562438965, 0.018294591903686524, 
0.01814556884765625, 0.018601184844970704, 0.01856492805480957, 0.018177824020385744, 0.018069856643676756, 0.01821273612976074, 0.018489343643188477, 0.018249631881713867, 0.01846895980834961, 0.018286720275878906, 0.018195743560791015, 0.018244192123413085, 0.01839923286437988, 0.018274303436279296, 0.018341888427734376, 0.018507360458374023, 0.018532960891723634, 0.01878611183166504, 0.02018284797668457, 0.018112703323364256, 0.01820057678222656, 0.01817190361022949, 0.018309343338012696, 0.01808380889892578, 0.018263872146606446, 0.01829043197631836, 0.01812879943847656, 0.018049375534057617, 0.018296831130981444, 0.018077695846557617, 0.017874399185180665, 0.018088159561157228, 0.018441984176635742, 0.01851375961303711, 0.023320735931396483, 0.01804319953918457, 0.01761859130859375, 0.017739616394042968, 0.017676544189453126, 0.01774608039855957, 0.018483072280883788, 0.017612800598144532, 0.017564352035522462, 0.01760220718383789, 0.017729663848876955, 0.017915903091430666, 0.018087167739868164, 0.017988351821899413, 0.01759436798095703, 0.01763759994506836, 0.017624927520751954, 0.01747551918029785, 0.017548799514770508, 0.01741423988342285, 0.017392032623291014, 0.01751795196533203, 0.017545087814331055, 0.017449728012084963, 0.01739072036743164, 0.01744985580444336, 0.017315200805664063, 0.0174881591796875, 0.017439071655273437, 0.01792745590209961, 0.017510143280029297, 0.018091936111450196, 0.017770015716552734, 0.01772172737121582, 0.01758969688415527, 0.017731679916381835, 0.01771404838562012, 0.01783488082885742, 0.017795711517333983, 0.017840415954589843, 0.018069759368896484, 0.017423744201660155, 0.017448448181152345, 0.01730191993713379, 0.017428064346313478, 0.017398591995239257, 0.01769683265686035, 0.017327871322631836, 0.017377536773681642, 0.017440479278564455, 0.01744540786743164, 0.017411840438842772, 0.017630912780761718, 0.017655136108398438, 0.017818592071533204, 0.017680383682250975, 0.017537023544311522, 0.01749545669555664, 0.017599071502685547, 0.018300832748413084, 0.017415456771850586, 0.01753152084350586, 0.017335903167724608, 0.017446624755859376, 0.0174553279876709, 0.01749260711669922, 0.01756777572631836, 0.018521663665771484, 0.017494112014770507, 0.01761110305786133, 0.017811456680297853, 0.01792201614379883, 0.01772265625, 0.017702816009521484, 0.017543327331542968, 0.017402559280395507, 0.01736000061035156, 0.01736342430114746, 0.017349023818969727, 0.017287168502807617, 0.017297407150268555, 0.017494016647338868, 0.01728339195251465, 0.017450815200805665, 0.017456480026245117, 0.01728156852722168, 0.01740595245361328, 0.017708959579467772, 0.017415584564208983, 0.01854879951477051, 0.017461824417114257, 0.01737481689453125, 0.017507904052734374, 0.017447872161865233, 0.017247871398925783, 0.01726268768310547, 0.017420576095581054, 0.01731279945373535, 0.017401056289672853, 0.017981184005737304, 0.0175710391998291, 0.017330976486206056, 0.017487871170043946, 0.017476640701293945, 0.017689119338989256, 0.01747603225708008, 0.017484960556030275, 0.01746614456176758, 0.017461280822753906, 0.017868831634521486, 0.017969120025634767, 0.02079350471496582, 0.018648895263671875, 0.017985599517822266, 0.017680383682250975, 0.017411775588989258, 0.017582399368286133, 0.01802239990234375, 0.017630495071411133, 0.017949344635009766, 0.017629247665405273, 0.017514432907104492, 0.017762208938598634, 0.01782707214355469, 0.017906591415405272, 0.017696767807006835, 0.017616287231445312, 0.017547168731689454, 0.018108383178710937, 0.01999331283569336, 
0.01765376091003418, 0.017557504653930665, 0.01755945587158203, 0.017801023483276366, 0.017637088775634767, 0.017638015747070312, 0.01749100875854492, 0.017578367233276368, 0.017654144287109375, 0.017601728439331055, 0.017603519439697266, 0.017477344512939454, 0.017469728469848633, 0.01746086311340332, 0.017596960067749023, 0.01790755271911621, 0.01789743995666504, 0.01787718391418457, 0.017915903091430666, 0.017829727172851563, 0.017753503799438478, 0.017830751419067384, 0.017673471450805663, 0.017689088821411132, 0.017702911376953127, 0.017903167724609374, 0.018074047088623046, 0.017911104202270507, 0.017998559951782227, 0.017673696517944336, 0.017389184951782228, 0.017359552383422853, 0.01735055923461914, 0.01751219177246094, 0.017731456756591796, 0.018528959274291993, 0.01789673614501953, 0.017691360473632813, 0.01770086479187012, 0.01821468734741211, 0.017905439376831055, 0.017641183853149413, 0.01768726348876953, 0.017543167114257813, 0.017561279296875, 0.017733407974243165, 0.017960704803466798, 0.017903743743896486, 0.017914079666137697, 0.017827648162841797, 0.01804147148132324, 0.01772115135192871, 0.017637727737426757, 0.01756991958618164, 0.017615840911865233, 0.01762326431274414, 0.017822240829467772, 0.017873184204101562, 0.01782086372375488, 0.017714879989624024, 0.017674495697021484, 0.01761075210571289, 0.018047584533691406, 0.01782508850097656, 0.01776710319519043, 0.017827520370483397, 0.01782406425476074, 0.01798684883117676, 0.017783424377441407, 0.017854560852050783, 0.017834047317504882, 0.0176661434173584, 0.01764726448059082, 0.017645055770874024, 0.017521631240844725, 0.01752444839477539, 0.01758950424194336, 0.017572608947753907, 0.017613983154296874, 0.01789014434814453, 0.01771660804748535, 0.017483488082885742, 0.017656736373901367, 0.017628543853759764, 0.017766496658325196, 0.017629728317260743, 0.017671455383300783, 0.017918399810791016, 0.017917535781860353, 0.01805564880371094, 0.018364864349365233, 0.018130943298339842, 0.01801625633239746, 0.017974592208862303, 0.017605056762695314, 0.017854719161987304, 0.01759846305847168, 0.017757728576660158, 0.01793276786804199, 0.01787254333496094, 0.017768064498901368, 0.0176790714263916, 0.017541120529174805, 0.01760870361328125, 0.01790732765197754, 0.017666528701782227, 0.01757379150390625, 0.017600223541259764, 0.017612991333007814, 0.017760351181030275, 0.017522079467773437, 0.017424991607666016, 0.01745305633544922, 0.017502208709716797, 0.01778483200073242, 0.01775619125366211, 0.017782272338867186, 0.01877244758605957, 0.018358272552490236, 0.017922048568725587, 0.017746143341064453, 0.017565216064453125, 0.01750383949279785, 0.017511072158813475, 0.01744076728820801, 0.017622432708740234, 0.017857280731201172, 0.017608543395996094, 0.01753001594543457, 0.01749206352233887, 0.017695487976074217, 0.017648704528808595, 0.017670719146728516, 0.017641183853149413, 0.017678112030029298, 0.01777324867248535, 0.017604799270629884, 0.017543167114257813, 0.01736832046508789, 0.017274784088134765, 0.01750511932373047, 0.017573888778686524, 0.017704959869384765, 0.017737728118896484, 0.018524160385131837, 0.01763737678527832, 0.017358175277709963, 0.017361568450927733, 0.01739731216430664, 0.017336736679077147, 0.017293312072753905, 0.01738140869140625, 0.017378623962402345]",tokens/s,55.42109185258005,, 
bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 1138, in __init__ self.model = InternLM2Model(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in __init__ [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 709, in __init__ self.feed_forward = InternLM2MLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 205, in __init__ 
self.w3 = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 136.12 MiB is free. Process 459248 has 14.61 GiB memory in use. Of the allocated memory 14.49 GiB is allocated by PyTorch, and 3.07 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File 
""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 906, in __init__ self.model = InternLMModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in __init__ self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 547, in __init__ self.mlp = InternLMMLP( File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 276, in __init__ self.up_proj = nn.Linear(hidden_size, intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 136.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 457747 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 9.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 906, in __init__ self.model = InternLMModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in __init__ self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 547, in __init__ self.mlp 
= InternLMMLP( File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 276, in __init__ self.up_proj = nn.Linear(hidden_size, intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 136.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 455854 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 9.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 906, in __init__ self.model = InternLMModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in __init__ self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 547, in __init__ self.mlp = InternLMMLP( File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 276, in __init__ self.up_proj = nn.Linear(hidden_size, intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 136.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 458112 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 9.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 906, in __init__ self.model = InternLMModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in __init__ self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 545, in __init__ 
self.self_attn = INTERNLM_ATTENTION_CLASSES[config.attn_implementation](config=config) KeyError: 'sdpa' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 1138, in __init__ self.model = InternLM2Model(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in __init__ [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in <listcomp> [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 709, in __init__ self.feed_forward = 
InternLM2MLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 205, in __init__ self.w3 = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 136.12 MiB is free. Process 460010 has 14.61 GiB memory in use. Of the allocated memory 14.49 GiB is allocated by PyTorch, and 3.07 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 906, in __init__ self.model = InternLMModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in __init__ self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 547, in __init__ self.mlp = InternLMMLP( File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 275, in __init__ self.down_proj = nn.Linear(intermediate_size, hidden_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 270.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 128.12 MiB is free. Process 455448 has 14.61 GiB memory in use. Of the allocated memory 14.48 GiB is allocated by PyTorch, and 17.60 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 1138, in __init__ self.model = InternLM2Model(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in __init__ [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in <listcomp> [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 709, in __init__ 
self.feed_forward = InternLM2MLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 205, in __init__ self.w3 = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 459613 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.62 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 1138, in __init__ self.model = InternLM2Model(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in __init__ [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 709, in __init__ self.feed_forward = InternLM2MLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 205, in __init__ self.w3 = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 136.12 MiB is free. Process 458868 has 14.61 GiB memory in use. Of the allocated memory 14.49 GiB is allocated by PyTorch, and 3.07 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 1138, in __init__ self.model = InternLM2Model(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in __init__ [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in <listcomp> [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 709, in 
__init__ self.feed_forward = InternLM2MLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 205, in __init__ self.w3 = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 136.12 MiB is free. Process 460906 has 14.61 GiB memory in use. Of the allocated memory 14.49 GiB is allocated by PyTorch, and 3.07 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 906, in __init__ self.model = InternLMModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in __init__ self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 545, in __init__ self.self_attn = INTERNLM_ATTENTION_CLASSES[config.attn_implementation](config=config) KeyError: 'sdpa' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 1138, in __init__ self.model = InternLM2Model(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in __init__ [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 709, in __init__ self.feed_forward = InternLM2MLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 205, in __init__ self.w3 = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 384.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 180.12 MiB is free. Process 458474 has 14.56 GiB memory in use. Of the allocated memory 14.45 GiB is allocated by PyTorch, and 1.62 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 906, in __init__ self.model = InternLMModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in __init__ self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in <listcomp> self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 547, in __init__ 
self.mlp = InternLMMLP( File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 276, in __init__ self.up_proj = nn.Linear(hidden_size, intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 136.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 18.12 MiB is free. Process 456214 has 14.72 GiB memory in use. Of the allocated memory 14.60 GiB is allocated by PyTorch, and 9.56 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File 
""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 1138, in __init__ self.model = InternLM2Model(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in __init__ [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 709, in __init__ self.feed_forward = InternLM2MLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 205, in __init__ self.w3 = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 136.12 MiB is free. Process 460503 has 14.61 GiB memory in use. Of the allocated memory 14.49 GiB is allocated by PyTorch, and 3.07 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. 
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm,internlm/internlm-20b,internlm/internlm-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 906, in __init__ self.model = InternLMModel(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in __init__ self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 729, in <listcomp> self.layers = nn.ModuleList([InternLMDecoderLayer(config) for _ in range(config.num_hidden_layers)]) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm-20b/80729bcf52fbc4553d965926b27304ac5e156d98/modeling_internlm.py"", line 545, in __init__ 
self.self_attn = INTERNLM_ATTENTION_CLASSES[config.attn_implementation](config=config) KeyError: 'sdpa' ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,internlm2,internlm/internlm2-20b,internlm/internlm2-20b,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.224-212.876.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.4.0,,4.44.2,,0.34.2,,,,1.22.0,,,,0.12.0,,"Traceback (most recent call last): File ""/workspace/llm_perf/common/benchmark_runner.py"", line 111, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/workspace/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/workspace/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 89, in run self.run_model_loading_tracking(backend) File ""/workspace/optimum_benchmark/scenarios/inference/scenario.py"", line 182, in run_model_loading_tracking backend.load() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 71, in load self.load_transformers_model() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 151, in load_transformers_model self.load_transformers_model_with_no_weights() File ""/workspace/optimum_benchmark/backends/pytorch/backend.py"", line 126, in load_transformers_model_with_no_weights self.pretrained_model = self.automodel_loader.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/models/auto/auto_factory.py"", line 559, in from_pretrained return model_class.from_pretrained( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_utils.py"", line 3832, in from_pretrained model = cls(config, *model_args, **model_kwargs) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 1138, in __init__ self.model = InternLM2Model(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in __init__ [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 918, in <listcomp> [InternLM2DecoderLayer(config, layer_idx) for layer_idx in range(config.num_hidden_layers)] File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 709, in 
__init__ self.feed_forward = InternLM2MLP(config) File ""/root/.cache/huggingface/modules/transformers_modules/internlm/internlm2-20b/b43f37b9cd705c287752cb00fa725cc983401edf/modeling_internlm2.py"", line 205, in __init__ self.w3 = nn.Linear(self.hidden_size, self.intermediate_size, bias=False) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/linear.py"", line 99, in __init__ self.weight = Parameter(torch.empty((out_features, in_features), **factory_kwargs)) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_device.py"", line 79, in __torch_function__ return func(*args, **kwargs) torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 192.00 MiB. GPU 0 has a total capacity of 14.74 GiB of which 136.12 MiB is free. Process 461296 has 14.61 GiB memory in use. Of the allocated memory 14.49 GiB is allocated by PyTorch, and 3.07 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation. See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables) ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,809.070592,763.232256,0.0,360.710144,345.493504,s,1,9.15378515625,9.15378515625,0.0,9.15378515625,9.15378515625,9.15378515625,9.15378515625,[9.15378515625],,kWh,2.871913799999959e-06,3.0986136161193685e-07,1.0483341719999886e-06,4.230109333611885e-06,,MB,1283.514368,777.91232,0.0,362.807296,344.082944,s,17,0.19694867038726804,0.011585215905133416,0.00015554005537153206,0.011542528152465821,0.011833555221557619,0.011935212707519531,0.011940475769042969,"[0.011300031661987304, 0.011565183639526367, 0.01151632022857666, 0.011542528152465821, 0.01150380802154541, 0.011617919921875, 0.01159222412109375, 0.011941791534423828, 0.011488960266113282, 0.011525407791137695, 0.011576031684875488, 0.011550751686096192, 0.011473024368286133, 0.011515263557434083, 0.011933568000793458, 0.011766880035400391, 0.011538975715637207]",tokens/s,22097.128106742166,kWh,3.352307860265979e-07,3.697020559884174e-08,2.0908614762342139e-07,5.812871392488611e-07,tokens/kWh,440401967.83091235,MB,1330.417664,803.078144,0.0,387.97312,344.085504,s,17,10.29884619140625,0.6058144818474265,0.004536597127132912,0.6052348022460937,0.6116901855468749,0.6134692504882813,0.6151925317382813,"[0.5992588500976562, 0.6029649047851563, 0.6129307250976562, 0.6052348022460937, 0.6035680541992188, 0.6156233520507812, 0.6062072143554688, 0.6009053344726563, 0.6026465454101563, 0.6008970336914062, 0.6073819580078125, 0.6030621337890625, 0.6009525756835937, 0.609990478515625, 0.60780419921875, 0.6108631591796875, 
0.6085548706054688]",tokens/s,103.99223176026099,kWh,1.7481147616914546e-05,1.9278707643195314e-06,7.084645177553058e-06,2.649366355878714e-05,tokens/kWh,2377927.0790620735,,s,1071,10.2902377243042,0.009608065102058076,0.00027950604635052385,0.009539967536926269,0.009824064254760742,0.009938015937805175,0.010923267459869383,"[0.009171584129333495, 0.009590208053588867, 0.009528927803039551, 0.009581631660461425, 0.00944035243988037, 0.00954422378540039, 0.009492128372192384, 0.009491071701049805, 0.009483872413635254, 0.009466272354125976, 0.00947216033935547, 0.009439040184020996, 0.009463839530944824, 0.009496576309204101, 0.009600031852722167, 0.009468768119812012, 0.009469440460205078, 0.009495488166809082, 0.00948192024230957, 0.00958243179321289, 0.009414912223815918, 0.00950499153137207, 0.01000534439086914, 0.00953007984161377, 0.009499808311462403, 0.009487360000610352, 0.009463520050048828, 0.009640192031860352, 0.009527008056640624, 0.009627936363220214, 0.009465984344482422, 0.009483903884887694, 0.009955583572387695, 0.00947209644317627, 0.009482272148132324, 0.009450655937194824, 0.009581119537353515, 0.009519264221191407, 0.00944547176361084, 0.00947804832458496, 0.009443327903747559, 0.009497792243957519, 0.009482208251953126, 0.009433728218078613, 0.009469568252563476, 0.00942140769958496, 0.009424896240234374, 0.00947811222076416, 0.009398303985595702, 0.009528639793395997, 0.009515711784362793, 0.009473183631896972, 0.009529855728149414, 0.009471967697143554, 0.009504351615905762, 0.009509119987487793, 0.0095414400100708, 0.009482975959777831, 0.009508864402770996, 0.009504063606262207, 0.009458368301391601, 0.009484288215637206, 0.00941875171661377, 0.00920576000213623, 0.009495552062988282, 0.009454591751098633, 0.009498047828674316, 0.009460288047790527, 0.00946940803527832, 0.00971174430847168, 0.009477984428405762, 0.012077631950378418, 0.011182080268859864, 0.009543680191040039, 0.009555968284606933, 0.00948134422302246, 0.00948953628540039, 0.00943289566040039, 0.009477151870727538, 0.009525792121887207, 0.009464127540588379, 0.009441151618957519, 0.009462207794189452, 0.009434880256652832, 0.00946345615386963, 0.009564512252807618, 0.009460800170898438, 0.009532352447509766, 0.009489983558654784, 0.00943660831451416, 0.009497599601745605, 0.009463808059692384, 0.009493824005126952, 0.009631999969482422, 0.009390527725219727, 0.00947209644317627, 0.009498527526855469, 0.009486335754394531, 0.009504672050476073, 0.00947824001312256, 0.009508128166198731, 0.009497535705566407, 0.009508000373840333, 0.009506655693054198, 0.009450240135192871, 0.009531423568725586, 0.009656384468078613, 0.00949062442779541, 0.00955571174621582, 0.00949891185760498, 0.00951471996307373, 0.009640959739685059, 0.009436256408691406, 0.009498527526855469, 0.009510911941528321, 0.009461759567260742, 0.009474047660827637, 0.00942899227142334, 0.009446559906005859, 0.009570848464965821, 0.009726271629333497, 0.009493824005126952, 0.009468607902526856, 0.00943513584136963, 0.009464927673339844, 0.009491264343261719, 0.009191231727600098, 0.009602304458618163, 0.00955078411102295, 0.009576448440551758, 0.009508671760559083, 0.009563584327697754, 0.009513728141784668, 0.009539104461669922, 0.009488127708435058, 0.009808608055114746, 0.009477503776550293, 0.00951318359375, 0.009486432075500489, 0.009627615928649903, 0.009531200408935547, 0.009503007888793946, 0.00948031997680664, 0.009463935852050782, 0.009576736450195312, 0.009467616081237792, 0.00952297592163086, 0.009461983680725098, 
0.009478400230407715, 0.009462559700012207, 0.009448415756225586, 0.010042559623718262, 0.009491007804870605, 0.009537119865417481, 0.009495200157165528, 0.0094269437789917, 0.009564160346984863, 0.009408512115478516, 0.009526911735534669, 0.009466239929199219, 0.00943660831451416, 0.009586943626403808, 0.009496895790100098, 0.009460736274719238, 0.00952627182006836, 0.009488384246826171, 0.00950915241241455, 0.009459424018859863, 0.009441280364990234, 0.009486335754394531, 0.009506815910339356, 0.009469183921813965, 0.009496831893920898, 0.009478272438049317, 0.009609567642211913, 0.009443360328674317, 0.00970956802368164, 0.00965014362335205, 0.012130335807800293, 0.01100175952911377, 0.00955401611328125, 0.010254112243652344, 0.014311039924621582, 0.009552736282348633, 0.010196191787719727, 0.010948960304260253, 0.010256768226623535, 0.00959164810180664, 0.00958521556854248, 0.009226495742797852, 0.009560064315795898, 0.009490079879760742, 0.009505663871765137, 0.009508447647094726, 0.010567808151245117, 0.00968064022064209, 0.009581184387207032, 0.009613311767578125, 0.009549823760986328, 0.009525247573852539, 0.009563743591308594, 0.009675359725952149, 0.009687968254089355, 0.00953865623474121, 0.009591711997985839, 0.0096429443359375, 0.009568384170532226, 0.009542880058288574, 0.009625568389892578, 0.009610143661499024, 0.009561792373657226, 0.009477919578552246, 0.009601311683654785, 0.009473055839538573, 0.009533472061157226, 0.00960755157470703, 0.009495295524597169, 0.009557024002075195, 0.009491231918334961, 0.00947116756439209, 0.009552703857421876, 0.009502911567687988, 0.009543071746826172, 0.009533727645874023, 0.009561535835266113, 0.00956281566619873, 0.009492480278015136, 0.009573439598083495, 0.009616607666015626, 0.009496288299560547, 0.009553600311279296, 0.009580544471740723, 0.009486080169677735, 0.009556544303894044, 0.009514592170715331, 0.009566623687744141, 0.009534848213195802, 0.0098571195602417, 0.009607711791992187, 0.00952723217010498, 0.00960915184020996, 0.011229344367980957, 0.009567968368530274, 0.009609536170959472, 0.009558015823364258, 0.009635583877563476, 0.00952518367767334, 0.009509023666381835, 0.009531488418579101, 0.009547904014587402, 0.009531264305114745, 0.009543968200683593, 0.009325119972229004, 0.009551648139953614, 0.009536031723022462, 0.009649855613708496, 0.009508000373840333, 0.009636704444885253, 0.009541631698608399, 0.009537631988525391, 0.009647904396057129, 0.009604607582092285, 0.009577088356018067, 0.009510911941528321, 0.009496512413024903, 0.009606271743774415, 0.00954054355621338, 0.01005568027496338, 0.00958243179321289, 0.009578080177307128, 0.009447008132934571, 0.009501791954040528, 0.009535264015197754, 0.009541760444641114, 0.009565728187561035, 0.009726367950439452, 0.009629504203796387, 0.009477663993835449, 0.00950937557220459, 0.009521344184875487, 0.009618623733520508, 0.009558112144470214, 0.009521759986877442, 0.009590911865234374, 0.009525247573852539, 0.0095513916015625, 0.009625760078430176, 0.009535072326660156, 0.009562591552734374, 0.009506848335266114, 0.00956822395324707, 0.00949068832397461, 0.009530655860900879, 0.00956604766845703, 0.009466848373413085, 0.009526176452636719, 0.009559040069580077, 0.009722911834716796, 0.009542624473571777, 0.009485343933105468, 0.009520095825195312, 0.00954595184326172, 0.009633567810058594, 0.009663871765136719, 0.009511072158813477, 0.0094519681930542, 0.009537568092346191, 0.009649920463562011, 0.00953593635559082, 0.009556991577148437, 0.009584799766540527, 
0.010191295623779297, 0.009525471687316895, 0.009564255714416504, 0.009611359596252441, 0.009296799659729004, 0.00973350429534912, 0.009536224365234375, 0.009544896125793458, 0.009556384086608886, 0.00950614356994629, 0.009606047630310059, 0.009543744087219239, 0.009596927642822266, 0.009592831611633301, 0.009586912155151367, 0.009579423904418946, 0.009595775604248047, 0.009574399948120118, 0.009587871551513671, 0.00984774398803711, 0.009668512344360352, 0.009531135559082032, 0.00966489601135254, 0.009684864044189454, 0.009628992080688476, 0.009575263977050781, 0.010198880195617675, 0.009758048057556153, 0.009921183586120605, 0.01101420783996582, 0.010642880439758301, 0.010242560386657714, 0.011036031723022461, 0.010912256240844726, 0.00990982437133789, 0.010027487754821778, 0.009924351692199707, 0.009937567710876465, 0.009848128318786621, 0.009775168418884277, 0.009719167709350586, 0.0098472318649292, 0.009819840431213379, 0.010451071739196778, 0.009755328178405761, 0.009672703742980958, 0.009727999687194825, 0.009543295860290527, 0.009612799644470215, 0.00962649631500244, 0.009820159912109374, 0.009760448455810547, 0.00964844799041748, 0.009598976135253906, 0.009590784072875976, 0.009585920333862305, 0.009507583618164063, 0.009578495979309083, 0.009640159606933594, 0.009509696006774902, 0.00958073616027832, 0.00959507179260254, 0.009576800346374512, 0.00953724765777588, 0.009499103546142577, 0.00956937599182129, 0.009554911613464355, 0.00921247959136963, 0.00957427215576172, 0.009506943702697754, 0.009727871894836426, 0.00956169605255127, 0.00953376007080078, 0.0095665922164917, 0.009507840156555175, 0.00948265552520752, 0.009565983772277832, 0.009525888442993164, 0.009494239807128907, 0.009524991989135742, 0.009456064224243164, 0.009801759719848632, 0.011657376289367676, 0.009856160163879395, 0.009525919914245605, 0.00953932762145996, 0.009518912315368652, 0.009558239936828613, 0.009502943992614745, 0.00939417552947998, 0.009510911941528321, 0.009510911941528321, 0.009529184341430665, 0.009609375953674316, 0.009581888198852539, 0.009623680114746094, 0.009729887962341309, 0.009645792007446289, 0.00971673583984375, 0.00963327980041504, 0.009751328468322754, 0.00955958366394043, 0.00983459186553955, 0.009520832061767579, 0.009497088432312012, 0.009533023834228516, 0.009701472282409668, 0.009543904304504395, 0.009547743797302246, 0.00979152011871338, 0.009641087532043457, 0.009455648422241211, 0.009522015571594239, 0.009486687660217286, 0.009473695755004882, 0.009522432327270508, 0.009523136138916016, 0.009483072280883789, 0.009480192184448242, 0.009487520217895508, 0.009516127586364746, 0.009619359970092773, 0.009504608154296875, 0.009554207801818848, 0.009500384330749512, 0.009512288093566895, 0.009757344245910644, 0.009752191543579102, 0.009924960136413575, 0.009976127624511718, 0.009372608184814453, 0.009596927642822266, 0.009605119705200196, 0.009631744384765625, 0.009517056465148926, 0.009549823760986328, 0.009520959854125977, 0.009501055717468261, 0.009535296440124512, 0.009521280288696289, 0.009601951599121094, 0.009524191856384277, 0.00951296043395996, 0.009515007972717286, 0.009532544136047363, 0.009620351791381836, 0.009549823760986328, 0.009648127555847168, 0.009652223587036133, 0.009568191528320313, 0.009647808074951171, 0.009578240394592286, 0.00952518367767334, 0.009560928344726563, 0.009494367599487305, 0.00945366382598877, 0.009448543548583984, 0.009491135597229004, 0.009467007637023926, 0.009497504234313964, 0.0094618558883667, 0.009485952377319336, 0.009415040016174317, 
0.009453824043273926, 0.009502304077148438, 0.009459327697753907, 0.009449407577514649, 0.00943779182434082, 0.009510911941528321, 0.009481408119201661, 0.009487168312072754, 0.009483776092529296, 0.00951756763458252, 0.00943513584136963, 0.009472000122070312, 0.009551360130310058, 0.009601759910583495, 0.009641056060791015, 0.009534144401550293, 0.009545408248901367, 0.009642304420471192, 0.00950496006011963, 0.009514880180358887, 0.009584159851074218, 0.009547648429870605, 0.009517663955688477, 0.00949459171295166, 0.009521023750305176, 0.009542752265930175, 0.009577343940734864, 0.009642016410827637, 0.009546048164367675, 0.00958614444732666, 0.009244000434875489, 0.00951318359375, 0.009561920166015625, 0.009609696388244628, 0.00952131175994873, 0.009803584098815918, 0.009543871879577637, 0.00961740779876709, 0.009502943992614745, 0.009524543762207032, 0.009569888114929198, 0.009493375778198242, 0.009680352210998534, 0.009592896461486816, 0.009570048332214356, 0.009548352241516114, 0.00946720027923584, 0.009464672088623047, 0.009467904090881347, 0.009469951629638672, 0.009629695892333985, 0.009527296066284179, 0.009461759567260742, 0.009543680191040039, 0.009483327865600586, 0.009474335670471191, 0.009626272201538087, 0.009582592010498046, 0.009629119873046875, 0.009529919624328613, 0.009498623847961426, 0.00951910400390625, 0.009443327903747559, 0.009712736129760742, 0.009487263679504395, 0.009523136138916016, 0.009484319686889649, 0.009431072235107422, 0.010172320365905761, 0.010084447860717773, 0.009657407760620117, 0.009599871635437011, 0.009510656356811524, 0.009636159896850585, 0.00950284767150879, 0.009539263725280762, 0.009523391723632812, 0.0094651517868042, 0.009499360084533691, 0.009463775634765624, 0.009436287879943848, 0.009483136177062989, 0.009435104370117188, 0.00953983974456787, 0.009498175621032714, 0.00986953639984131, 0.009690208435058594, 0.009558048248291016, 0.0095098876953125, 0.009571968078613281, 0.009527551651000976, 0.00951296043395996, 0.009512703895568847, 0.00922111988067627, 0.00993280029296875, 0.00948742389678955, 0.009538496017456054, 0.009465632438659667, 0.009430272102355957, 0.00955081558227539, 0.009500672340393066, 0.009477312088012696, 0.009497471809387207, 0.009447360038757324, 0.009498623847961426, 0.009449407577514649, 0.009465312004089356, 0.009487039566040039, 0.009548704147338867, 0.009583616256713867, 0.009535488128662109, 0.009529343605041504, 0.00956934356689453, 0.009505727767944335, 0.009678144454956055, 0.009550304412841796, 0.009521599769592284, 0.0094901123046875, 0.009441696166992187, 0.009434176445007323, 0.00947056007385254, 0.009422783851623534, 0.009404512405395507, 0.009603391647338868, 0.009473407745361328, 0.009489824295043945, 0.00948851203918457, 0.00949129581451416, 0.009561759948730469, 0.009571711540222167, 0.009526176452636719, 0.009508864402770996, 0.009456768035888671, 0.00947216033935547, 0.009544416427612305, 0.009488384246826171, 0.009504544258117675, 0.00952297592163086, 0.009490880012512207, 0.009471776008605957, 0.009478367805480958, 0.009520768165588379, 0.009539967536926269, 0.009611455917358399, 0.00963327980041504, 0.009604576110839844, 0.009576576232910156, 0.00973465633392334, 0.009496800422668457, 0.009517120361328126, 0.009626560211181641, 0.009558367729187011, 0.009706303596496582, 0.009703264236450196, 0.009643775939941406, 0.009627743721008301, 0.009280192375183105, 0.009534784317016601, 0.009541919708251953, 0.009574784278869628, 0.009512991905212402, 0.00962559986114502, 0.009994239807128906, 
0.009976160049438477, 0.010022560119628906, 0.009934847831726074, 0.009862943649291992, 0.009913887977600098, 0.009798336029052734, 0.009661472320556641, 0.009915360450744629, 0.009824064254760742, 0.009729248046875, 0.009804479598999024, 0.010036640167236328, 0.010124223709106445, 0.010023103713989259, 0.009907936096191406, 0.009734016418457031, 0.00952723217010498, 0.009471808433532715, 0.009532032012939453, 0.009463583946228028, 0.009545503616333008, 0.009469663619995117, 0.009463680267333985, 0.009510560035705566, 0.009538623809814453, 0.0094618558883667, 0.009551679611206055, 0.009490431785583496, 0.009490015983581544, 0.009505184173583984, 0.00946288013458252, 0.009550751686096192, 0.009506815910339356, 0.009690527915954589, 0.009681504249572754, 0.009621503829956055, 0.009576448440551758, 0.00951923179626465, 0.009498496055603028, 0.009492256164550782, 0.00947379207611084, 0.009463552474975587, 0.009591520309448242, 0.009533439636230469, 0.009517056465148926, 0.009518719673156739, 0.009507264137268066, 0.009502495765686035, 0.009511072158813477, 0.009528639793395997, 0.009714367866516114, 0.009574399948120118, 0.009740511894226074, 0.009585951805114746, 0.009658368110656738, 0.009512736320495605, 0.009203807830810547, 0.009573087692260743, 0.00951308822631836, 0.00948742389678955, 0.009509696006774902, 0.009490240097045898, 0.009523103713989258, 0.00953494358062744, 0.009618240356445313, 0.009631744384765625, 0.009586463928222657, 0.0095250244140625, 0.00953593635559082, 0.009586591720581055, 0.009577792167663574, 0.00952131175994873, 0.009560640335083009, 0.00949836826324463, 0.009420672416687011, 0.00950214385986328, 0.009495264053344726, 0.009707807540893554, 0.009571423530578613, 0.009520159721374512, 0.009528256416320801, 0.009558815956115723, 0.009506367683410644, 0.00960588836669922, 0.009462847709655761, 0.009626399993896485, 0.009588576316833497, 0.009629856109619141, 0.009586655616760253, 0.009662528038024903, 0.009603039741516113, 0.009621055603027343, 0.009543456077575684, 0.009554400444030761, 0.009492256164550782, 0.009430463790893556, 0.009496576309204101, 0.009605600357055663, 0.009533087730407714, 0.009519071578979492, 0.009927136421203613, 0.009703840255737305, 0.00959670352935791, 0.009570528030395507, 0.009548128128051758, 0.009498271942138672, 0.009520768165588379, 0.009537152290344238, 0.009501055717468261, 0.009555392265319824, 0.00958681583404541, 0.009591103553771973, 0.009828767776489257, 0.009541343688964844, 0.009479680061340333, 0.009599679946899415, 0.009826047897338867, 0.00973043155670166, 0.009560128211975098, 0.0092293119430542, 0.009529343605041504, 0.00956777572631836, 0.009507295608520507, 0.009446816444396973, 0.009534048080444337, 0.009471551895141601, 0.00975107192993164, 0.009491647720336914, 0.009491392135620117, 0.009512736320495605, 0.009585856437683105, 0.009587039947509765, 0.009572832107543946, 0.00951852798461914, 0.009743103981018067, 0.009951040267944336, 0.009676128387451173, 0.00946448040008545, 0.009481696128845215, 0.009566656112670898, 0.009506912231445312, 0.009472000122070312, 0.009482048034667968, 0.009468095779418945, 0.009508543968200684, 0.009462207794189452, 0.00945689582824707, 0.00950540828704834, 0.009459712028503419, 0.009455743789672852, 0.009510432243347168, 0.009551584243774414, 0.009521087646484376, 0.00948265552520752, 0.009476320266723633, 0.00950278377532959, 0.009549823760986328, 0.00951683235168457, 0.009527520179748535, 0.009471327781677245, 0.009590911865234374, 0.009545344352722168, 0.009530367851257325, 
0.009576704025268555, 0.009519871711730956, 0.009622431755065919, 0.009504223823547363, 0.009455519676208496, 0.009511551856994628, 0.009488384246826171, 0.009531359672546386, 0.00953775978088379, 0.009486144065856933, 0.009506815910339356, 0.009506175994873047, 0.009514816284179687, 0.009621472358703613, 0.009532416343688965, 0.009717856407165527, 0.009561856269836426, 0.009500224113464356, 0.009531840324401856, 0.00922214412689209, 0.009520159721374512, 0.009550271987915039, 0.009507360458374024, 0.009629695892333985, 0.009572223663330078, 0.009570655822753906, 0.009571840286254883, 0.009511103630065918, 0.009647583961486817, 0.009537983894348144, 0.00948646354675293, 0.009535615921020507, 0.009502528190612793, 0.009534879684448242, 0.009528160095214844, 0.009585856437683105, 0.009646207809448241, 0.009501248359680177, 0.009517312049865723, 0.009584351539611817, 0.009500576019287109, 0.00958995246887207, 0.009593791961669922, 0.009617568016052246, 0.009805824279785156, 0.009924448013305665, 0.00996275234222412, 0.009772095680236816, 0.00989151954650879, 0.010030271530151368, 0.01008233642578125, 0.010298144340515137, 0.009887743949890136, 0.009793312072753906, 0.009901568412780762, 0.009743295669555663, 0.009733920097351073, 0.009684991836547852, 0.009528767585754395, 0.009531968116760254, 0.009606880187988281, 0.009500736236572266, 0.009512703895568847, 0.009480128288269043, 0.009558560371398926, 0.009517056465148926, 0.009535327911376953, 0.009922719955444336, 0.009971296310424805, 0.009920928001403808, 0.009920448303222655, 0.009748543739318848, 0.009746432304382324, 0.009905471801757812, 0.009712320327758789, 0.009717599868774414, 0.00969491195678711, 0.009591263771057129, 0.009855199813842773, 0.009821951866149902, 0.00958672046661377, 0.009506815910339356, 0.009628288269042969, 0.009824224472045898, 0.009777152061462402, 0.00974028778076172, 0.009631808280944825, 0.009595999717712403, 0.009487199783325195, 0.009517248153686524, 0.009487199783325195, 0.009481184005737305, 0.00949407958984375, 0.009486783981323242, 0.009451519966125489, 0.009590527534484864, 0.009470015525817872, 0.009523200035095216, 0.009478400230407715, 0.009891136169433594, 0.009779840469360352, 0.011302911758422851, 0.010272095680236817, 0.009851103782653808, 0.00963424015045166, 0.009598560333251953, 0.009674528121948241, 0.009597760200500488, 0.009590592384338379, 0.009549823760986328, 0.009498527526855469, 0.009582688331604004, 0.009494272232055664, 0.009488639831542968, 0.009510911941528321, 0.009471648216247559, 0.009527296066284179, 0.009515359878540039, 0.009506015777587891, 0.009530143737792969, 0.009508864402770996, 0.009500672340393066, 0.009474143981933594, 0.009815967559814454, 0.009629695892333985, 0.00951910400390625, 0.009553919792175293, 0.009493632316589356, 0.009579039573669434, 0.00952560043334961, 0.009491680145263671, 0.009564959526062011, 0.009549823760986328, 0.009537311553955078, 0.009598688125610351, 0.009513471603393555, 0.009560064315795898, 0.009632991790771485, 0.009525919914245605, 0.009691264152526856, 0.00977888011932373, 0.009824576377868653, 0.009963744163513184, 0.009932576179504394, 0.009912192344665527, 0.009689984321594238, 0.010094592094421387, 0.009988096237182617, 0.009880736351013183, 0.009847904205322265, 0.00986911964416504, 0.00986131191253662, 0.009942975997924805, 0.009642848014831543, 0.009540351867675781, 0.009553536415100098, 0.00946134376525879, 0.00952620792388916, 0.009521216392517089, 0.009555392265319824, 0.009781824111938477, 0.009474047660827637, 
0.009633184432983399, 0.009731743812561035, 0.009866399765014649, 0.009505727767944335, 0.009506752014160156, 0.009786591529846192, 0.009518783569335937, 0.009551679611206055, 0.009520544052124023, 0.009436256408691406, 0.00948192024230957, 0.009460927963256836, 0.009544511795043946, 0.009531392097473144, 0.009578495979309083, 0.009705727577209472, 0.00976035213470459, 0.009722175598144532, 0.009555999755859374, 0.00955577564239502, 0.009566240310668946, 0.0096146240234375, 0.00959763240814209, 0.009632960319519043, 0.00952950382232666, 0.00952950382232666, 0.009498335838317872, 0.00961631965637207, 0.009529184341430665, 0.009517056465148926, 0.009551872253417968, 0.009801728248596191, 0.009975808143615723, 0.010048831939697266, 0.010145919799804688, 0.010098591804504394, 0.01005174446105957, 0.009802240371704102, 0.009828351974487304, 0.009877120018005371, 0.009865599632263184, 0.009644191741943359, 0.009643872261047364, 0.009662464141845703, 0.00987119960784912, 0.009670111656188965, 0.009296031951904296, 0.009832575798034668, 0.009886143684387207, 0.00961903953552246, 0.00956214427947998, 0.009492575645446777, 0.009562399864196778, 0.00952451229095459, 0.009492639541625977, 0.009544480323791503, 0.009486111640930175, 0.009490431785583496, 0.00956230354309082, 0.009631423950195313, 0.009580672264099121, 0.00953536033630371, 0.009756128311157226, 0.009849120140075683, 0.009482624053955079, 0.009615039825439453, 0.009588031768798829, 0.00954470443725586, 0.009574399948120118, 0.009504768371582031, 0.009565535545349122, 0.009531935691833497, 0.0096627836227417, 0.009819487571716308, 0.00968342399597168, 0.009668607711791993, 0.009566240310668946, 0.009691200256347655, 0.009797696113586427, 0.009922176361083985, 0.010068160057067871, 0.01039568042755127, 0.009938464164733887, 0.009908960342407227, 0.0098504638671875, 0.009705504417419434, 0.009658495903015136, 0.00959488010406494, 0.009549823760986328, 0.009564352035522462, 0.00977286434173584, 0.009822431564331055, 0.009895520210266113, 0.00967903995513916, 0.009549568176269532, 0.009582367897033692, 0.00947382354736328, 0.009505472183227538, 0.009760095596313476, 0.009580448150634765, 0.009579263687133788, 0.009520959854125977, 0.009547488212585449, 0.009584159851074218, 0.009513919830322265, 0.009638015747070312, 0.009490303993225098, 0.00985647964477539, 0.0095546236038208]",tokens/s,104.07922816694872,, bfloat16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,bfloat16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla 
T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,809.058304,763.232256,0.0,360.710144,345.493504,s,1,8.754025390625,8.754025390625,0.0,8.754025390625,8.754025390625,8.754025390625,8.754025390625,[8.754025390625],,kWh,2.8807342666686014e-06,3.106597237536193e-07,9.644452159994835e-07,4.155839206421704e-06,,MB,1171.29216,775.815168,0.0,362.807296,344.082944,s,20,0.4724572162628173,0.02362286081314087,0.0004968573576560261,0.023499711990356446,0.02371581039428711,0.02398011627197266,0.025366186218261717,"[0.025712703704833983, 0.023888927459716797, 0.023417568206787108, 0.023469760894775392, 0.023587648391723632, 0.023696575164794922, 0.023578624725341796, 0.023365568161010743, 0.023508031845092772, 0.023496320724487305, 0.023503103256225587, 0.023450239181518555, 0.023443103790283203, 0.02348303985595703, 0.023458911895751954, 0.023457183837890624, 0.023582624435424804, 0.023550527572631836, 0.023208480834960937, 0.023598272323608397]",tokens/s,10836.960096619327,kWh,8.045634203168038e-07,8.872349783220936e-08,5.328631443801662e-07,1.4261500625291795e-06,tokens/kWh,179504251.84990808,MB,1210.368,800.980992,0.0,387.97312,344.085504,s,20,10.047461578369141,0.5023730789184571,0.0031087004390134383,0.5016576995849609,0.5065504669189452,0.5079946884155273,0.508502135925293,"[0.49908740234375, 0.5061726989746094, 0.5028882446289062, 0.50127783203125, 0.5079613037109375, 0.5043182067871094, 0.5030824584960938, 0.5086289978027344, 0.5044768371582031, 0.5011488647460938, 0.4990799865722656, 0.49991644287109377, 0.5018919372558593, 0.4991437377929687, 0.4997825012207031, 0.49671206665039064, 0.5009382934570312, 0.5031365966796875, 0.5063937072753906, 0.5014234619140625]",tokens/s,125.40480898305835,kWh,1.434710040447485e-05,1.5822451319675708e-06,6.148156089319845e-06,2.2077501625762272e-05,tokens/kWh,2853583.755441114,,s,1260,10.036965571880346,0.007965845691968525,0.00016175707905947907,0.007924639940261841,0.008130272293090822,0.008218963527679443,0.008570739107131957,"[0.007737343788146973, 0.007944191932678223, 0.007907360076904298, 0.008116191864013671, 0.007872416019439697, 0.007840191841125489, 0.007981887817382812, 0.007910431861877442, 0.007871583938598633, 0.007911871910095215, 0.007880512237548829, 0.007856607913970947, 0.007946432113647461, 0.007856224060058594, 0.008040160179138183, 0.007916959762573242, 0.007918176174163818, 0.007919616222381591, 0.007867487907409668, 0.007846816062927246, 0.00785203218460083, 0.007813119888305664, 0.007837696075439453, 0.007895040035247802, 0.007894976139068604, 0.00788486385345459, 0.007859903812408447, 0.007856480121612549, 0.008021151542663574, 0.00794707202911377, 0.007945343971252442, 0.00791641616821289, 0.007905280113220215, 0.007880256175994874, 0.007803008079528808, 0.00792195177078247, 0.007931935787200927, 0.0078820161819458, 0.007940927982330322, 0.007853983879089355, 0.007895328044891358, 0.008023167610168457, 0.007907936096191406, 0.007933087825775146, 0.007893856048583985, 0.007871488094329833, 0.007886144161224365, 0.007851071834564209, 0.007835584163665772, 0.00787116813659668, 0.007899136066436767, 0.007888351917266846, 0.007868576049804687, 0.00811564826965332, 0.008027071952819825, 0.007886943817138671, 0.008003135681152344, 0.008026368141174316, 0.008062560081481934, 0.008037919998168946, 0.007980000019073487, 0.007919616222381591, 0.007919167995452881, 0.007670080184936524, 0.007957215785980225, 0.007942016124725343, 0.007927807807922363, 0.007954432010650634, 0.007954432010650634, 0.007959712028503417, 
0.007855999946594238, 0.00784880018234253, 0.007851776123046874, 0.007948863983154296, 0.007902880191802979, 0.007886816024780273, 0.007858176231384278, 0.007878367900848388, 0.007975103855133057, 0.007936287879943847, 0.007919904232025146, 0.008000576019287109, 0.008003968238830567, 0.007913472175598145, 0.007943647861480713, 0.007869215965270996, 0.007875775814056397, 0.00847100830078125, 0.007893055915832519, 0.008153727531433105, 0.008519007682800292, 0.008359359741210938, 0.008733599662780762, 0.00800153636932373, 0.00808080005645752, 0.00834774398803711, 0.007932415962219238, 0.007954432010650634, 0.00789238405227661, 0.00789359998703003, 0.007896480083465576, 0.007874720096588135, 0.00789734411239624, 0.008181952476501464, 0.008474656105041504, 0.008203743934631347, 0.007999231815338135, 0.008747327804565429, 0.008137151718139649, 0.007945536136627197, 0.007891232013702393, 0.007864448070526123, 0.008158528327941894, 0.008014847755432129, 0.007940063953399659, 0.007888864040374756, 0.007970943927764892, 0.008513440132141113, 0.007942143917083741, 0.007906847953796387, 0.008200672149658203, 0.0079170560836792, 0.008012543678283692, 0.007960319995880127, 0.008109919548034668, 0.007946047782897949, 0.007687551975250244, 0.008039392471313477, 0.007911424160003662, 0.007915520191192627, 0.007816448211669921, 0.007864672183990479, 0.007925759792327881, 0.008136223793029786, 0.008098688125610352, 0.007927360057830811, 0.007940095901489258, 0.007895552158355712, 0.007933087825775146, 0.007845791816711425, 0.007907872200012206, 0.007979360103607178, 0.007947455883026124, 0.007928736209869384, 0.00790057611465454, 0.007814752101898194, 0.007940256118774414, 0.007809792041778564, 0.007827775955200195, 0.007983839988708497, 0.007926559925079346, 0.007905183792114258, 0.008011712074279784, 0.007868768215179444, 0.00786243200302124, 0.007849535942077636, 0.007906976222991944, 0.0079202561378479, 0.007906688213348388, 0.007919551849365235, 0.0078854398727417, 0.009421055793762206, 0.008104767799377442, 0.008098336219787597, 0.00799567985534668, 0.008018431663513183, 0.00792748785018921, 0.008021984100341797, 0.007900767803192139, 0.007863008022308349, 0.00798201608657837, 0.007977759838104248, 0.007938047885894776, 0.00791756820678711, 0.00787830400466919, 0.007946591854095459, 0.008069120407104492, 0.007983295917510987, 0.008101375579833984, 0.008012096405029296, 0.007974495887756347, 0.00793168020248413, 0.007975552082061767, 0.00789673614501953, 0.007899487972259521, 0.007903232097625732, 0.008569952011108398, 0.007969120025634765, 0.00817142391204834, 0.0075961918830871585, 0.00787340784072876, 0.007870975971221925, 0.00791980791091919, 0.007911359786987305, 0.007925343990325929, 0.00792140817642212, 0.00801411247253418, 0.007938848018646241, 0.00803001594543457, 0.00793612813949585, 0.007968768119812012, 0.008017215728759765, 0.007938752174377442, 0.007909056186676025, 0.008051008224487304, 0.007880640029907227, 0.007848000049591065, 0.007870463848114014, 0.007862271785736084, 0.00785756778717041, 0.0078854079246521, 0.007954432010650634, 0.007872511863708496, 0.007965983867645264, 0.008166111946105958, 0.007880127906799317, 0.007941760063171387, 0.007800864219665527, 0.007819712162017822, 0.007990848064422608, 0.007896063804626464, 0.007852287769317626, 0.007849023818969727, 0.008011455535888673, 0.008008031845092774, 0.00797753620147705, 0.00786636781692505, 0.007960576057434082, 0.007861951828002929, 0.007985472202301025, 0.007848095893859864, 0.007880544185638428, 0.007849247932434082, 
0.007995423793792724, 0.007967423915863038, 0.00793721580505371, 0.00792195177078247, 0.00793443202972412, 0.007942431926727295, 0.008036128044128418, 0.007976960182189942, 0.007921664237976075, 0.007935008049011231, 0.008135040283203126, 0.007983712196350098, 0.007998464107513427, 0.008096128463745118, 0.008015839576721192, 0.007964320182800293, 0.008065407752990722, 0.00819814395904541, 0.008358431816101073, 0.007917407989501954, 0.008046208381652832, 0.007958144187927245, 0.007909823894500732, 0.0079716157913208, 0.008027039527893066, 0.007994463920593262, 0.007982687950134277, 0.00793942403793335, 0.00796937608718872, 0.008022239685058594, 0.008167360305786133, 0.008078911781311034, 0.007975359916687012, 0.007968128204345703, 0.00801580810546875, 0.007943935871124267, 0.007940735816955567, 0.007903711795806885, 0.00795795202255249, 0.007944608211517334, 0.008269824028015137, 0.007900256156921387, 0.008805184364318848, 0.00795414400100708, 0.008316320419311523, 0.008576319694519044, 0.007956128120422363, 0.00794540786743164, 0.007967520236968994, 0.007942304134368897, 0.008029567718505859, 0.00790563201904297, 0.008030559539794922, 0.008031711578369141, 0.007897600173950196, 0.007917695999145508, 0.007880415916442871, 0.007907008171081543, 0.008345919609069824, 0.008002655982971191, 0.008000351905822755, 0.007997504234313965, 0.00806112003326416, 0.008336640357971192, 0.008401632308959961, 0.008417119979858398, 0.00818124771118164, 0.008079872131347657, 0.008343551635742187, 0.008269824028015137, 0.008033663749694823, 0.008028223991394043, 0.008106656074523926, 0.007992928028106689, 0.008108351707458496, 0.008126784324645996, 0.007968351840972901, 0.008128607749938965, 0.007939167976379394, 0.007974912166595459, 0.007883615970611572, 0.007866464138031005, 0.00762940788269043, 0.007892608165740967, 0.007862304210662842, 0.008034015655517578, 0.007881472110748291, 0.008148863792419433, 0.007946239948272706, 0.007906623840332031, 0.008001472473144531, 0.007946944236755371, 0.007913536071777343, 0.007963744163513184, 0.007887775897979737, 0.007923935890197754, 0.007914720058441162, 0.008024352073669434, 0.007911136150360107, 0.008066880226135253, 0.00818057632446289, 0.007898303985595703, 0.007844672203063965, 0.007989151954650879, 0.00787660789489746, 0.007837471961975097, 0.007868383884429932, 0.007901440143585205, 0.007890495777130127, 0.00788262414932251, 0.007998015880584717, 0.007886688232421874, 0.008084639549255371, 0.00787936019897461, 0.007859839916229247, 0.00795084810256958, 0.007876895904541015, 0.00790723180770874, 0.007965760231018066, 0.007958752155303955, 0.00794697618484497, 0.007964511871337891, 0.008091808319091797, 0.008033568382263184, 0.007981791973114013, 0.008132575988769531, 0.008025823593139648, 0.007982944011688232, 0.008177984237670898, 0.00796281623840332, 0.008046015739440918, 0.008031840324401856, 0.007998144149780274, 0.00805027198791504, 0.008035200119018555, 0.008148768424987792, 0.008162528038024903, 0.008218784332275391, 0.008014016151428223, 0.008128959655761719, 0.008171520233154296, 0.008175616264343261, 0.008189215660095214, 0.008350399971008301, 0.008344736099243164, 0.008048831939697266, 0.008244959831237793, 0.008357600212097168, 0.008280320167541504, 0.008211808204650879, 0.008108991622924805, 0.008162143707275391, 0.008159456253051757, 0.00820304012298584, 0.008130016326904297, 0.008266048431396484, 0.008927103996276855, 0.00841868782043457, 0.00825443172454834, 0.008095744132995606, 0.008160896301269531, 0.008083904266357422, 
0.007976928234100341, 0.007932223796844483, 0.007954080104827881, 0.007937695980072022, 0.007885087966918945, 0.007905344009399413, 0.007894911766052247, 0.007891071796417235, 0.007856128215789794, 0.00782473611831665, 0.007869088172912597, 0.007872511863708496, 0.007858176231384278, 0.008099871635437012, 0.008138431549072265, 0.00803395175933838, 0.007916160106658935, 0.007882239818572999, 0.007834112167358399, 0.007826591968536377, 0.00785430383682251, 0.007841919898986817, 0.007801343917846679, 0.007815231800079345, 0.007785408020019531, 0.007790719985961914, 0.007852447986602782, 0.007794303894042969, 0.007990111827850341, 0.0078438401222229, 0.007858176231384278, 0.00783686399459839, 0.007864927768707276, 0.007876512050628661, 0.007840288162231446, 0.00784771203994751, 0.007854015827178954, 0.007849376201629638, 0.007794623851776123, 0.007811808109283448, 0.007809023857116699, 0.007788576126098633, 0.007960544109344483, 0.00798095989227295, 0.007843264102935791, 0.007902912139892578, 0.007628799915313721, 0.008029664039611816, 0.00802665615081787, 0.007937151908874512, 0.007860640048980712, 0.007854464054107667, 0.007843935966491699, 0.009334783554077148, 0.009305120468139649, 0.008559103965759277, 0.007964831829071045, 0.008201919555664062, 0.008099552154541015, 0.007949376106262207, 0.007894464015960693, 0.007946656227111817, 0.007919616222381591, 0.007790592193603516, 0.007874656200408935, 0.00791542387008667, 0.00787660789489746, 0.007913407802581787, 0.00791155195236206, 0.007884736061096192, 0.007833663940429687, 0.00806015968322754, 0.00791644811630249, 0.007861504077911377, 0.00786617612838745, 0.007950655937194824, 0.007889472007751464, 0.00806281566619873, 0.00792739200592041, 0.008006048202514649, 0.007956319808959961, 0.00792905616760254, 0.00796665620803833, 0.008094847679138184, 0.00802393627166748, 0.008119423866271973, 0.008089568138122559, 0.008028544425964356, 0.008222368240356446, 0.008088447570800782, 0.008066399574279785, 0.008065695762634278, 0.008269248008728027, 0.008202783584594727, 0.008406432151794433, 0.008258079528808594, 0.008177760124206544, 0.008070336341857911, 0.008063808441162109, 0.008010815620422364, 0.008047679901123046, 0.008064800262451172, 0.007946208000183105, 0.008121567726135254, 0.008280927658081055, 0.008312928199768066, 0.008082816123962402, 0.008179327964782716, 0.008067999839782715, 0.0082227201461792, 0.008150591850280761, 0.008149439811706544, 0.008286208152770995, 0.008262880325317383, 0.008461088180541991, 0.008017919540405273, 0.007997791767120361, 0.007942815780639649, 0.008148991584777832, 0.007954783916473388, 0.00792249584197998, 0.007911168098449706, 0.00790127992630005, 0.007929855823516846, 0.007978687763214112, 0.007884992122650146, 0.00797654390335083, 0.007952928066253662, 0.007853119850158692, 0.007907584190368652, 0.00795740795135498, 0.007872288227081298, 0.007923711776733398, 0.008431615829467774, 0.007974431991577149, 0.008060864448547363, 0.008014080047607421, 0.007987071990966797, 0.007945663928985595, 0.007940832138061524, 0.007975168228149414, 0.007962624073028564, 0.007909664154052734, 0.00790828800201416, 0.007976863861083984, 0.008072128295898437, 0.007880064010620117, 0.007892831802368164, 0.007867104053497314, 0.007819263935089112, 0.00789692783355713, 0.007894495964050292, 0.007897535800933837, 0.0078789119720459, 0.007903232097625732, 0.007876319885253906, 0.007974592208862305, 0.008026432037353516, 0.008057184219360351, 0.008042240142822265, 0.008108480453491211, 0.008115519523620605, 
0.008081600189208985, 0.008003840446472168, 0.008044320106506348, 0.007989471912384033, 0.007968512058258057, 0.007992640018463134, 0.007983712196350098, 0.008126784324645996, 0.007940192222595215, 0.007983327865600586, 0.0076221442222595215, 0.007946368217468261, 0.00793398380279541, 0.007923967838287353, 0.00827295970916748, 0.007970975875854493, 0.007932735919952392, 0.007953375816345214, 0.007895743846893311, 0.00788262414932251, 0.00792140817642212, 0.007912288188934326, 0.008021696090698242, 0.007932064056396484, 0.007956480026245117, 0.00796623992919922, 0.00799945592880249, 0.008013343811035157, 0.007856639862060547, 0.007864799976348877, 0.007867616176605224, 0.008022208213806153, 0.007987743854522705, 0.007953824043273926, 0.00802064037322998, 0.007974847793579102, 0.007989503860473634, 0.008043583869934081, 0.008030976295471191, 0.008110207557678223, 0.00795359992980957, 0.0079203200340271, 0.00810092830657959, 0.008118623733520508, 0.007892672061920167, 0.0078951678276062, 0.007922463893890381, 0.007878655910491944, 0.007822815895080566, 0.007957024097442627, 0.008040448188781739, 0.00786624002456665, 0.00790335988998413, 0.007815167903900147, 0.007804927825927735, 0.007825407981872558, 0.007839744091033935, 0.007956480026245117, 0.007933951854705811, 0.007897056102752686, 0.00792742395401001, 0.008016287803649902, 0.007964352130889892, 0.007956799983978271, 0.007886847972869874, 0.007894336223602295, 0.007921823978424072, 0.007987743854522705, 0.007935840129852295, 0.007964863777160644, 0.007911744117736817, 0.00785158395767212, 0.00821065616607666, 0.007647232055664062, 0.007976384162902831, 0.008061504364013672, 0.007959839820861816, 0.00804867172241211, 0.008170175552368163, 0.007896671772003174, 0.007901311874389649, 0.007899199962615967, 0.00801369571685791, 0.007923391819000244, 0.00799232006072998, 0.007941792011260987, 0.008032256126403809, 0.00792294406890869, 0.007888991832733155, 0.007888927936553955, 0.007932447910308837, 0.007912767887115478, 0.007888864040374756, 0.007840576171875, 0.007904767990112305, 0.007900864124298095, 0.007880799770355225, 0.007885536193847657, 0.007858176231384278, 0.00796284818649292, 0.007910272121429444, 0.007863200187683106, 0.007897088050842285, 0.00786947202682495, 0.007885663986206055, 0.00783302402496338, 0.007828159809112549, 0.007841792106628418, 0.007845888137817383, 0.007888160228729247, 0.007857888221740723, 0.007938303947448731, 0.00790172815322876, 0.00785430383682251, 0.007923711776733398, 0.007925055980682372, 0.007891647815704346, 0.007890016078948974, 0.007948351860046386, 0.007885024070739747, 0.007928192138671875, 0.007877024173736572, 0.00791535997390747, 0.007976960182189942, 0.007866496086120606, 0.007872416019439697, 0.007876031875610352, 0.007976863861083984, 0.007865983963012696, 0.007863296031951903, 0.007833600044250488, 0.007847936153411865, 0.007921664237976075, 0.007960063934326172, 0.008243712425231933, 0.007945280075073242, 0.007634335994720459, 0.007887455940246582, 0.00790719985961914, 0.007872640132904053, 0.007859456062316894, 0.007837535858154297, 0.007852960109710693, 0.007867712020874024, 0.007854527950286865, 0.007829311847686768, 0.007887584209442139, 0.007859936237335205, 0.007863359928131104, 0.007873472213745117, 0.007880703926086426, 0.00791539192199707, 0.007952352046966553, 0.007952415943145752, 0.007906496047973633, 0.00788809585571289, 0.008115360260009766, 0.007987775802612306, 0.007878655910491944, 0.007914527893066407, 0.007871456146240234, 0.007872831821441651, 0.007855072021484374, 
0.008047136306762695, 0.007880896091461182, 0.007935167789459229, 0.007881408214569091, 0.007925471782684327, 0.008064800262451172, 0.008149472236633301, 0.008060735702514648, 0.007956831932067872, 0.00789244794845581, 0.007872128009796142, 0.007865248203277588, 0.008137760162353515, 0.008000063896179199, 0.007938464164733887, 0.007912735939025878, 0.007911488056182862, 0.007864672183990479, 0.008028703689575195, 0.007985951900482178, 0.007947455883026124, 0.008066623687744141, 0.008055232048034667, 0.00796777582168579, 0.007963488101959229, 0.007933887958526611, 0.007898528099060059, 0.007938655853271484, 0.007886847972869874, 0.007870463848114014, 0.00788479995727539, 0.00807487964630127, 0.00797324800491333, 0.00791756820678711, 0.007887040138244629, 0.007956255912780761, 0.0075895037651062015, 0.00785264015197754, 0.0078887357711792, 0.007818975925445556, 0.007845312118530274, 0.007902207851409913, 0.007891104221343995, 0.008044384002685546, 0.007862304210662842, 0.008012096405029296, 0.007924511909484863, 0.00788979196548462, 0.008326975822448731, 0.007956672191619872, 0.007913472175598145, 0.007913023948669433, 0.008159680366516113, 0.007956831932067872, 0.007921023845672608, 0.007911712169647216, 0.007952095985412598, 0.007912799835205079, 0.007933119773864745, 0.007900352001190186, 0.008175583839416503, 0.007919680118560792, 0.0078788161277771, 0.007903552055358887, 0.007914912223815919, 0.007938303947448731, 0.007951039791107178, 0.00795743989944458, 0.007979135990142822, 0.008266400337219239, 0.00806015968322754, 0.007952479839324952, 0.007910048007965088, 0.007919583797454834, 0.007916768074035645, 0.007919551849365235, 0.00798195219039917, 0.008842656135559082, 0.0081659517288208, 0.00791964817047119, 0.007872831821441651, 0.007971744060516357, 0.007851967811584473, 0.008061792373657227, 0.008132608413696289, 0.007913568019866944, 0.00788483190536499, 0.00789302396774292, 0.007849503993988037, 0.007876927852630616, 0.008034239768981934, 0.007874176025390625, 0.007901631832122802, 0.007966623783111573, 0.007841887950897217, 0.007941728115081786, 0.008001024246215821, 0.00790825605392456, 0.007867904186248779, 0.007587039947509765, 0.007920447826385499, 0.007972000122070312, 0.007893824100494384, 0.007910496234893798, 0.007895967960357665, 0.007913472175598145, 0.007874335765838623, 0.007967135906219482, 0.007941376209259034, 0.00790511989593506, 0.007887551784515381, 0.007907360076904298, 0.008022015571594238, 0.007933951854705811, 0.007951392173767089, 0.00792294406890869, 0.007931647777557373, 0.007849952220916749, 0.008091648101806641, 0.007854080200195313, 0.007938047885894776, 0.007890944004058837, 0.007947679996490478, 0.007967328071594238, 0.007924767971038818, 0.007943359851837159, 0.008163040161132812, 0.007934239864349366, 0.007916319847106933, 0.007889920234680176, 0.007978367805480956, 0.007897727966308594, 0.007872735977172852, 0.007884768009185791, 0.007886655807495117, 0.007854080200195313, 0.00784716796875, 0.00791539192199707, 0.007875167846679687, 0.00798748779296875, 0.007931456089019776, 0.007877056121826171, 0.007860223770141601, 0.007872447967529297, 0.00790067195892334, 0.007857920169830323, 0.007854911804199219, 0.007886847972869874, 0.007869696140289307, 0.007877120018005371, 0.007872767925262451, 0.00819315242767334, 0.007943039894104003, 0.00791926383972168, 0.007946591854095459, 0.007890207767486572, 0.008029184341430663, 0.00788812780380249, 0.007887328147888183, 0.007897088050842285, 0.00787990379333496, 0.00786511993408203, 0.007603968143463135, 
0.007892831802368164, 0.007972320079803467, 0.007844639778137206, 0.007838975906372071, 0.007811808109283448, 0.007883999824523926, 0.007881696224212647, 0.007835519790649415, 0.007818912029266357, 0.007857664108276367, 0.007848927974700928, 0.007872384071350097, 0.007845632076263428, 0.007967103958129882, 0.007868415832519531, 0.007883008003234864, 0.007839488029479981, 0.007813055992126465, 0.007839424133300781, 0.007995872020721436, 0.007896992206573486, 0.007856128215789794, 0.007891168117523193, 0.007852159976959228, 0.007943456172943114, 0.007813504219055176, 0.007974912166595459, 0.007789631843566894, 0.00785487985610962, 0.007840127944946288, 0.007810880184173584, 0.007833568096160889, 0.007907328128814697, 0.00780083179473877, 0.007895264148712158, 0.007885695934295655, 0.007861472129821778, 0.00788812780380249, 0.007893439769744872, 0.008039648056030274, 0.007842591762542725, 0.007896768093109132, 0.00783196783065796, 0.007830944061279297, 0.007873023986816406, 0.00782966423034668, 0.007872352123260498, 0.008128512382507324, 0.0081812162399292, 0.009032480239868164, 0.009238431930541992, 0.007921343803405761, 0.008087712287902832, 0.007880703926086426, 0.007888512134552002, 0.00788313579559326, 0.007888895988464355, 0.007819519996643067, 0.008070079803466797, 0.008002367973327637, 0.007865568161010743, 0.007856671810150146, 0.0075469760894775395, 0.00783792018890381, 0.007891456127166747, 0.007826528072357179, 0.007863200187683106, 0.00784825611114502, 0.007849664211273193, 0.00784934377670288, 0.007852992057800294, 0.007865312099456788, 0.00782751989364624, 0.008024671554565429, 0.00786182403564453, 0.007848447799682617, 0.007839200019836425, 0.007862912178039552, 0.00787171220779419, 0.008204895973205567, 0.007837791919708252, 0.007841184139251708, 0.007856319904327393, 0.007897664070129394, 0.00784329605102539, 0.007876383781433105, 0.007852863788604737, 0.007826335906982422, 0.00780787181854248, 0.00783564805984497, 0.007790592193603516, 0.00786252784729004, 0.007830495834350587, 0.007841919898986817, 0.00783180809020996, 0.007835360050201415, 0.008063551902770996, 0.007917791843414306, 0.007845920085906982, 0.00783580780029297, 0.0078755202293396, 0.007874944210052491, 0.007987616062164306, 0.007845344066619873, 0.0078505277633667, 0.00785148811340332, 0.007842336177825928, 0.007882751941680909, 0.007837696075439453, 0.007987103939056396, 0.00785968017578125, 0.007848031997680664, 0.007985695838928223, 0.007900544166564941, 0.007828095912933349, 0.007886591911315919, 0.007880288124084473, 0.007924416065216065, 0.007854176044464112, 0.007844831943511963, 0.008176544189453124, 0.007854080200195313, 0.007986623764038086, 0.007858560085296631, 0.007909152030944824, 0.00786636781692505, 0.008079392433166504, 0.007912447929382324, 0.007893152236938476, 0.007844704151153565, 0.007798751831054687, 0.007873888015747071, 0.007910208225250244, 0.007829504013061523, 0.007954400062561035, 0.00788265609741211, 0.007859712123870849, 0.007860415935516358, 0.007850304126739501, 0.008038399696350097, 0.007927807807922363, 0.007886144161224365, 0.007880608081817626, 0.00783180809020996, 0.007868735790252686, 0.007833504199981689, 0.007866432189941406, 0.007860608100891113, 0.008044416427612305, 0.00789299201965332, 0.007880703926086426, 0.00787772798538208, 0.008028287887573243, 0.007907328128814697, 0.007878687858581543, 0.007889664173126221, 0.007882751941680909, 0.007935999870300293, 0.007929855823516846, 0.00791756820678711, 0.007907616138458253, 0.007952095985412598, 0.008003583908081055, 
0.007888319969177247, 0.007891520023345948, 0.008031904220581055, 0.007948095798492432, 0.007891488075256348, 0.007911424160003662, 0.007886847972869874, 0.007928864002227783, 0.007850719928741454, 0.007893343925476075, 0.00815231990814209, 0.00808944034576416, 0.007955039978027344, 0.007928031921386719, 0.008037887573242187, 0.007961056232452392, 0.008110112190246582, 0.008292351722717285, 0.008040575981140137, 0.007942016124725343, 0.007913472175598145, 0.008056287765502929, 0.008160863876342773, 0.008171744346618652, 0.00812070369720459, 0.007836383819580078, 0.007987135887145997, 0.00801798439025879, 0.008056832313537597, 0.008242303848266602, 0.008145792007446289, 0.008064576148986816, 0.008161151885986328, 0.007999263763427734, 0.00799948787689209, 0.008188480377197266, 0.007977503776550293, 0.007957536220550537, 0.007931712150573731, 0.007897952079772948, 0.00791756820678711, 0.007890944004058837, 0.007867839813232422, 0.007819712162017822, 0.007856256008148193, 0.008072256088256836, 0.007967040061950684, 0.00791542387008667, 0.00790396785736084, 0.007925439834594726, 0.00786464023590088, 0.007886720180511475, 0.007876383781433105, 0.007883103847503662, 0.00790937614440918, 0.007868415832519531, 0.00789686393737793, 0.00786198377609253, 0.007877120018005371, 0.007866655826568604, 0.007853216171264648, 0.00783622407913208, 0.007971903800964356, 0.00799174404144287, 0.008015999794006348, 0.008369919776916503, 0.008079999923706055, 0.008021056175231933, 0.007923903942108155, 0.007895584106445313, 0.008035584449768067, 0.00789737606048584, 0.007895520210266113, 0.007885024070739747, 0.00789849615097046, 0.007936639785766602, 0.007972864151000977, 0.008028160095214844, 0.008062975883483887, 0.008038559913635253, 0.007966303825378418, 0.007999743938446045, 0.00799129581451416, 0.007940095901489258, 0.008021023750305176, 0.008571871757507324, 0.00809545612335205, 0.007995456218719483, 0.007999616146087647, 0.008085760116577148, 0.00813856029510498, 0.00810598373413086, 0.008166720390319824, 0.008100671768188477, 0.008038271903991699, 0.00798636817932129, 0.007942975997924805, 0.007942143917083741, 0.007997087955474854, 0.007913887977600098, 0.007903232097625732, 0.007948224067687987, 0.008062975883483887, 0.008062591552734375, 0.008161664009094238, 0.008386560440063476, 0.00824345588684082, 0.008889696121215821, 0.008348287582397461, 0.00851747226715088, 0.00836729621887207, 0.008264512062072753, 0.008118207931518556, 0.008071167945861817, 0.007955711841583252, 0.00794700813293457, 0.007899136066436767, 0.007887072086334228, 0.007929183959960938, 0.007870912075042725, 0.007870592117309571, 0.007869408130645753, 0.007978015899658204, 0.007929728031158448, 0.007913343906402588, 0.007909503936767578, 0.007957568168640137, 0.007914688110351563, 0.008056544303894042, 0.008189791679382325, 0.007948480129241943, 0.007940288066864015, 0.007981184005737305, 0.007999231815338135, 0.007947264194488525, 0.00809670352935791, 0.007980703830718994, 0.007929599761962891, 0.008001343727111816, 0.008155936241149903, 0.007931903839111328, 0.00791103982925415, 0.007897664070129394, 0.007912384033203126, 0.007932799816131592, 0.007921823978424072, 0.007878496170043945, 0.007898911952972413, 0.007989471912384033, 0.007897088050842285, 0.007886847972869874, 0.007823391914367676, 0.008120351791381836, 0.008195615768432618, 0.007948768138885499, 0.007927519798278809, 0.007894847869873047, 0.00790169620513916, 0.007901152133941651, 0.007876895904541015, 0.007928639888763427, 0.007860799789428712, 
0.007860576152801514, 0.00783564805984497, 0.007864223957061768, 0.008003680229187012, 0.007925631999969483, 0.007915647983551025, 0.007890944004058837, 0.007866623878479004, 0.007937888145446777, 0.007913375854492187, 0.007959839820861816, 0.007949088096618652, 0.0078788480758667, 0.00791919994354248, 0.007927872180938721, 0.007945824146270751, 0.007954880237579346, 0.008086848258972167, 0.008106240272521973, 0.00796281623840332, 0.007944416046142579, 0.00792521619796753, 0.007959167957305908, 0.007978271961212159, 0.007940832138061524, 0.007897088050842285, 0.007835519790649415, 0.007917632102966308, 0.007882304191589355, 0.007920415878295898, 0.007896351814270019, 0.008042016029357911, 0.007928736209869384, 0.00786630392074585, 0.007919680118560792, 0.007894271850585938, 0.007946688175201416, 0.007917247772216796, 0.008007328033447265, 0.007953375816345214, 0.007927968025207519, 0.007910240173339843, 0.008176063537597656, 0.008163904190063477, 0.008398847579956055, 0.00808681583404541, 0.007967455863952637, 0.007945216178894043, 0.007908127784729003, 0.007921760082244872, 0.007917151927947999, 0.007926400184631349]",tokens/s,125.53594918468467,, float16-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,809.8816,763.232256,0.0,360.710144,345.493504,s,1,9.0937734375,9.0937734375,0.0,9.0937734375,9.0937734375,9.0937734375,9.0937734375,[9.0937734375],,kWh,3.080807275000552e-06,3.326905821665727e-07,1.0177785919997587e-06,4.431276449166883e-06,,MB,1265.983488,777.91232,0.0,362.807296,344.082944,s,22,0.19736921691894532,0.008971328041770242,0.00014806457392277755,0.008970719814300537,0.00918852138519287,0.009217401885986328,0.009327265615463258,"[0.009066495895385742, 0.008804160118103027, 0.008830656051635742, 0.00889408016204834, 0.00877900791168213, 0.009202079772949219, 0.00898083209991455, 0.009050368309020996, 0.008961503982543945, 0.00897993564605713, 0.00882908821105957, 0.008859040260314942, 0.009218208312988281, 0.00901411247253418, 0.00899078369140625, 0.009060288429260254, 0.009043040275573731, 0.00888700771331787, 0.008839808464050292, 0.009356255531311036, 0.008953248023986816, 0.00876921558380127]",tokens/s,28535.35160101954,kWh,2.557923643169887e-07,2.819674318505453e-08,1.6702533307530625e-07,4.5101444057734944e-07,tokens/kWh,567609320.1634322,MB,1312.346112,803.078144,0.0,387.97312,344.085504,s,22,10.187956878662108,0.46308894903009584,0.007142776803522997,0.4622943420410156,0.4738036651611328,0.47440325469970707,0.4755786694335937,"[0.46771792602539064, 0.45257177734375, 0.4543381958007813, 0.45043484497070313, 0.45230789184570314, 0.4614763488769531, 0.4719462890625, 0.46100653076171877, 0.4616919250488281, 0.46002606201171875, 0.4599254455566406, 0.4630877685546875, 0.4758856201171875, 0.46697140502929685, 0.4744239501953125, 0.4663924560546875, 0.4698499755859375, 0.4643876037597656, 0.47401004028320315, 0.4628967590332031, 0.45826971435546876, 
0.4583383483886719]",tokens/s,136.0429786371466,kWh,1.3048987189281563e-05,1.4390907962896567e-06,5.710933914742879e-06,2.01990119003141e-05,tokens/kWh,3118964.447910461,,s,1386,10.176283417701736,0.007342195828067621,0.0002092006692209259,0.007301376104354858,0.007582000017166138,0.007704439878463745,0.008015152120590213,"[0.00725267219543457, 0.007657440185546875, 0.007507423877716065, 0.007467328071594238, 0.007401823997497559, 0.007552896022796631, 0.007419648170471191, 0.007696000099182129, 0.007534751892089844, 0.0076139202117919925, 0.0077916159629821775, 0.00790118408203125, 0.007712768077850342, 0.007587903976440429, 0.007792448043823242, 0.007555263996124268, 0.0076819839477539065, 0.007548927783966064, 0.007473152160644531, 0.007536640167236328, 0.00762063980102539, 0.007412928104400635, 0.007629600048065186, 0.007675903797149658, 0.007724800109863281, 0.007682303905487061, 0.007687327861785889, 0.0075456957817077635, 0.00801193618774414, 0.007651167869567871, 0.0075697598457336425, 0.007460447788238525, 0.007462975978851319, 0.007561215877532959, 0.007253759860992431, 0.007296639919281006, 0.007287424087524414, 0.007218751907348633, 0.007226016044616699, 0.00714470386505127, 0.0071001920700073246, 0.007317567825317383, 0.007158463954925537, 0.0070899839401245114, 0.007151455879211426, 0.007047520160675049, 0.007124735832214356, 0.007184415817260742, 0.0070763201713562015, 0.0072007360458374026, 0.0071060161590576175, 0.007184512138366699, 0.007227583885192871, 0.007268352031707764, 0.007220736026763916, 0.0072681279182434085, 0.007506783962249756, 0.007196544170379638, 0.007251967906951904, 0.007189856052398681, 0.007112415790557861, 0.007246784210205078, 0.0071764798164367675, 0.0069465599060058595, 0.007251391887664795, 0.007422495841979981, 0.0075467519760131834, 0.0074490242004394535, 0.007304192066192627, 0.007294976234436036, 0.007138271808624267, 0.00714515209197998, 0.007228831768035889, 0.007187392234802246, 0.0071695041656494145, 0.007180799961090088, 0.007104512214660645, 0.007104640007019043, 0.007206624031066895, 0.007135168075561523, 0.007100287914276123, 0.007108448028564453, 0.007164063930511474, 0.007315968036651611, 0.007143263816833496, 0.007122943878173828, 0.0071188478469848635, 0.007126175880432129, 0.0071913919448852535, 0.007218400001525879, 0.007176991939544678, 0.007141215801239014, 0.007123104095458984, 0.007153855800628662, 0.007202655792236328, 0.007132256031036377, 0.007168896198272705, 0.007085855960845947, 0.007137504100799561, 0.007095680236816406, 0.007104608058929443, 0.007105216026306153, 0.007293951988220215, 0.0072548799514770505, 0.007171135902404785, 0.00713811206817627, 0.007098495960235596, 0.007112703800201416, 0.0071181759834289555, 0.007121056079864502, 0.007159647941589356, 0.0071727681159973146, 0.007100287914276123, 0.00721727991104126, 0.00721504020690918, 0.007133120059967041, 0.007179840087890625, 0.007141183853149414, 0.0071955199241638184, 0.0072111678123474125, 0.007124159812927246, 0.007366432189941406, 0.007084799766540527, 0.007155680179595948, 0.007195903778076172, 0.007045983791351318, 0.006805344104766846, 0.007078080177307129, 0.0072173762321472166, 0.007287199974060059, 0.007447679996490478, 0.007356895923614502, 0.007798943996429443, 0.007248127937316894, 0.007256063938140869, 0.007317279815673828, 0.0072276158332824705, 0.007161215782165527, 0.0071083841323852535, 0.007125120162963867, 0.007244095802307129, 0.007117216110229492, 0.0075345921516418455, 0.007206912040710449, 0.00725545597076416, 
0.0071378879547119144, 0.007157343864440918, 0.007094624042510986, 0.007063648223876953, 0.007288159847259522, 0.007193215847015381, 0.007140448093414306, 0.007114751815795899, 0.007144351959228515, 0.0071346559524536135, 0.007193120002746582, 0.007173823833465576, 0.007108960151672363, 0.007278656005859375, 0.007344255924224854, 0.00728659200668335, 0.007136864185333252, 0.007114367961883545, 0.0071114559173583985, 0.007262207984924316, 0.007080128192901611, 0.007169600009918213, 0.0070763201713562015, 0.007159584045410157, 0.007189856052398681, 0.007151455879211426, 0.007070432186126709, 0.00721340799331665, 0.007200767993927002, 0.007229184150695801, 0.00714246416091919, 0.007092160224914551, 0.007106783866882324, 0.007215904235839844, 0.007113791942596435, 0.007107776165008545, 0.007134528160095215, 0.007059904098510742, 0.007091360092163086, 0.007215775966644287, 0.007211487770080566, 0.008033375740051269, 0.007185184001922608, 0.007267776012420654, 0.0068477439880371095, 0.007182623863220215, 0.0072740478515625, 0.007230368137359619, 0.007145472049713135, 0.007120895862579346, 0.0072624959945678715, 0.0071287999153137206, 0.007125120162963867, 0.007144768238067627, 0.007144000053405762, 0.007134687900543213, 0.007148064136505127, 0.007792255878448486, 0.007161248207092285, 0.007128032207489013, 0.007151487827301025, 0.007108736038208008, 0.007093920230865479, 0.007053664207458496, 0.007081024169921875, 0.007178463935852051, 0.007119584083557129, 0.007120128154754639, 0.007221951961517334, 0.0071396160125732425, 0.0070874238014221195, 0.007225823879241943, 0.007051263809204102, 0.007116864204406739, 0.007178431987762452, 0.0071159682273864745, 0.007117248058319091, 0.007030464172363281, 0.007043519973754883, 0.007148575782775879, 0.007098495960235596, 0.0071238079071044924, 0.0070730881690979006, 0.007086783885955811, 0.007211008071899414, 0.007143424034118652, 0.007047167778015137, 0.007071648120880127, 0.0072623038291931155, 0.007081215858459473, 0.007035647869110108, 0.007065408229827881, 0.007143104076385498, 0.007053728103637695, 0.007091584205627441, 0.0070334081649780274, 0.007202400207519531, 0.007101215839385986, 0.007207871913909912, 0.00717632007598877, 0.007133664131164551, 0.007102975845336914, 0.007313151836395263, 0.0073768959045410155, 0.007161119937896729, 0.007070015907287597, 0.007133279800415039, 0.006961440086364746, 0.007171135902404785, 0.007154655933380127, 0.007196288108825683, 0.007094624042510986, 0.007120863914489746, 0.007147552013397217, 0.007157023906707764, 0.0072189121246337895, 0.007080031871795654, 0.007227359771728515, 0.007166912078857422, 0.007115808010101319, 0.0070850238800048825, 0.007108607769012451, 0.007221248149871826, 0.007230559825897217, 0.0070986242294311525, 0.0071727681159973146, 0.00713318395614624, 0.007123263835906982, 0.007267712116241455, 0.0072113280296325685, 0.007099647998809814, 0.007149695873260498, 0.007094912052154541, 0.00719046401977539, 0.0072622718811035155, 0.007135231971740722, 0.00723529577255249, 0.00728707218170166, 0.0071907520294189455, 0.007143455982208252, 0.007105535984039306, 0.007131904125213623, 0.007157248020172119, 0.007162047863006592, 0.007033152103424072, 0.0071179838180541995, 0.007129983901977539, 0.007118336200714111, 0.007217440128326416, 0.00710265588760376, 0.0071363520622253415, 0.007457695960998535, 0.00739958381652832, 0.007260223865509033, 0.00716156816482544, 0.007245888233184814, 0.007548128128051758, 0.0074514241218566896, 0.007178239822387696, 0.0071188478469848635, 0.007110527992248535, 
0.00717571210861206, 0.007116672039031983, 0.007097119808197021, 0.007113823890686035, 0.007152448177337647, 0.007173471927642823, 0.007170527935028076, 0.0070878081321716305, 0.007114560127258301, 0.009299967765808105, 0.007772223949432373, 0.008242112159729004, 0.00727347183227539, 0.007237631797790528, 0.007114751815795899, 0.007097472190856933, 0.007197567939758301, 0.0071393918991088864, 0.007112832069396973, 0.0070730881690979006, 0.007134751796722412, 0.00709935998916626, 0.007142848014831543, 0.007090752124786377, 0.007194623947143554, 0.007151135921478272, 0.007151423931121826, 0.0073322558403015135, 0.007169407844543457, 0.007125120162963867, 0.007156511783599853, 0.007265632152557373, 0.007152256011962891, 0.007739424228668213, 0.007270368099212646, 0.007237728118896484, 0.007242784023284912, 0.007284927845001221, 0.007240384101867676, 0.007220384120941162, 0.007164224147796631, 0.007203360080718994, 0.007207968235015869, 0.007192895889282226, 0.007297760009765625, 0.007222976207733154, 0.007454976081848145, 0.007338079929351806, 0.0072988481521606445, 0.007278719902038574, 0.0073175039291381834, 0.007294943809509277, 0.0073434882164001465, 0.0073181757926940915, 0.007481344223022461, 0.007349247932434082, 0.007322751998901368, 0.007496895790100097, 0.007742144107818603, 0.0075419840812683105, 0.007535071849822998, 0.007510335922241211, 0.007223135948181152, 0.007221407890319824, 0.007161280155181885, 0.007171807765960693, 0.007220032215118408, 0.007262239933013916, 0.007206912040710449, 0.00716377592086792, 0.00726800012588501, 0.007188511848449707, 0.006887648105621338, 0.007389056205749512, 0.007187935829162597, 0.007219903945922852, 0.007333280086517334, 0.007299488067626953, 0.0072390718460083004, 0.007232063770294189, 0.007256224155426026, 0.00732966423034668, 0.007351359844207764, 0.007340991973876953, 0.0073864002227783205, 0.007459551811218262, 0.007647232055664062, 0.007521728038787842, 0.007412288188934326, 0.007323647975921631, 0.007417247772216797, 0.007391744136810303, 0.0073300800323486326, 0.007354047775268555, 0.007307392120361328, 0.007303167819976806, 0.007329792022705078, 0.007518208026885987, 0.008095744132995606, 0.007532576084136963, 0.007442399978637695, 0.007677951812744141, 0.007372799873352051, 0.007448575973510742, 0.0073640961647033695, 0.007520768165588379, 0.007540736198425293, 0.007531775951385498, 0.007635712146759033, 0.007712831974029541, 0.007747360229492187, 0.007868576049804687, 0.007874271869659423, 0.007722623825073242, 0.007947264194488525, 0.007828320026397705, 0.007678304195404053, 0.0075207362174987795, 0.007585792064666748, 0.007475200176239013, 0.007517248153686523, 0.007537759780883789, 0.007481184005737305, 0.007458816051483155, 0.007485087871551514, 0.0074039678573608395, 0.0074505281448364255, 0.007626976013183594, 0.007489151954650879, 0.007665823936462402, 0.007585087776184082, 0.007416512012481689, 0.007456575870513916, 0.007448768138885498, 0.007489247798919678, 0.007000063896179199, 0.007411327838897705, 0.007342463970184327, 0.007346271991729736, 0.007271327972412109, 0.007267327785491944, 0.007365664005279541, 0.007477888107299805, 0.007444831848144531, 0.007485439777374267, 0.007497727870941162, 0.007544832229614258, 0.007444479942321777, 0.0073723201751708985, 0.007253983974456787, 0.0073077759742736816, 0.007322751998901368, 0.007227647781372071, 0.007460832118988037, 0.007539552211761474, 0.00722054386138916, 0.007335552215576172, 0.007260287761688232, 0.007374623775482178, 0.007244991779327393, 0.0072046399116516115, 
0.007220928192138672, 0.007270559787750244, 0.007274655818939209, 0.0072540159225463864, 0.007337279796600342, 0.007283391952514648, 0.007233535766601563, 0.0072633280754089355, 0.007424928188323974, 0.007218751907348633, 0.0071760001182556155, 0.007242144107818604, 0.007240032196044922, 0.007259520053863525, 0.007172607898712159, 0.007198336124420166, 0.0073837761878967284, 0.0072269439697265625, 0.007338272094726562, 0.007267360210418701, 0.007217951774597168, 0.007176191806793213, 0.007231135845184326, 0.007247456073760986, 0.007250815868377686, 0.007247744083404541, 0.007167744159698486, 0.007218624114990234, 0.007342495918273926, 0.007268928050994873, 0.00723356819152832, 0.00805900764465332, 0.007327424049377441, 0.007249983787536621, 0.007324960231781006, 0.007318143844604492, 0.007313439846038819, 0.006945759773254395, 0.007352255821228028, 0.007540736198425293, 0.007460864067077637, 0.007393280029296875, 0.007350272178649903, 0.007301119804382325, 0.007323647975921631, 0.007391007900238037, 0.007237855911254883, 0.00725548791885376, 0.007290783882141113, 0.007246496200561524, 0.007301119804382325, 0.007299071788787842, 0.007456768035888672, 0.007755583763122559, 0.007528639793395996, 0.007300320148468017, 0.007356544017791748, 0.007324319839477539, 0.007292928218841553, 0.0072724480628967286, 0.007310688018798828, 0.007412384033203125, 0.007303167819976806, 0.0072847681045532225, 0.007208735942840576, 0.007227647781372071, 0.007256095886230469, 0.007268256187438965, 0.007321599960327148, 0.007409728050231933, 0.0073294081687927245, 0.0073283519744873045, 0.007306975841522217, 0.007323904037475586, 0.007323391914367676, 0.007273727893829345, 0.007351039886474609, 0.007294943809509277, 0.007232863903045655, 0.0073062081336975095, 0.007315167903900147, 0.007294976234436036, 0.007207007884979248, 0.007422111988067627, 0.007206431865692139, 0.007287040233612061, 0.007331808090209961, 0.007292640209197998, 0.0072706880569458005, 0.007333375930786133, 0.007320064067840576, 0.007321599960327148, 0.007392320156097412, 0.00733894395828247, 0.007352320194244385, 0.00732150411605835, 0.007299168109893799, 0.007391232013702393, 0.007248928070068359, 0.007159967899322509, 0.007067903995513916, 0.007225344181060791, 0.0073497920036315915, 0.0073937602043151855, 0.007323647975921631, 0.007343776226043701, 0.007409023761749268, 0.007218239784240722, 0.007321631908416748, 0.007363455772399902, 0.007224607944488526, 0.007313087940216064, 0.007301184177398681, 0.007227519989013672, 0.007306335926055908, 0.007487872123718262, 0.007246208190917969, 0.007286880016326904, 0.007268383979797364, 0.007165823936462402, 0.007268352031707764, 0.007263232231140137, 0.007229440212249756, 0.007238656044006348, 0.007186431884765625, 0.007188223838806153, 0.007290815830230713, 0.007325439929962158, 0.007223872184753418, 0.007209375858306884, 0.007243360042572021, 0.007426047801971435, 0.007274496078491211, 0.007184383869171143, 0.007251520156860352, 0.007221792221069336, 0.0072293438911437986, 0.007325695991516113, 0.007324927806854248, 0.007399328231811523, 0.0074635839462280276, 0.007329567909240723, 0.0072740797996521, 0.007347008228302002, 0.007368383884429931, 0.007505983829498291, 0.00744268798828125, 0.007373888015747071, 0.007320511817932129, 0.0074505281448364255, 0.007347455978393555, 0.00722208023071289, 0.007267615795135498, 0.007237472057342529, 0.007250847816467285, 0.007292928218841553, 0.00723967981338501, 0.007210239887237548, 0.007231391906738281, 0.007344992160797119, 0.007265823841094971, 
0.007303328037261963, 0.007280896186828614, 0.007008255958557129, 0.007262527942657471, 0.00719046401977539, 0.007272192001342773, 0.007245088100433349, 0.0072568001747131345, 0.0072971200942993164, 0.0071981120109558105, 0.0071910400390625, 0.007403391838073731, 0.007419199943542481, 0.0073751997947692875, 0.007544608116149903, 0.007333600044250489, 0.007231647968292236, 0.007330336093902588, 0.007396927833557129, 0.007275263786315918, 0.007352223873138428, 0.007211071968078613, 0.007283967971801758, 0.0073837761878967284, 0.0072490558624267575, 0.007304255962371826, 0.007292736053466797, 0.007314911842346191, 0.007264768123626709, 0.007303199768066406, 0.007282048225402832, 0.007321792125701904, 0.007282783985137939, 0.007178912162780762, 0.007300511837005615, 0.007280928134918213, 0.007273952007293701, 0.00732803201675415, 0.007381152153015136, 0.0072271361351013185, 0.007304800033569336, 0.00731001615524292, 0.007323391914367676, 0.007557439804077149, 0.007258111953735351, 0.007262207984924316, 0.007283967971801758, 0.007244192123413086, 0.007287136077880859, 0.007392831802368164, 0.0071905279159545895, 0.007292640209197998, 0.0072486081123352055, 0.007284736156463623, 0.007332096099853516, 0.007272192001342773, 0.0071988158226013186, 0.007281727790832519, 0.007264575958251953, 0.007244319915771485, 0.007413760185241699, 0.007272384166717529, 0.007332064151763916, 0.007292768001556396, 0.007222847938537598, 0.00707583999633789, 0.007390848159790039, 0.007364480018615722, 0.007344639778137207, 0.007280640125274658, 0.007241663932800293, 0.007260255813598633, 0.0072416958808898925, 0.007172095775604248, 0.0072540159225463864, 0.007243775844573975, 0.007206912040710449, 0.007245279788970947, 0.007180831909179688, 0.007229440212249756, 0.007251967906951904, 0.00722492790222168, 0.0072544322013854984, 0.007313119888305664, 0.0071764798164367675, 0.007472896099090577, 0.0075532798767089845, 0.007819168090820313, 0.00789308786392212, 0.007401472091674805, 0.007427231788635254, 0.00725267219543457, 0.007340127944946289, 0.007247935771942138, 0.007222400188446045, 0.007314559936523438, 0.0072271361351013185, 0.0072540159225463864, 0.00724073600769043, 0.0073935680389404295, 0.007324351787567139, 0.007288832187652588, 0.007179679870605468, 0.007318111896514892, 0.007430272102355957, 0.007853792190551758, 0.007300704002380371, 0.007303616046905517, 0.0073340158462524415, 0.007305056095123291, 0.007263584136962891, 0.007225599765777588, 0.007356991767883301, 0.007363808155059814, 0.0072507839202880855, 0.007293087959289551, 0.007513728141784668, 0.0073946242332458495, 0.007309375762939453, 0.007297855854034424, 0.007322976112365723, 0.007500415802001953, 0.007312607765197754, 0.0073920321464538575, 0.007401472091674805, 0.007458047866821289, 0.0076193280220031735, 0.007573503971099854, 0.007208223819732666, 0.007520607948303223, 0.007502336025238037, 0.007319295883178711, 0.007262207984924316, 0.007258111953735351, 0.007401663780212403, 0.007415264129638672, 0.007465312004089356, 0.0074477438926696775, 0.007503935813903809, 0.007416575908660889, 0.007401535987854004, 0.007463967800140381, 0.0075, 0.007479743957519531, 0.007708960056304932, 0.0075706238746643065, 0.007506720066070557, 0.007430240154266358, 0.007497632026672364, 0.007559167861938477, 0.007602176189422607, 0.007638783931732178, 0.007678207874298096, 0.007634880065917969, 0.008971424102783203, 0.007672160148620605, 0.007752255916595459, 0.0076679039001464845, 0.007706431865692138, 0.007974912166595459, 0.007954432010650634, 
0.007676959991455078, 0.007766272068023682, 0.009019840240478515, 0.007661087989807129, 0.007492544174194336, 0.007446335792541504, 0.00759935998916626, 0.007471871852874756, 0.007337344169616699, 0.007311999797821045, 0.007331039905548096, 0.007277344226837158, 0.007255424022674561, 0.00725055980682373, 0.007453951835632324, 0.007381919860839844, 0.007300640106201172, 0.007264575958251953, 0.0073604159355163574, 0.007442527770996093, 0.007485439777374267, 0.007431583881378174, 0.0074980478286743165, 0.007375135898590088, 0.007398399829864502, 0.007436607837677002, 0.007336063861846924, 0.007456768035888672, 0.007915584087371827, 0.0077783999443054195, 0.007016128063201905, 0.0073968000411987304, 0.007361408233642578, 0.007397024154663086, 0.007381504058837891, 0.007280735969543457, 0.0073029761314392086, 0.007425983905792236, 0.007302944183349609, 0.007383264064788818, 0.007342080116271973, 0.007229504108428955, 0.00729699182510376, 0.00756499195098877, 0.007387296199798584, 0.007597856044769287, 0.0074336638450622555, 0.0072897601127624514, 0.007426047801971435, 0.007300352096557617, 0.0072360320091247555, 0.007352640151977539, 0.007325695991516113, 0.007260159969329834, 0.007339392185211182, 0.007322239875793457, 0.007413760185241699, 0.007411712169647216, 0.007438560009002685, 0.007375967979431152, 0.007383200168609619, 0.007405792236328125, 0.0072780799865722655, 0.007342912197113037, 0.007384416103363037, 0.007229279994964599, 0.007316287994384766, 0.007388800144195557, 0.007522687911987304, 0.007485439777374267, 0.0074670081138610836, 0.0073944320678710936, 0.007410560131072998, 0.0073396477699279785, 0.007240064144134521, 0.0074651517868042, 0.007372608184814453, 0.0073088321685791015, 0.007350751876831055, 0.0074691839218139645, 0.0074423041343688965, 0.007573503971099854, 0.007723008155822754, 0.0074878082275390626, 0.007685823917388916, 0.007710720062255859, 0.007628159999847412, 0.0075640959739685055, 0.007509856224060058, 0.007403647899627686, 0.007446368217468261, 0.0074561920166015625, 0.007592031955718994, 0.007128352165222168, 0.0075335359573364255, 0.007507775783538818, 0.007554240226745605, 0.007582240104675293, 0.007463391780853272, 0.007493631839752197, 0.007536096096038818, 0.007480000019073486, 0.007733183860778809, 0.007661664009094239, 0.0074627199172973635, 0.007536640167236328, 0.0075673599243164065, 0.00739353609085083, 0.007423744201660156, 0.007444479942321777, 0.007282783985137939, 0.007319615840911865, 0.007513951778411865, 0.007319744110107422, 0.007372704029083252, 0.007374112129211426, 0.0072997121810913085, 0.007417984008789062, 0.007432064056396484, 0.0073438401222229, 0.00750928020477295, 0.007713791847229004, 0.007435743808746338, 0.007473631858825684, 0.007550911903381348, 0.0074468798637390135, 0.007529407978057861, 0.008129504203796387, 0.007828896045684814, 0.008658592224121093, 0.0077955198287963864, 0.008214688301086426, 0.008052576065063476, 0.007617919921875, 0.007518847942352295, 0.007487135887145996, 0.007502240180969238, 0.00765667200088501, 0.007426784038543701, 0.007335328102111817, 0.007426655769348144, 0.007407135963439941, 0.0073991999626159665, 0.007436992168426514, 0.0073432960510253905, 0.007502016067504883, 0.0075491518974304195, 0.0073545918464660645, 0.007388415813446045, 0.007375840187072754, 0.007378911972045899, 0.007441792011260986, 0.007531231880187989, 0.007448480129241943, 0.007376992225646973, 0.007464128017425537, 0.00721068811416626, 0.0075138239860534665, 0.0077890238761901855, 0.007802976131439209, 
0.0076984639167785645, 0.0076409921646118165, 0.007642816066741944, 0.007541152000427246, 0.00752403211593628, 0.007395648002624512, 0.007436351776123047, 0.007424960136413574, 0.0077448000907897946, 0.007968063831329347, 0.007455488204956054, 0.007376512050628662, 0.007483424186706543, 0.007385503768920898, 0.007255167961120606, 0.007293407917022705, 0.007327616214752197, 0.007577055931091308, 0.007236480236053467, 0.007345952033996582, 0.007266304016113281, 0.007286335945129395, 0.007301568031311035, 0.00729702377319336, 0.0073638720512390135, 0.007262944221496582, 0.00723967981338501, 0.007368703842163086, 0.007585887908935547, 0.007423967838287354, 0.007331552028656006, 0.007347648143768311, 0.007254559993743897, 0.007344543933868408, 0.007354207992553711, 0.007265791893005371, 0.0073077759742736816, 0.007325471878051758, 0.0072911038398742675, 0.007281824111938477, 0.007297887802124024, 0.007235583782196045, 0.0072902398109436035, 0.007362400054931641, 0.0073220481872558595, 0.007303711891174317, 0.0073029761314392086, 0.007231679916381836, 0.007313216209411621, 0.007252255916595459, 0.007280352115631103, 0.007394976139068603, 0.007352223873138428, 0.0073034238815307614, 0.007295167922973632, 0.007325695991516113, 0.007311423778533935, 0.007460671901702881, 0.007637119770050048, 0.0072540159225463864, 0.007509024143218994, 0.007330368041992188, 0.007368832111358642, 0.007323008060455322, 0.007334271907806397, 0.0073200960159301755, 0.007329792022705078, 0.007258111953735351, 0.0073400321006774905, 0.0073136000633239745, 0.007216959953308105, 0.007366655826568603, 0.007575551986694336, 0.007360767841339111, 0.007368095874786377, 0.0074237117767333985, 0.007235424041748047, 0.007398176193237305, 0.007306591987609863, 0.007262879848480224, 0.007374847888946533, 0.007353759765625, 0.007383647918701172, 0.00742195177078247, 0.007387135982513428, 0.007514111995697022, 0.007456768035888672, 0.0074354238510131836, 0.007454912185668946, 0.007629471778869629, 0.007533823966979981, 0.007430912017822266, 0.0075075201988220215, 0.0075166082382202145, 0.007356416225433349, 0.007381184101104736, 0.007579455852508545, 0.007545023918151855, 0.007685952186584473, 0.0075504322052001955, 0.007454400062561035, 0.007463712215423584, 0.0074670081138610836, 0.007420095920562744, 0.007548160076141357, 0.0073406720161437984, 0.007365632057189942, 0.007397632122039795, 0.007426752090454102, 0.007518271923065186, 0.007794688224792481, 0.007673791885375977, 0.007768095970153809, 0.007664703845977784, 0.0077465281486511234, 0.007794688224792481, 0.007573311805725098, 0.007526591777801513, 0.007417664051055908, 0.007506112098693848, 0.007343776226043701, 0.007398848056793213, 0.0069807682037353515, 0.007290080070495606, 0.0072837119102478025, 0.007301055908203125, 0.007376832008361816, 0.0072549118995666505, 0.007325024127960205, 0.007311520099639893, 0.007230271816253662, 0.008398015975952149, 0.008816927909851073, 0.007380224227905273, 0.007332064151763916, 0.007327775955200195, 0.0072854399681091305, 0.007370751857757568, 0.007405504226684571, 0.007343776226043701, 0.007299488067626953, 0.007325056076049804, 0.007511775970458984, 0.007422880172729492, 0.007346176147460938, 0.007333568096160889, 0.007307583808898926, 0.00732147216796875, 0.007284095764160156, 0.007586080074310303, 0.007315936088562011, 0.007264256000518799, 0.007346176147460938, 0.007463967800140381, 0.007205855846405029, 0.007237599849700928, 0.00721235179901123, 0.007177184104919434, 0.007196415901184082, 0.007237631797790528, 
0.0072540159225463864, 0.0072540159225463864, 0.007507711887359619, 0.007299071788787842, 0.007208640098571777, 0.007244383811950684, 0.0072457919120788574, 0.007360511779785156, 0.0072540159225463864, 0.007356416225433349, 0.00753657579421997, 0.007551392078399658, 0.007591648101806641, 0.007409023761749268, 0.0072520642280578616, 0.007258592128753662, 0.007266560077667236, 0.007313151836395263, 0.00732755184173584, 0.007307007789611817, 0.007379392147064209, 0.007530176162719727, 0.007297344207763672, 0.00722876787185669, 0.00724560022354126, 0.006895679950714112, 0.007477248191833496, 0.007284512042999267, 0.007276415824890137, 0.0076332478523254395, 0.007233215808868408, 0.007329728126525879, 0.007225728034973145, 0.007260255813598633, 0.007233312129974365, 0.007264095783233643, 0.007459104061126709, 0.00733132791519165, 0.0074338560104370115, 0.007240575790405273, 0.0072286720275878905, 0.007205056190490723, 0.007469632148742676, 0.00733135986328125, 0.007315936088562011, 0.007376863956451416, 0.007428127765655518, 0.007286784172058106, 0.007429152011871338, 0.007581759929656982, 0.00756828784942627, 0.007563136100769043, 0.0075994558334350586, 0.007516928195953369, 0.007454751968383789, 0.007464352130889892, 0.007408415794372559, 0.0073807997703552244, 0.0073391680717468264, 0.007671743869781494, 0.007598048210144043, 0.007764095783233642, 0.007788735866546631, 0.007442880153656006, 0.007499680042266845, 0.007657760143280029, 0.007662784099578857, 0.0075907201766967775, 0.007591360092163086, 0.007795263767242431, 0.008085280418395997, 0.007733727931976318, 0.007673024177551269, 0.007584320068359375, 0.007526495933532715, 0.007652480125427246, 0.0077105917930603025, 0.007495967864990234, 0.007627647876739502, 0.007837728023529052, 0.007828192234039306, 0.007867392063140868, 0.0077601919174194334, 0.007667391777038574, 0.007626656055450439, 0.0076904001235961915, 0.007728288173675537, 0.007711328029632568, 0.007708672046661377, 0.007680384159088135, 0.007684031963348389, 0.007593696117401123, 0.007525951862335205, 0.007535136222839355, 0.007477312088012695, 0.007462143898010254, 0.007467967987060547, 0.007370751857757568, 0.007401472091674805, 0.0073175039291381834, 0.007292928218841553, 0.007235583782196045, 0.007510015964508057, 0.0073497920036315915, 0.007286623954772949, 0.007254655838012695, 0.007212800025939941, 0.007209504127502441, 0.007240575790405273, 0.00725055980682373, 0.007277056217193604, 0.007245535850524902, 0.007186431884765625, 0.00721727991104126, 0.00722441577911377, 0.00738918399810791, 0.007432415962219239, 0.0073357439041137696, 0.007319808006286621, 0.007299615859985352, 0.007671679973602295, 0.007313119888305664, 0.007493728160858154, 0.007323200225830078, 0.007345920085906983, 0.0073062081336975095, 0.007307456016540527, 0.0072325439453125, 0.007244416236877441, 0.0074791679382324215, 0.00726796817779541, 0.007196640014648438, 0.007226399898529053, 0.0072434239387512206, 0.007243008136749268, 0.00726035213470459, 0.007303743839263916, 0.007316864013671875, 0.007273087978363037, 0.007216671943664551, 0.007215583801269531, 0.0071844801902771, 0.00748092794418335, 0.007243231773376465, 0.007234079837799072, 0.007190847873687744, 0.007188511848449707, 0.007506976127624512, 0.007273439884185791, 0.007309343814849854, 0.0072969598770141605, 0.006955999851226807, 0.007212448120117188, 0.007238016128540039, 0.007211040019989014, 0.00723964786529541, 0.007214560031890869, 0.0072484159469604495, 0.0072509760856628415, 0.007187424182891845, 0.0072308797836303715, 
0.00725875186920166, 0.007464831829071045, 0.007387231826782226, 0.007260159969329834, 0.007226399898529053, 0.007379615783691406, 0.0076332159042358395, 0.007513440132141113, 0.007459296226501465, 0.007360608100891113, 0.007251935958862305, 0.007215231895446777, 0.007346176147460938, 0.007266016006469727, 0.007204192161560059, 0.007192671775817871, 0.00720579195022583, 0.007253824234008789, 0.007303296089172363, 0.007237631797790528, 0.007234591960906983, 0.007244800090789795, 0.007366623878479004, 0.007234687805175781, 0.007240575790405273, 0.007225344181060791, 0.007219200134277344, 0.007200384140014648, 0.00725439977645874, 0.007243775844573975, 0.007280640125274658, 0.007201791763305664, 0.007355040073394776, 0.007410016059875488, 0.007264256000518799, 0.007220704078674317, 0.007209504127502441, 0.007184383869171143, 0.007372799873352051, 0.007242015838623047, 0.007397247791290283, 0.007393119812011718, 0.007243904113769531, 0.007138336181640625, 0.007150271892547608, 0.007184544086456299, 0.007195839881896973, 0.0071790719032287596, 0.007253312110900879, 0.007238048076629639, 0.007393055915832519, 0.007197184085845947, 0.007196256160736084, 0.00692249584197998, 0.007130559921264649, 0.007236159801483154, 0.0072674241065979, 0.007295904159545899, 0.007251967906951904, 0.007204512119293213, 0.007189856052398681, 0.007322527885437012, 0.007457888126373291, 0.0073985280990600585, 0.007369823932647705, 0.007322368144989014, 0.007276735782623291, 0.007257919788360595, 0.007352352142333984, 0.007442431926727295, 0.007356448173522949, 0.0074997439384460445, 0.00735964822769165, 0.00737286376953125, 0.007379551887512207, 0.007686336040496827, 0.007523392200469971, 0.007425119876861572, 0.007308320045471191, 0.007232319831848144, 0.007241727828979493, 0.007206079959869385, 0.007202879905700683, 0.007187295913696289, 0.007152607917785644, 0.007193535804748535, 0.007198719978332519, 0.00718342399597168, 0.007189055919647217, 0.007344384193420411, 0.007250048160552978, 0.007179488182067871, 0.007223711967468262, 0.007297215938568115, 0.007198912143707275, 0.007198719978332519, 0.007312992095947266, 0.007209375858306884, 0.007223296165466309, 0.007226816177368164, 0.007172671794891358, 0.007243584156036377, 0.007246016025543213, 0.0072005119323730465, 0.007212575912475586, 0.00715008020401001, 0.0073602237701416015, 0.007297535896301269, 0.007216544151306153, 0.007207520008087158, 0.007215136051177978, 0.007262176036834717, 0.0072111678123474125, 0.007192319869995118, 0.007215199947357177, 0.007148960113525391]",tokens/s,136.19903682999262,, float16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, 
in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = 
flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. ",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, bfloat16-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,bfloat16,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,809.648128,763.232256,0.0,360.710144,345.493504,s,1,8.685005859375,8.685005859375,0.0,8.685005859375,8.685005859375,8.685005859375,8.685005859375,[8.685005859375],,kWh,2.7809221124992214e-06,2.995342207839588e-07,9.233340720004278e-07,4.003790405283608e-06,,MB,1174.704128,775.815168,0.0,362.807296,344.082944,s,17,0.4032558078765869,0.023720929875093347,0.0004658086047884976,0.023610368728637695,0.02393660774230957,0.024274283981323242,0.025251016159057615,"[0.02549519920349121, 0.023625951766967773, 0.023540319442749022, 0.02353443145751953, 0.023682592391967773, 0.023610368728637695, 0.02342531204223633, 0.023617120742797853, 0.023514848709106445, 0.02396905517578125, 0.02360211181640625, 0.023644479751586914, 0.023511199951171874, 0.023436832427978515, 0.023659423828125, 0.02347158432006836, 0.023914976119995116]",tokens/s,10792.157025378525,kWh,8.092336025854164e-07,8.919051348769022e-08,5.352335768476458e-07,1.4336576929207523e-06,tokens/kWh,178564242.54136848,MB,1214.050304,800.980992,0.0,387.97312,344.085504,s,17,9.691316406250001,0.5700774356617648,0.002770446575745793,0.5700615234375,0.5738010620117188,0.5747426879882812,0.5748063989257812,"[0.5670534057617187, 0.5700615234375, 0.5693871459960937, 0.5720411987304688, 0.5721550903320313, 0.565869873046875, 0.56830029296875, 0.5675416259765625, 0.5727633056640625, 0.570430908203125, 0.5695615844726563, 0.5660501708984375, 0.5747227783203125, 0.5700859375, 0.5672826538085938, 0.5748223266601562, 0.5731865844726562]",tokens/s,110.5113026037726,kWh,1.611985195366458e-05,1.7777970234750955e-06,6.740512984485694e-06,2.463816196162537e-05,tokens/kWh,2557008.923722649,,s,1071,9.682578884124746,0.009040689901143562,0.00018576921177131952,0.008988896369934081,0.009203519821166992,0.009331104278564453,0.009906124973297117,"[0.00883801555633545, 0.00911740779876709, 0.009056032180786133, 0.009021151542663574, 0.009085087776184083, 0.008987104415893555, 0.008966303825378419, 0.0090250244140625, 0.009027071952819824, 0.009021599769592286, 
0.008938048362731934, 0.008947711944580078, 0.008969311714172363, 0.008974687576293945, 0.0089301118850708, 0.008980256080627441, 0.008936544418334962, 0.008968544006347657, 0.008898336410522461, 0.008913791656494141, 0.008919136047363281, 0.009012319564819337, 0.008929023742675781, 0.008940704345703125, 0.00895564842224121, 0.009133536338806153, 0.008991616249084473, 0.008937151908874511, 0.00895584011077881, 0.008988736152648925, 0.008973407745361327, 0.008958175659179687, 0.008952256202697753, 0.008940032005310058, 0.00903551959991455, 0.009084927558898925, 0.009140224456787109, 0.009172543525695801, 0.009064736366271973, 0.008992927551269531, 0.00903286361694336, 0.009001343727111817, 0.008935584068298339, 0.008922847747802734, 0.008915552139282227, 0.00899891185760498, 0.00895206356048584, 0.008949503898620606, 0.00895139217376709, 0.009167263984680176, 0.009068863868713379, 0.008994688034057617, 0.00897766399383545, 0.008979007720947266, 0.009016480445861817, 0.008956768035888673, 0.00908463954925537, 0.009064703941345216, 0.009005087852478028, 0.008976384162902832, 0.008972127914428711, 0.008982751846313477, 0.008982399940490722, 0.008689663887023925, 0.009072192192077637, 0.009131999969482421, 0.009137951850891113, 0.009109439849853516, 0.009079135894775391, 0.009058591842651367, 0.009031423568725586, 0.009031935691833496, 0.009086912155151367, 0.00913167953491211, 0.009023167610168458, 0.009029984474182129, 0.008978943824768066, 0.009029631614685058, 0.009090880393981933, 0.009054400444030762, 0.009242624282836913, 0.00911359977722168, 0.00898579216003418, 0.009135040283203125, 0.009026944160461426, 0.009118207931518555, 0.009260064125061035, 0.009095616340637207, 0.009060895919799805, 0.00903551959991455, 0.008969535827636719, 0.009077695846557618, 0.009011296272277832, 0.009023391723632812, 0.008996864318847657, 0.00898691177368164, 0.009015007972717285, 0.008976160049438476, 0.008926912307739258, 0.008963968276977539, 0.008966815948486329, 0.008941663742065429, 0.009201087951660157, 0.009152928352355956, 0.009062463760375977, 0.009155903816223145, 0.009098976135253907, 0.009029727935791015, 0.008986880302429199, 0.008991583824157715, 0.008987872123718262, 0.009028160095214843, 0.00899068832397461, 0.00898256015777588, 0.008919039726257324, 0.008996864318847657, 0.009050111770629882, 0.00903987216949463, 0.009054368019104003, 0.009226304054260254, 0.009017120361328125, 0.009024991989135742, 0.009026080131530763, 0.008943615913391113, 0.008966143608093263, 0.008941023826599122, 0.008685888290405274, 0.008992768287658692, 0.00898252773284912, 0.008974080085754394, 0.008932671546936034, 0.009005279541015624, 0.008947423934936524, 0.008991840362548829, 0.008988832473754883, 0.00900438404083252, 0.00896451187133789, 0.008954943656921387, 0.008940256118774415, 0.008994624137878418, 0.008937567710876464, 0.009027968406677246, 0.008964032173156737, 0.008966143608093263, 0.00898252773284912, 0.008974495887756348, 0.008984448432922364, 0.008990143775939942, 0.008942367553710938, 0.00897219181060791, 0.009159520149230957, 0.009102208137512207, 0.009103679656982422, 0.009099072456359863, 0.009277440071105958, 0.009520511627197265, 0.009620096206665039, 0.009256959915161133, 0.009300224304199219, 0.00939139175415039, 0.00919001579284668, 0.009140416145324707, 0.009070176124572754, 0.009035807609558105, 0.009184576034545899, 0.009011936187744141, 0.008966015815734864, 0.008931391716003418, 0.008945919990539552, 0.008968000411987304, 0.009047519683837891, 0.008954655647277833, 
0.008970335960388183, 0.008959712028503418, 0.008976320266723632, 0.008939552307128906, 0.009008895874023438, 0.008984736442565918, 0.008970303535461426, 0.00892518424987793, 0.008953696250915527, 0.008969535827636719, 0.008946528434753418, 0.009037823677062988, 0.008935263633728027, 0.00909712028503418, 0.008946080207824707, 0.008949055671691894, 0.008911392211914063, 0.008837056159973145, 0.008959168434143067, 0.008942208290100097, 0.009306495666503906, 0.009113471984863281, 0.009114720344543458, 0.009120415687561036, 0.009208064079284668, 0.009104991912841797, 0.009239168167114257, 0.009192543983459473, 0.00908086395263672, 0.009006815910339356, 0.009126848220825195, 0.009237631797790528, 0.009157312393188477, 0.009061792373657226, 0.008964896202087402, 0.009006719589233398, 0.008996864318847657, 0.00903548812866211, 0.009009087562561035, 0.00903001594543457, 0.00908944034576416, 0.009060192108154296, 0.009021535873413086, 0.009246720314025878, 0.009030688285827636, 0.009171903610229492, 0.009115903854370118, 0.009035679817199707, 0.00918511962890625, 0.009420831680297852, 0.009409791946411132, 0.009515583992004395, 0.009469696044921875, 0.00924499225616455, 0.009063648223876954, 0.008979680061340332, 0.009053024291992187, 0.008965312004089355, 0.008971936225891113, 0.00899071979522705, 0.008980480194091797, 0.008930399894714355, 0.00893126392364502, 0.008918335914611816, 0.009033568382263183, 0.00893727970123291, 0.008926688194274902, 0.009130528450012207, 0.009019392013549805, 0.0089303035736084, 0.008952832221984864, 0.008951871871948243, 0.008978367805480957, 0.008979583740234375, 0.008968223571777343, 0.008934240341186523, 0.00902143955230713, 0.009127296447753907, 0.008989312171936036, 0.008978431701660156, 0.00872447967529297, 0.00895747184753418, 0.008984352111816406, 0.008958368301391602, 0.008973983764648438, 0.008927871704101562, 0.008913920402526856, 0.008941951751708985, 0.00896399974822998, 0.009134367942810058, 0.008959775924682617, 0.008999679565429687, 0.009044896125793457, 0.009103839874267577, 0.009029312133789063, 0.009001503944396973, 0.008995424270629883, 0.008951647758483887, 0.009002880096435547, 0.00891318416595459, 0.00893836784362793, 0.009081567764282227, 0.009009023666381837, 0.008915200233459472, 0.008963871955871582, 0.008910592079162599, 0.008922911643981934, 0.008925951957702637, 0.009025216102600098, 0.009001184463500976, 0.008996864318847657, 0.009013376235961913, 0.009167903900146485, 0.009212800025939941, 0.009153696060180664, 0.009063263893127442, 0.00901852798461914, 0.008969344139099122, 0.008972000122070313, 0.008984064102172852, 0.008972800254821778, 0.009164159774780273, 0.00901961612701416, 0.008972415924072266, 0.00899510383605957, 0.008965632438659669, 0.008931648254394532, 0.008945856094360351, 0.009018655776977538, 0.009123647689819336, 0.010275872230529785, 0.009394047737121582, 0.011168928146362304, 0.009109951972961427, 0.009045791625976563, 0.009050751686096191, 0.009008895874023438, 0.008980735778808594, 0.0089334077835083, 0.00964195156097412, 0.009204768180847167, 0.009038816452026368, 0.00897049617767334, 0.008675488471984863, 0.008997471809387207, 0.008975711822509766, 0.008948384284973144, 0.008953503608703613, 0.008948063850402832, 0.008945568084716797, 0.008935296058654785, 0.008943840026855469, 0.008999008178710938, 0.008986528396606446, 0.008912991523742676, 0.009042911529541016, 0.008977343559265136, 0.0089169921875, 0.008955167770385743, 0.008936063766479492, 0.009062496185302735, 0.008916416168212891, 0.008962335586547852, 
0.008984864234924317, 0.008994848251342773, 0.00894803237915039, 0.008996512413024902, 0.008955904006958008, 0.009015199661254882, 0.00894371223449707, 0.00893337631225586, 0.008905983924865723, 0.008974592208862304, 0.008942208290100097, 0.00892300796508789, 0.008977824211120606, 0.009023903846740722, 0.00897439956665039, 0.008976351737976075, 0.008988832473754883, 0.009076576232910156, 0.00906771183013916, 0.00899500846862793, 0.008980863571166993, 0.008994720458984374, 0.008953887939453125, 0.008942048072814941, 0.008967519760131836, 0.0089965763092041, 0.008958847999572753, 0.00895308780670166, 0.009071136474609375, 0.008988096237182617, 0.008938336372375488, 0.0089303035736084, 0.008912927627563476, 0.008919232368469239, 0.008968064308166504, 0.008987551689147949, 0.008922719955444336, 0.008993184089660645, 0.00944495964050293, 0.008970944404602051, 0.008985759735107422, 0.00892579174041748, 0.009081088066101074, 0.008693887710571288, 0.009017855644226073, 0.008978431701660156, 0.009123680114746093, 0.00893945598602295, 0.008945887565612792, 0.008971648216247559, 0.009001600265502929, 0.008922752380371093, 0.009523584365844726, 0.00902348804473877, 0.009119744300842286, 0.008951904296875, 0.008998623847961426, 0.008984000205993652, 0.0089935359954834, 0.009013248443603515, 0.008929280281066895, 0.008925536155700683, 0.008949407577514648, 0.009009152412414552, 0.008970239639282226, 0.008935423851013183, 0.00900220775604248, 0.008917152404785156, 0.008943936347961427, 0.008951199531555177, 0.009097567558288574, 0.009000639915466308, 0.008970975875854492, 0.009830080032348632, 0.009167424201965332, 0.009009056091308594, 0.009029120445251464, 0.009020128250122071, 0.008982303619384766, 0.008996864318847657, 0.009045503616333007, 0.009054400444030762, 0.009076416015625, 0.009028223991394043, 0.008962047576904298, 0.009035584449768066, 0.008941984176635743, 0.008951552391052246, 0.008969632148742676, 0.008977024078369141, 0.008923135757446288, 0.009052160263061524, 0.008931136131286621, 0.008996064186096192, 0.00893836784362793, 0.009048352241516113, 0.008994624137878418, 0.009009344100952148, 0.008924832344055176, 0.008988800048828125, 0.009051712036132812, 0.009009632110595703, 0.009156864166259765, 0.008971296310424804, 0.008943872451782226, 0.008986880302429199, 0.008869407653808593, 0.009038304328918457, 0.009041919708251953, 0.009101311683654785, 0.009080415725708007, 0.00898681640625, 0.009042400360107422, 0.008990015983581543, 0.008928895950317383, 0.008975168228149415, 0.008974207878112792, 0.009092639923095704, 0.008939552307128906, 0.008966560363769532, 0.008965951919555664, 0.009026016235351562, 0.009070655822753907, 0.008921152114868163, 0.008910911560058594, 0.009358367919921874, 0.008952447891235352, 0.009111583709716797, 0.009096768379211426, 0.009021120071411133, 0.00896281623840332, 0.008939519882202148, 0.009010175704956054, 0.008979455947875976, 0.008929247856140137, 0.008947744369506835, 0.008969568252563476, 0.009038496017456054, 0.009033727645874023, 0.008937472343444825, 0.00929792022705078, 0.008953856468200684, 0.008962047576904298, 0.008949600219726562, 0.009050271987915038, 0.008976384162902832, 0.008947199821472167, 0.00894153594970703, 0.008944224357604981, 0.00904531192779541, 0.00889305591583252, 0.00892240047454834, 0.008903615951538086, 0.008992735862731933, 0.00891654396057129, 0.008972543716430664, 0.0089169921875, 0.009002976417541504, 0.008898271560668945, 0.008933695793151855, 0.008953568458557128, 0.009016672134399413, 0.00895036792755127, 
0.00934665584564209, 0.008973055839538575, 0.009068544387817384, 0.009092320442199707, 0.008984992027282715, 0.008993023872375489, 0.008859423637390137, 0.009015520095825196, 0.008964096069335938, 0.008925056457519532, 0.008972000122070313, 0.008904704093933105, 0.008978207588195801, 0.008952447891235352, 0.008983936309814453, 0.008966367721557617, 0.009032095909118652, 0.009523200035095216, 0.009230239868164063, 0.009068160057067872, 0.00897708797454834, 0.009074463844299316, 0.00902086353302002, 0.008992544174194336, 0.009454367637634277, 0.009064448356628419, 0.009041919708251953, 0.009009152412414552, 0.00900716781616211, 0.00893945598602295, 0.008923263549804687, 0.00890176010131836, 0.008873056411743165, 0.008967840194702148, 0.00906595230102539, 0.009120287895202637, 0.009311776161193847, 0.009338720321655274, 0.009237119674682617, 0.009606240272521972, 0.009408831596374512, 0.00932249641418457, 0.00929366397857666, 0.009204607963562011, 0.00915443229675293, 0.009236479759216308, 0.00908016014099121, 0.009007295608520509, 0.008942303657531738, 0.008935168266296386, 0.00897599983215332, 0.009091327667236328, 0.009246848106384277, 0.0093306884765625, 0.009298048019409179, 0.009275263786315917, 0.00925926399230957, 0.00916163158416748, 0.009089887619018555, 0.008984000205993652, 0.008964799880981445, 0.008926943778991699, 0.008956255912780762, 0.008945599555969238, 0.008966015815734864, 0.009107456207275391, 0.008922911643981934, 0.008900832176208497, 0.008972319602966308, 0.008717727661132813, 0.009038368225097656, 0.00899078369140625, 0.009031647682189942, 0.008943615913391113, 0.00894758415222168, 0.0089965763092041, 0.009011648178100585, 0.008933216094970704, 0.008965888023376466, 0.008923168182373048, 0.008943743705749511, 0.008922656059265137, 0.008919936180114746, 0.009277279853820802, 0.00900710391998291, 0.009191424369812011, 0.008970239639282226, 0.008947392463684083, 0.008952128410339355, 0.008929280281066895, 0.008937664031982422, 0.008933152198791504, 0.008923168182373048, 0.008942879676818848, 0.00891977596282959, 0.009011199951171875, 0.008988287925720215, 0.00917849636077881, 0.009014368057250977, 0.008939423561096192, 0.008962047576904298, 0.008955904006958008, 0.00968892765045166, 0.009171104431152344, 0.008969535827636719, 0.008907456398010254, 0.009029631614685058, 0.009005056381225587, 0.009036992073059082, 0.00897926425933838, 0.008900351524353027, 0.009000351905822754, 0.009029696464538575, 0.008989407539367675, 0.008949824333190918, 0.00899846363067627, 0.009062623977661133, 0.008959775924682617, 0.008972736358642578, 0.0089169921875, 0.009103360176086426, 0.009275615692138672, 0.009924672126770019, 0.010090432167053222, 0.009225567817687989, 0.009083328247070312, 0.009261055946350098, 0.009152511596679687, 0.00896828842163086, 0.009027520179748534, 0.008992447853088378, 0.008999199867248535, 0.008930527687072753, 0.009287487983703614, 0.008995871543884277, 0.008970335960388183, 0.00890454387664795, 0.009000960350036622, 0.008984288215637208, 0.008976767539978028, 0.009098976135253907, 0.009035967826843261, 0.009002240180969238, 0.008966879844665527, 0.008933088302612305, 0.009017663955688477, 0.008927136421203614, 0.00897267246246338, 0.008924896240234376, 0.009055328369140626, 0.008945887565612792, 0.008962752342224121, 0.00894156837463379, 0.008961471557617187, 0.008996671676635742, 0.008982943534851075, 0.00916540813446045, 0.009040927886962891, 0.008946271896362304, 0.00889583969116211, 0.008932479858398438, 0.008973088264465332, 0.008878975868225098, 
0.008945280075073243, 0.008919424057006835, 0.008986432075500489, 0.008945856094360351, 0.00897436809539795, 0.008967647552490235, 0.008990976333618164, 0.008968416213989258, 0.008947744369506835, 0.00896720027923584, 0.008925919532775878, 0.009221983909606933, 0.009075103759765625, 0.009070591926574707, 0.008953856468200684, 0.009355263710021973, 0.009150239944458008, 0.009004927635192872, 0.010159616470336913, 0.008955039978027345, 0.0089717435836792, 0.009107135772705078, 0.008970080375671386, 0.008991264343261718, 0.009062560081481933, 0.009027584075927735, 0.00912515163421631, 0.008980959892272948, 0.009368063926696778, 0.008978143692016601, 0.008953151702880859, 0.009400704383850097, 0.008725215911865234, 0.008998847961425782, 0.009002559661865235, 0.009058688163757324, 0.009010527610778809, 0.00894223976135254, 0.008974016189575195, 0.009072832107543945, 0.009043999671936035, 0.009017536163330078, 0.008966048240661622, 0.0089334077835083, 0.009015232086181641, 0.008962112426757813, 0.008943455696105957, 0.009021023750305175, 0.009003552436828613, 0.008930624008178711, 0.008946368217468262, 0.008959456443786621, 0.009132575988769532, 0.008962047576904298, 0.00890988826751709, 0.008958623886108399, 0.00893776035308838, 0.008924351692199708, 0.008933183670043945, 0.008954879760742187, 0.009092448234558105, 0.009033984184265136, 0.008970656394958497, 0.008904704093933105, 0.008947903633117676, 0.009042880058288575, 0.00892751979827881, 0.008893024444580079, 0.008943103790283203, 0.009078783988952637, 0.009175456047058106, 0.009121631622314453, 0.009043392181396484, 0.008947967529296875, 0.008947967529296875, 0.00894803237915039, 0.009056256294250489, 0.009093119621276855, 0.008987872123718262, 0.008935903549194336, 0.00895411205291748, 0.0089967041015625, 0.008956128120422364, 0.008949407577514648, 0.008956416130065918, 0.00892092800140381, 0.00892950439453125, 0.008908576011657714, 0.009037664413452148, 0.008913311958312988, 0.008940608024597167, 0.00888259220123291, 0.008935711860656738, 0.009023200035095215, 0.008922528266906739, 0.008644096374511719, 0.008897024154663086, 0.008994720458984374, 0.008964127540588378, 0.00893881607055664, 0.008960991859436036, 0.008959775924682617, 0.00894976043701172, 0.009305919647216797, 0.008993215560913086, 0.009007904052734375, 0.008976767539978028, 0.008918656349182128, 0.008934368133544921, 0.009025535583496093, 0.009177151679992676, 0.00924995231628418, 0.009491231918334961, 0.009313664436340331, 0.00909990406036377, 0.009039423942565917, 0.009085375785827637, 0.00904310417175293, 0.009524288177490235, 0.009123295783996581, 0.009492671966552735, 0.009261152267456055, 0.009185312271118164, 0.009158656120300293, 0.01024937629699707, 0.009343839645385743, 0.009320575714111328, 0.009232255935668945, 0.00910540771484375, 0.009017279624938965, 0.00898464012145996, 0.00901529598236084, 0.00901734447479248, 0.00920364761352539, 0.008980031967163085, 0.00902400016784668, 0.009189375877380371, 0.008945664405822755, 0.008976384162902832, 0.009058272361755371, 0.008971455574035645, 0.008952768325805664, 0.008949503898620606, 0.009494239807128907, 0.00905465602874756, 0.009100831985473633, 0.009167327880859374, 0.00898252773284912, 0.008923135757446288, 0.008927231788635253, 0.008930591583251954, 0.008960960388183593, 0.009119423866271972, 0.008939616203308106, 0.00899891185760498, 0.009098464012145997, 0.0100382080078125, 0.00920729637145996, 0.008682239532470703, 0.008926527976989746, 0.008991168022155762, 0.009030207633972168, 0.009063232421875, 
0.00900806427001953, 0.0091278076171875, 0.00917743968963623, 0.009129823684692383, 0.00899888038635254, 0.00895702362060547, 0.00895241641998291, 0.009007455825805664, 0.008962047576904298, 0.008937536239624023, 0.008951744079589844, 0.008973759651184082, 0.008952383995056152, 0.009281696319580078, 0.009173983573913575, 0.008999615669250489, 0.008974528312683106, 0.008970239639282226, 0.009028672218322754, 0.008977248191833495, 0.009123359680175781, 0.008972864151000976, 0.00895132827758789, 0.008954336166381836, 0.008902655601501466, 0.008929471969604492, 0.008947232246398925, 0.008984928131103515, 0.008929216384887695, 0.008963456153869629, 0.008925824165344239, 0.009013248443603515, 0.0089518404006958, 0.008916959762573243, 0.008928768157958985, 0.009072768211364745, 0.009012800216674804, 0.008964320182800294, 0.008928000450134277, 0.009006048202514648, 0.009950079917907716, 0.010856800079345703, 0.00913987159729004, 0.00905958366394043, 0.009079551696777344, 0.009025535583496093, 0.008971776008605957, 0.008946335792541504, 0.008990240097045899, 0.008947903633117676, 0.008951935768127441, 0.008988256454467774, 0.009127519607543945, 0.008990912437438965, 0.008956352233886718, 0.008952192306518555, 0.009071904182434082, 0.00896668815612793, 0.0087741117477417, 0.009019392013549805, 0.009343008041381835, 0.008974080085754394, 0.009015520095825196, 0.009000096321105957, 0.008988896369934081, 0.008964799880981445, 0.00909443187713623, 0.008960864067077638, 0.008982175827026368, 0.008958111763000489, 0.008949824333190918, 0.008948800086975097, 0.008978495597839356, 0.008975168228149415, 0.008942720413208007, 0.008907648086547851, 0.009181119918823243, 0.008943039894104004, 0.009114239692687988, 0.008984383583068847, 0.008941727638244629, 0.008906847953796386, 0.008994751930236817, 0.009027423858642579, 0.00901574420928955, 0.008998623847961426, 0.00897433567047119, 0.008972288131713867, 0.008978431701660156, 0.008937472343444825, 0.008926207542419434, 0.008944000244140625, 0.008965056419372558, 0.008965824127197266, 0.008920351982116699, 0.009081567764282227, 0.00927455997467041, 0.009331520080566407, 0.009043968200683594, 0.008938688278198242, 0.008969023704528808, 0.008921312332153321, 0.008949664115905762, 0.009754495620727539, 0.008992863655090331, 0.008912639617919922, 0.008925472259521484, 0.008939295768737794, 0.008921183586120606, 0.008924351692199708, 0.008907648086547851, 0.008875967979431153, 0.00897763156890869, 0.00892188835144043, 0.009007136344909667, 0.008912063598632813, 0.00896236801147461, 0.009033663749694824, 0.008897055625915527, 0.008931520462036133, 0.009010592460632324, 0.00964358425140381, 0.009404607772827148, 0.009353471755981444, 0.009221471786499024, 0.009128095626831055, 0.009091584205627442, 0.009070943832397461, 0.00900879955291748, 0.008978400230407715, 0.008998944282531738, 0.00899071979522705, 0.008935423851013183, 0.00908073616027832, 0.008994688034057617, 0.008987968444824218, 0.009117631912231446, 0.008986847877502441, 0.008995583534240723, 0.008888319969177246, 0.008971296310424804, 0.008971232414245606, 0.009041536331176757, 0.008987008094787597, 0.00894976043701172, 0.008956000328063965, 0.008970144271850587, 0.008976384162902832, 0.008962240219116211, 0.008930656433105469, 0.009191904067993164, 0.009113087654113769, 0.009189888000488282, 0.00993280029296875, 0.009552191734313965, 0.009461440086364747, 0.009490367889404296, 0.009399840354919434, 0.009212448120117187, 0.009154463768005371, 0.009011072158813476, 0.009039263725280761, 
0.009171551704406738, 0.009175104141235351, 0.009060511589050293, 0.009037440299987792, 0.009102784156799316, 0.009055168151855468, 0.00922214412689209, 0.009278752326965333, 0.009197343826293945, 0.00904646396636963, 0.009031968116760254, 0.00902342414855957, 0.008976160049438476, 0.00912435245513916, 0.009017087936401368, 0.008956159591674805, 0.008996864318847657, 0.008923135757446288, 0.008924863815307616, 0.009005375862121582, 0.009016672134399413, 0.0095382080078125, 0.009898176193237304, 0.009987263679504395, 0.00977779197692871, 0.009099264144897461, 0.00904297637939453, 0.009118687629699708, 0.009045472145080566, 0.008991264343261718, 0.008949248313903809, 0.008978367805480957, 0.009005375862121582, 0.008966400146484374, 0.00894976043701172, 0.00902303981781006, 0.008960448265075683, 0.009029631614685058, 0.008931327819824218, 0.008927488327026367, 0.008952672004699708, 0.008973247528076172, 0.008984543800354005, 0.008987839698791505, 0.009024095535278321, 0.008972512245178223, 0.008973376274108887, 0.009143232345581054, 0.009060511589050293, 0.008937503814697266, 0.008918656349182128, 0.008939711570739747, 0.008994751930236817, 0.008986687660217286, 0.008978079795837403, 0.009005087852478028, 0.008978655815124512, 0.008911040306091309, 0.008935135841369629, 0.008915136337280274, 0.008914976119995118, 0.008961824417114258, 0.008915295600891113, 0.008945504188537597, 0.00897436809539795, 0.008970335960388183, 0.009275263786315917, 0.009132032394409179, 0.009203519821166992, 0.009106623649597167, 0.009241472244262696, 0.009441120147705078, 0.009220383644104003, 0.00912384033203125, 0.00911359977722168, 0.009098367691040039, 0.009048000335693359, 0.009063360214233398, 0.009104448318481445, 0.009126655578613281, 0.009242783546447754, 0.009434783935546875, 0.009316736221313477, 0.009203712463378906, 0.009168448448181153]",tokens/s,110.61102758026348,, float32-sdpa,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float32,True,False,,sdpa,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,808.648704,1128.136704,0.0,725.614592,666.338304,s,1,8.5063798828125,8.5063798828125,0.0,8.5063798828125,8.5063798828125,8.5063798828125,8.5063798828125,[8.5063798828125],,kWh,2.8833770291664485e-06,3.1052884687646534e-07,1.0300008240001109e-06,4.223906700043025e-06,,MB,1131.823104,1144.91392,0.0,731.906048,652.62848,s,23,0.36113961791992183,0.015701722518257473,0.0005259276167186959,0.015581664085388183,0.015703801345825195,0.015743958282470705,0.017622733745574954,"[0.01815171241760254, 0.015587776184082032, 0.015486207962036133, 0.015581664085388183, 0.015555680274963379, 0.015714207649230958, 0.01554800033569336, 0.0155546236038208, 0.015747263908386232, 0.015610112190246581, 0.015511263847351074, 0.01551897621154785, 0.01563468837738037, 0.0156364164352417, 0.01566217613220215, 0.015549728393554688, 0.015611616134643555, 0.015646207809448243, 0.01554150390625, 0.015575967788696288, 0.015558112144470214, 0.01555344009399414, 
0.015602272033691406]",tokens/s,16303.943704414034,kWh,5.873249253521144e-07,6.475696076958605e-08,3.8876568101005985e-07,1.0408475671317603e-06,tokens/kWh,245953401.9044242,MB,1143.656448,1191.051264,0.0,778.043392,652.63104,s,23,10.01610955810547,0.4354830242654551,0.0028478349757276407,0.43584521484375,0.43897933349609375,0.43951525268554686,0.4409448724365234,"[0.4314525146484375, 0.43269073486328125, 0.4310337219238281, 0.43289071655273437, 0.4413370361328125, 0.43679214477539063, 0.4378771667480469, 0.43615936279296874, 0.43584521484375, 0.4395544738769531, 0.4374628295898437, 0.4370532836914062, 0.4391622619628906, 0.4314674682617187, 0.4336787414550781, 0.43569174194335936, 0.43824761962890624, 0.43741000366210936, 0.4366462707519531, 0.4338517150878906, 0.433899169921875, 0.4348321533203125, 0.43107321166992185]",tokens/s,144.66694793962262,kWh,1.2274679773923227e-05,1.353703317865651e-06,6.149005249685616e-06,1.9777388341474494e-05,tokens/kWh,3185455.9819652643,,s,1449,10.00852402162551,0.006907193941770544,0.00015694529658662304,0.006864448070526123,0.007038630199432373,0.0071436160087585444,0.007662887668609619,"[0.006809216022491455, 0.00695139217376709, 0.006881631851196289, 0.006817279815673828, 0.0068635201454162595, 0.006840320110321045, 0.006827295780181885, 0.0067526397705078125, 0.006832479953765869, 0.00684441614151001, 0.006809599876403808, 0.0068379840850830076, 0.006807839870452881, 0.006768928050994873, 0.006829279899597168, 0.0068224000930786136, 0.006860256195068359, 0.006943264007568359, 0.007143424034118652, 0.006850560188293457, 0.006768064022064209, 0.006943295955657959, 0.006858463764190674, 0.006828288078308106, 0.006950943946838379, 0.006887423992156983, 0.006801727771759033, 0.006862815856933594, 0.006774496078491211, 0.006846816062927246, 0.006837920188903808, 0.006749887943267822, 0.006822207927703858, 0.006825984001159668, 0.006768223762512207, 0.00682751989364624, 0.006928832054138184, 0.006840799808502197, 0.006789120197296142, 0.006802976131439209, 0.006853087902069092, 0.006778880119323731, 0.0068280320167541505, 0.006948416233062744, 0.00681004810333252, 0.0067686400413513184, 0.00680515193939209, 0.006881792068481446, 0.006920032024383545, 0.00689356803894043, 0.006838272094726563, 0.006817791938781738, 0.006774784088134766, 0.0068278398513793945, 0.0068113279342651364, 0.006785535812377929, 0.00689356803894043, 0.00684611177444458, 0.0068765759468078615, 0.0068494720458984375, 0.006876192092895508, 0.006859744071960449, 0.006770688056945801, 0.006743775844573975, 0.006875423908233642, 0.006831583976745606, 0.006814239978790283, 0.006848351955413818, 0.006826144218444824, 0.006856287956237793, 0.006848159790039062, 0.006908671855926514, 0.0068436160087585445, 0.006850431919097901, 0.006888351917266846, 0.007096320152282715, 0.006883232116699218, 0.006857120037078858, 0.006887104034423828, 0.006825984001159668, 0.00685811185836792, 0.006822720050811768, 0.006845695972442627, 0.006895679950714112, 0.0070415358543396, 0.006893375873565674, 0.006875328063964844, 0.006793087959289551, 0.006839968204498291, 0.006818272113800049, 0.006823135852813721, 0.0068607678413391115, 0.006818624019622803, 0.006858751773834228, 0.006799359798431396, 0.006818016052246093, 0.006856480121612549, 0.006758399963378906, 0.007032512187957763, 0.0069491839408874516, 0.006821887969970703, 0.006840320110321045, 0.006856512069702148, 0.006848703861236572, 0.006881279945373535, 0.006864480018615723, 0.006838687896728516, 0.006807551860809326, 0.006833375930786133, 
0.006838912010192871, 0.006815904140472412, 0.006833631992340088, 0.006851103782653809, 0.006993184089660645, 0.006879903793334961, 0.006823999881744385, 0.0068466558456420894, 0.006887231826782227, 0.006819039821624756, 0.0068615999221801754, 0.00682751989364624, 0.006821824073791504, 0.0068652482032775875, 0.0069592962265014645, 0.0069807682037353515, 0.006886271953582763, 0.006795775890350342, 0.006880671977996826, 0.006834784030914307, 0.006799327850341797, 0.0068403520584106444, 0.006887423992156983, 0.006785024166107178, 0.006829887866973877, 0.0068048639297485355, 0.006794047832489014, 0.00684441614151001, 0.00683622407913208, 0.006831456184387207, 0.0068158721923828125, 0.006789663791656494, 0.006838592052459716, 0.006760128021240234, 0.0069324798583984375, 0.006868735790252685, 0.006809216022491455, 0.006822527885437012, 0.006827680110931397, 0.00685916805267334, 0.0068023681640625, 0.006790207862854004, 0.0068197760581970215, 0.006775872230529785, 0.006804031848907471, 0.006821919918060302, 0.006777184009552002, 0.006795263767242431, 0.006839839935302734, 0.006830560207366943, 0.00686243200302124, 0.0068460159301757815, 0.0068288960456848145, 0.006793407917022705, 0.006794623851776123, 0.006852831840515137, 0.006787295818328857, 0.006766592025756836, 0.006831776142120361, 0.00680131196975708, 0.006799456119537354, 0.006827616214752198, 0.006834943771362304, 0.006803455829620361, 0.0068148159980773925, 0.006837152004241944, 0.006878560066223145, 0.006812320232391357, 0.006977536201477051, 0.006829887866973877, 0.006805215835571289, 0.006803936004638672, 0.006799295902252197, 0.006868288040161132, 0.006955776214599609, 0.00680947208404541, 0.006869120121002197, 0.00742195177078247, 0.006808896064758301, 0.006867648124694824, 0.0068280320167541505, 0.006796864032745362, 0.006842527866363525, 0.006856991767883301, 0.006789087772369385, 0.006807583808898926, 0.006817791938781738, 0.006845664024353027, 0.006822048187255859, 0.006836863994598388, 0.00687011194229126, 0.006816448211669922, 0.0068057279586791995, 0.006838272094726563, 0.006948863983154297, 0.006805823802947998, 0.006874911785125733, 0.006852511882781982, 0.0068689918518066405, 0.006840320110321045, 0.0068830718994140625, 0.006869247913360596, 0.006794688224792481, 0.0068791680335998535, 0.00686348819732666, 0.006854752063751221, 0.0068791360855102535, 0.006850207805633545, 0.0069383358955383305, 0.006854656219482422, 0.0068510079383850096, 0.006938816070556641, 0.00683958387374878, 0.007029759883880615, 0.0068724479675292965, 0.006870560169219971, 0.0068206720352172855, 0.006881279945373535, 0.006825984001159668, 0.006860799789428711, 0.006780064105987549, 0.006853504180908203, 0.006926303863525391, 0.006800447940826416, 0.006853568077087402, 0.006841728210449219, 0.0068552961349487304, 0.006842559814453125, 0.006838079929351807, 0.006862847805023193, 0.006838272094726563, 0.00739734411239624, 0.0068772158622741695, 0.006878687858581543, 0.006860415935516357, 0.007037856101989746, 0.006852608203887939, 0.00690176010131836, 0.006807551860809326, 0.00684825611114502, 0.006923520088195801, 0.006822591781616211, 0.006868864059448242, 0.006973440170288086, 0.006868768215179444, 0.006766816139221191, 0.00686191987991333, 0.0068412480354309085, 0.006788928031921387, 0.006849728107452393, 0.006814688205718994, 0.006843584060668945, 0.006834015846252441, 0.006833151817321777, 0.006959104061126709, 0.006782976150512696, 0.006884672164916992, 0.007914175987243652, 0.006873087882995605, 0.00686684799194336, 0.006840223789215088, 
0.006887616157531739, 0.006893152236938476, 0.006818175792694092, 0.006917791843414307, 0.006846496105194092, 0.006945119857788086, 0.006791168212890625, 0.006883615970611572, 0.006849311828613282, 0.006830207824707031, 0.006835008144378662, 0.006882688045501709, 0.006890111923217773, 0.006909952163696289, 0.007724575996398926, 0.007524831771850586, 0.008027232170104981, 0.00794921588897705, 0.007245247840881348, 0.007100800037384033, 0.007059648036956787, 0.007028736114501953, 0.007001823902130127, 0.007082272052764893, 0.006948607921600342, 0.006930143833160401, 0.0070620479583740235, 0.006946176052093506, 0.006916736125946045, 0.006969632148742676, 0.006901504039764405, 0.00693449592590332, 0.007116799831390381, 0.0074403839111328125, 0.00707750415802002, 0.006954368114471436, 0.00709497594833374, 0.006958879947662353, 0.006889535903930664, 0.006938176155090332, 0.006874015808105469, 0.006852447986602783, 0.006994080066680908, 0.006889472007751465, 0.006922239780426025, 0.006764863967895508, 0.006915967941284179, 0.0068754878044128415, 0.006858751773834228, 0.006989823818206787, 0.006872608184814453, 0.006805408000946045, 0.006847040176391601, 0.006840256214141846, 0.006923744201660156, 0.00689961576461792, 0.0069270401000976565, 0.006866079807281494, 0.006900576114654541, 0.006839647769927979, 0.0068221759796142575, 0.006881663799285889, 0.00683619213104248, 0.006856959819793701, 0.007032608032226563, 0.006774623870849609, 0.006820000171661377, 0.006862624168395996, 0.006844639778137207, 0.006817791938781738, 0.00683622407913208, 0.006850431919097901, 0.006814879894256592, 0.006826015949249267, 0.006832767963409423, 0.006862847805023193, 0.00677510404586792, 0.006850560188293457, 0.006834176063537598, 0.006795263767242431, 0.006873087882995605, 0.006834271907806396, 0.007200384140014648, 0.007067935943603516, 0.007069248199462891, 0.007012800216674804, 0.007223296165466309, 0.006975808143615723, 0.00697926378250122, 0.006905280113220215, 0.007014976024627685, 0.006969344139099121, 0.00699177598953247, 0.006983935832977295, 0.00742793607711792, 0.007860223770141601, 0.006969471931457519, 0.006885216236114502, 0.006857952117919922, 0.007250751972198486, 0.006894879817962647, 0.006893407821655273, 0.006935423851013184, 0.00685584020614624, 0.006925151824951172, 0.006897664070129395, 0.006838335990905762, 0.006889408111572266, 0.006962080001831055, 0.006897664070129395, 0.006913440227508545, 0.006885983943939209, 0.006852704048156738, 0.006942527770996094, 0.006914112091064453, 0.006813727855682373, 0.006936543941497802, 0.0069632320404052735, 0.007274496078491211, 0.007245823860168457, 0.007118656158447266, 0.007454912185668946, 0.007009664058685303, 0.007021183967590332, 0.006959104061126709, 0.006930431842803955, 0.006856575965881347, 0.006874527931213379, 0.006828767776489258, 0.006944736003875732, 0.0070340800285339355, 0.006801663875579834, 0.006940351963043213, 0.0069231362342834476, 0.006996255874633789, 0.0069642882347106935, 0.006948575973510742, 0.007121856212615967, 0.007266304016113281, 0.00694271993637085, 0.006985375881195069, 0.006924704074859619, 0.006847775936126709, 0.006909791946411133, 0.006822303771972656, 0.006931039810180664, 0.006893375873565674, 0.006850560188293457, 0.006918144226074219, 0.0068853759765625, 0.006875135898590088, 0.006940671920776367, 0.006985727787017822, 0.006989823818206787, 0.006873087882995605, 0.006920191764831543, 0.0069894719123840335, 0.006846816062927246, 0.0068915200233459475, 0.006958752155303955, 0.006822239875793457, 
0.006870816230773926, 0.006899712085723877, 0.006856800079345703, 0.0068895998001098635, 0.006827616214752198, 0.006998432159423828, 0.006944767951965332, 0.006883327960968018, 0.006917791843414307, 0.006871327877044677, 0.006942560195922851, 0.006905312061309814, 0.0070440640449523926, 0.006895616054534912, 0.006899712085723877, 0.006922239780426025, 0.00690396785736084, 0.006936192035675049, 0.006872352123260498, 0.006882239818572998, 0.0069376640319824216, 0.006840415954589844, 0.007052031993865967, 0.0070059518814086915, 0.0068379840850830076, 0.006894207954406738, 0.006850560188293457, 0.006860799789428711, 0.006946432113647461, 0.006797344207763672, 0.006936927795410156, 0.007055103778839111, 0.00683187198638916, 0.006877696037292481, 0.0069790401458740235, 0.006995935916900634, 0.007047200202941894, 0.006849055767059326, 0.0069550080299377445, 0.006937759876251221, 0.00681660795211792, 0.006897759914398193, 0.006868703842163086, 0.006844607830047608, 0.006838272094726563, 0.00690934419631958, 0.006902368068695069, 0.006867072105407715, 0.006805376052856445, 0.006910975933074951, 0.006812479972839356, 0.006832320213317871, 0.006860799789428711, 0.006809599876403808, 0.006880352020263672, 0.00682476806640625, 0.006949024200439453, 0.006975423812866211, 0.006774816036224365, 0.006930399894714356, 0.0068853759765625, 0.006846464157104492, 0.0068853759765625, 0.006813695907592773, 0.00687718391418457, 0.006860799789428711, 0.006874527931213379, 0.007209472179412842, 0.00748144006729126, 0.0071632637977600095, 0.006857344150543213, 0.006860511779785156, 0.007225535869598389, 0.006862080097198487, 0.006985504150390625, 0.0070063362121582035, 0.007005023956298828, 0.006957056045532227, 0.006874144077301025, 0.006923232078552246, 0.006874335765838623, 0.007017248153686524, 0.006883200168609619, 0.0068568320274353025, 0.0068689918518066405, 0.007013792037963867, 0.007215424060821533, 0.006927807807922363, 0.006852735996246338, 0.006814432144165039, 0.00692633581161499, 0.006848512172698974, 0.0070054721832275395, 0.006867680072784424, 0.00682755184173584, 0.006914591789245606, 0.006812704086303711, 0.006867392063140869, 0.006838751792907715, 0.006798399925231934, 0.007022880077362061, 0.006853280067443848, 0.006895040035247803, 0.006813471794128418, 0.0068039679527282714, 0.006832255840301514, 0.006785535812377929, 0.0067717118263244626, 0.006845088005065918, 0.006804543972015381, 0.006925248146057129, 0.00774073600769043, 0.006928671836853027, 0.006889664173126221, 0.006832096099853515, 0.006851935863494873, 0.006921120166778564, 0.006881279945373535, 0.006842368125915528, 0.006852608203887939, 0.006841599941253662, 0.006766848087310791, 0.006826367855072022, 0.006891039848327637, 0.006947008132934571, 0.006805408000946045, 0.006924799919128418, 0.007010079860687256, 0.006879456043243408, 0.007159776210784912, 0.006963263988494873, 0.006971424102783203, 0.0069419522285461424, 0.006986752033233643, 0.006913728237152099, 0.006940415859222412, 0.00727452802658081, 0.007000063896179199, 0.007097856044769287, 0.007077407836914062, 0.007090271949768067, 0.007131487846374512, 0.007122943878173828, 0.007170400142669678, 0.007027232170104981, 0.006932127952575684, 0.006902016162872314, 0.0069192957878112795, 0.0069205441474914555, 0.006831456184387207, 0.006837183952331543, 0.006858367919921875, 0.007078559875488282, 0.006889152050018311, 0.006876927852630615, 0.006892864227294922, 0.006822879791259766, 0.007, 0.00684438419342041, 0.00689299201965332, 0.006830751895904541, 0.006883327960968018, 
0.007137279987335205, 0.006895616054534912, 0.006821407794952393, 0.00692467212677002, 0.006983712196350098, 0.006800992012023926, 0.007024415969848633, 0.006875648021697998, 0.006950655937194824, 0.006836703777313232, 0.007217055797576905, 0.006965248107910156, 0.007313119888305664, 0.006846752166748047, 0.006872576236724853, 0.006858431816101075, 0.006832479953765869, 0.00682857608795166, 0.006864160060882568, 0.006934976100921631, 0.006795167922973633, 0.007059840202331543, 0.006923871994018555, 0.007119296073913574, 0.007650623798370362, 0.007133855819702149, 0.00695091199874878, 0.00704095983505249, 0.007069759845733643, 0.007017568111419677, 0.006968224048614502, 0.006975488185882568, 0.006873087882995605, 0.006875135898590088, 0.006873087882995605, 0.007038335800170898, 0.006859231948852539, 0.006789408206939697, 0.006833888053894043, 0.0068689918518066405, 0.006987199783325195, 0.007023168087005615, 0.007927584171295165, 0.00841913604736328, 0.00787286376953125, 0.006852320194244385, 0.006887775897979736, 0.00686684799194336, 0.00696943998336792, 0.00684662389755249, 0.007028575897216797, 0.006846464157104492, 0.006821887969970703, 0.00687718391418457, 0.006778463840484619, 0.0068897600173950194, 0.0068609600067138675, 0.006922175884246826, 0.006853919982910156, 0.006839039802551269, 0.006897535800933838, 0.0068215360641479495, 0.007104544162750244, 0.007050911903381348, 0.006934847831726074, 0.006787487983703613, 0.006869056224822998, 0.0068689918518066405, 0.006891039848327637, 0.006838240146636963, 0.006823455810546875, 0.006818784236907959, 0.006780863761901856, 0.00686406421661377, 0.006884384155273438, 0.006942560195922851, 0.006889120101928711, 0.006852960109710694, 0.006895616054534912, 0.006817408084869385, 0.006803840160369873, 0.006818848133087158, 0.006832608222961426, 0.007155776023864746, 0.006871456146240234, 0.0068403520584106444, 0.006897056102752685, 0.006916384220123291, 0.006822207927703858, 0.007000063896179199, 0.006844768047332764, 0.006858399868011475, 0.00690937614440918, 0.006783455848693848, 0.0068895678520202635, 0.006881279945373535, 0.006830080032348633, 0.006993919849395752, 0.0068915200233459475, 0.006822976112365722, 0.006707200050354004, 0.006875072002410889, 0.006948927879333496, 0.00686406421661377, 0.00682476806640625, 0.0068590397834777835, 0.006878943920135498, 0.006815455913543701, 0.006943136215209961, 0.00712886381149292, 0.007201888084411621, 0.007322624206542969, 0.0071015038490295414, 0.007136191844940186, 0.007138815879821778, 0.00704585599899292, 0.006961984157562256, 0.006965280055999756, 0.0070828161239624024, 0.007018623828887939, 0.007124800205230713, 0.006940864086151123, 0.0070266880989074704, 0.006964255809783935, 0.00684335994720459, 0.006854656219482422, 0.006938432216644287, 0.006844255924224853, 0.006777184009552002, 0.006852863788604736, 0.0068750400543212895, 0.006807072162628174, 0.006934847831726074, 0.0069567360877990725, 0.006885280132293701, 0.006824160099029541, 0.006983871936798096, 0.006848320007324219, 0.006791232109069824, 0.0069797120094299316, 0.00690723180770874, 0.006843039989471436, 0.006799359798431396, 0.0068280320167541505, 0.006819839954376221, 0.006848512172698974, 0.006929696083068848, 0.006918879985809326, 0.006889472007751465, 0.00684441614151001, 0.006883327960968018, 0.006878943920135498, 0.0068262720108032224, 0.0068055038452148435, 0.006868735790252685, 0.006893824100494385, 0.006840320110321045, 0.006963007926940918, 0.00725161600112915, 0.007011903762817383, 0.006947103977203369, 
0.006970047950744629, 0.007020544052124023, 0.006862847805023193, 0.0068618240356445315, 0.007158239841461181, 0.0071910719871521, 0.007090176105499267, 0.006922239780426025, 0.006908095836639404, 0.006864704132080078, 0.006987775802612305, 0.007001183986663819, 0.006925216197967529, 0.006864448070526123, 0.006879007816314698, 0.006830751895904541, 0.006830080032348633, 0.006860799789428711, 0.006942272186279297, 0.0071684479713439945, 0.0069079680442810055, 0.00690783977508545, 0.006834176063537598, 0.0068915200233459475, 0.00687497615814209, 0.0068978238105773925, 0.006835487842559815, 0.006857791900634766, 0.006928095817565918, 0.006840256214141846, 0.006856607913970947, 0.006903903961181641, 0.006879231929779053, 0.006858751773834228, 0.00689356803894043, 0.007061823844909668, 0.006888544082641602, 0.006912543773651123, 0.006901440143585205, 0.006950463771820068, 0.00713100814819336, 0.0074618239402770995, 0.007366176128387451, 0.007262688159942627, 0.007197728157043457, 0.007116864204406739, 0.007119775772094727, 0.007139008045196533, 0.007037248134613037, 0.007034143924713135, 0.007182432174682617, 0.007006847858428955, 0.006943967819213867, 0.006933279991149902, 0.006803232192993164, 0.00708841609954834, 0.007184319972991944, 0.006916096210479736, 0.006806719779968262, 0.0068616318702697755, 0.006843455791473389, 0.006820191860198974, 0.006931039810180664, 0.006827680110931397, 0.006815616130828857, 0.006751711845397949, 0.006883872032165527, 0.006871039867401123, 0.006782976150512696, 0.006858687877655029, 0.006839615821838379, 0.006842624187469482, 0.006844927787780761, 0.006887423992156983, 0.006909952163696289, 0.006826047897338867, 0.006817728042602539, 0.00683622407913208, 0.0069918718338012695, 0.00681990385055542, 0.006856704235076904, 0.006927455902099609, 0.006822015762329102, 0.006824287891387939, 0.0068261117935180665, 0.0068364801406860356, 0.006804992198944091, 0.006830111980438232, 0.006844255924224853, 0.006744416236877442, 0.006799903869628907, 0.006921567916870117, 0.006834176063537598, 0.006771103858947754, 0.006805568218231201, 0.006858687877655029, 0.00678326416015625, 0.006864607810974121, 0.006819839954376221, 0.006758399963378906, 0.006833920001983643, 0.006912255764007568, 0.006845439910888672, 0.006781951904296875, 0.0069417600631713865, 0.006837024211883545, 0.006811903953552246, 0.006816895961761474, 0.0068098239898681644, 0.006793791770935058, 0.0067870721817016606, 0.006834176063537598, 0.006873087882995605, 0.00678652811050415, 0.006857247829437256, 0.006883327960968018, 0.0068111357688903805, 0.006816256046295166, 0.006838272094726563, 0.0068689918518066405, 0.006856704235076904, 0.006945888042449951, 0.006989952087402344, 0.006847455978393555, 0.006939519882202148, 0.006839231967926025, 0.006849984169006348, 0.006804031848907471, 0.006697375774383545, 0.00684441614151001, 0.0068271679878234865, 0.00680998420715332, 0.006826464176177978, 0.0068503360748291015, 0.006775008201599121, 0.006791168212890625, 0.006819519996643066, 0.006756671905517578, 0.0068076481819152835, 0.006841279983520508, 0.006786015987396241, 0.0070043840408325194, 0.006835999965667724, 0.006817024230957031, 0.006757120132446289, 0.006833600044250488, 0.006826816082000732, 0.006819647789001465, 0.006966400146484375, 0.006885248184204102, 0.006834911823272705, 0.006803423881530762, 0.006856959819793701, 0.006862847805023193, 0.006829631805419922, 0.006822527885437012, 0.006846271991729737, 0.0068915200233459475, 0.0068748478889465335, 0.007375135898590088, 0.007012063980102539, 
0.006877791881561279, 0.006790016174316406, 0.0068484477996826176, 0.006846975803375244, 0.0068419198989868165, 0.007039807796478272, 0.006922239780426025, 0.007135231971740722, 0.006897664070129395, 0.006825984001159668, 0.006868671894073486, 0.006844575881958008, 0.006836703777313232, 0.0068338561058044435, 0.006880320072174072, 0.0068023681640625, 0.006846464157104492, 0.006936831951141358, 0.006932096004486084, 0.006824063777923584, 0.006823584079742432, 0.007021984100341797, 0.0068772158622741695, 0.006796160221099854, 0.006942304134368896, 0.007061632156372071, 0.0072297601699829106, 0.007053311824798584, 0.006854656219482422, 0.006891168117523193, 0.006737887859344482, 0.006992159843444824, 0.007686143875122071, 0.007139328002929687, 0.007137279987335205, 0.006879231929779053, 0.006897664070129395, 0.006887423992156983, 0.006955103874206543, 0.006850111961364746, 0.006852960109710694, 0.006878431797027588, 0.007676191806793213, 0.006943136215209961, 0.006950560092926025, 0.006866911888122559, 0.006858880043029785, 0.007099936008453369, 0.0069848642349243165, 0.006943967819213867, 0.0068919677734375, 0.006895616054534912, 0.006902944087982177, 0.006897600173950196, 0.006814496040344238, 0.006862847805023193, 0.006866623878479004, 0.006857151985168457, 0.006800672054290771, 0.006867136001586914, 0.006910079956054687, 0.0068919358253479, 0.006948863983154297, 0.007038047790527344, 0.006906784057617187, 0.00688153600692749, 0.007007999897003174, 0.006976799964904785, 0.006912735939025879, 0.006811615943908692, 0.006856704235076904, 0.0068362560272216795, 0.006801407814025879, 0.006835968017578125, 0.00685200023651123, 0.0068329920768737794, 0.006809599876403808, 0.006874176025390625, 0.006846432209014893, 0.006910079956054687, 0.006884191989898682, 0.006837952136993408, 0.006788928031921387, 0.006793727874755859, 0.006851808071136474, 0.006818399906158447, 0.0067749757766723635, 0.006848159790039062, 0.006809375762939453, 0.006810175895690918, 0.006844064235687256, 0.006850815773010254, 0.006852159976959229, 0.006878399848937988, 0.007315584182739258, 0.00744927978515625, 0.007075808048248291, 0.007204895973205567, 0.007128416061401367, 0.007142047882080078, 0.007005792140960693, 0.00693884801864624, 0.006864448070526123, 0.00689737606048584, 0.007012864112854004, 0.00681769609451294, 0.007029183864593506, 0.006917376041412354, 0.006875967979431153, 0.006837952136993408, 0.0071437439918518066, 0.007005695819854736, 0.006914559841156006, 0.0068628802299499515, 0.006908959865570068, 0.006875584125518799, 0.006847040176391601, 0.006830016136169434, 0.006926112174987793, 0.006930560111999512, 0.007074175834655761, 0.006967008113861084, 0.007088128089904785, 0.00721833610534668, 0.007500895977020264, 0.007214848041534424, 0.0070997757911682126, 0.007056000232696533, 0.006975743770599365, 0.00684825611114502, 0.006876863956451416, 0.006856095790863037, 0.006847392082214356, 0.006944767951965332, 0.006856704235076904, 0.006829055786132812, 0.006814720153808594, 0.006798751831054687, 0.006846432209014893, 0.006865344047546387, 0.006803264141082763, 0.0068652801513671875, 0.00684227180480957, 0.0067779521942138675, 0.006852863788604736, 0.006973919868469239, 0.006866879940032959, 0.00681171178817749, 0.006856063842773438, 0.00685148811340332, 0.0068642239570617675, 0.006948544025421143, 0.006831039905548095, 0.006952991962432861, 0.006789152145385742, 0.006838240146636963, 0.006820159912109375, 0.006833727836608887, 0.006979328155517578, 0.00721343994140625, 0.0068889279365539555, 
0.006797855854034424, 0.00684227180480957, 0.006882431983947754, 0.006910272121429443, 0.006811808109283448, 0.006899295806884765, 0.006929279804229737, 0.0068709440231323245, 0.00686243200302124, 0.006881951808929443, 0.00693395185470581, 0.0069014720916748045, 0.006869728088378906, 0.007001440048217774, 0.0068553280830383305, 0.006784416198730468, 0.0070416321754455564, 0.0068847999572753905, 0.0067794561386108395, 0.0068254079818725584, 0.006822463989257813, 0.006811808109283448, 0.006818880081176757, 0.006804255962371826, 0.00684006404876709, 0.0067873277664184575, 0.006823647975921631, 0.006846303939819336, 0.0068141441345214845, 0.007374207973480224, 0.006869631767272949, 0.008908767700195313, 0.007186399936676025, 0.006914112091064453, 0.0068689918518066405, 0.006843423843383789, 0.0071710400581359865, 0.006940671920776367, 0.0068334717750549315, 0.006892223834991455, 0.006997375965118408, 0.006865536212921143, 0.006830080032348633, 0.006840320110321045, 0.006918144226074219, 0.006987423896789551, 0.006953567981719971, 0.006962944030761719, 0.006962495803833008, 0.0069326720237731934, 0.006918655872344971, 0.006897119998931884, 0.006929952144622803, 0.006818655967712402, 0.006844160079956055, 0.007162208080291748, 0.006938687801361084, 0.006823616027832031, 0.006805376052856445, 0.006852223873138428, 0.006827712059020996, 0.0068568639755249025, 0.006852608203887939, 0.006850592136383057, 0.0068345279693603515, 0.007033279895782471, 0.007225056171417236, 0.006956768035888672, 0.006831744194030762, 0.006845376014709473, 0.006853888034820557, 0.006786816120147705, 0.006808576107025147, 0.006940671920776367, 0.006821887969970703, 0.006973440170288086, 0.00687718391418457, 0.00687718391418457, 0.006772736072540283, 0.006866943836212158, 0.0068707199096679685, 0.006883359909057617, 0.007092512130737304, 0.006949120044708252, 0.00698137617111206, 0.00710041618347168, 0.007023871898651123, 0.007136127948760986, 0.007059103965759277, 0.007102687835693359, 0.006963007926940918, 0.006952672004699707, 0.007051743984222412, 0.006976736068725586, 0.006814367771148681, 0.006868735790252685, 0.007011807918548584, 0.006850848197937012, 0.006828639984130859, 0.006862592220306396, 0.0068439040184021, 0.006875296115875244, 0.006873600006103516, 0.006862368106842041, 0.0068204479217529295, 0.00684441614151001, 0.0069244480133056644, 0.00684988784790039, 0.006932928085327149, 0.006873151779174805, 0.006839680194854736, 0.006965888023376465, 0.006818943977355957, 0.006852863788604736, 0.006867487907409668, 0.0068169918060302735, 0.007991424083709717, 0.007049983978271485, 0.006949024200439453, 0.006891136169433594, 0.006815455913543701, 0.006787903785705566, 0.006944736003875732, 0.0069326720237731934, 0.006829919815063476, 0.006886591911315918, 0.006865407943725586, 0.0068139200210571285, 0.006934624195098877, 0.0069408960342407225, 0.0068923521041870115, 0.006865632057189941, 0.006859007835388183, 0.006854656219482422, 0.006811647891998291, 0.00695091199874878, 0.006854656219482422, 0.006930431842803955, 0.006811647891998291, 0.006838272094726563, 0.006864352226257324, 0.006821856021881103, 0.006866975784301758, 0.006969888210296631, 0.0068239040374755856, 0.006765727996826172, 0.006828927993774414, 0.006840320110321045, 0.0067974720001220705, 0.006920032024383545, 0.00686678409576416, 0.006875296115875244, 0.006952960014343262, 0.007308928012847901, 0.006853024005889893, 0.006817759990692139, 0.006850783824920654, 0.006864607810974121, 0.006883168220520019, 0.006794976234436035, 0.0068264961242675785, 
0.007290880203247071, 0.006993919849395752, 0.0068767681121826175, 0.0068542399406433105, 0.006812479972839356, 0.006826240062713623, 0.0068841280937194825, 0.0068698878288269045, 0.006894688129425049, 0.006880256175994873, 0.006923520088195801, 0.007016287803649902, 0.0068800320625305176, 0.006789247989654541, 0.0069918718338012695, 0.006950175762176514, 0.00683900785446167, 0.006838079929351807, 0.006850304126739502, 0.006834623813629151, 0.006802432060241699, 0.0068351998329162595, 0.006807551860809326, 0.006785759925842285, 0.0068973441123962404, 0.007723008155822754, 0.007425248146057129, 0.006951712131500244, 0.006852287769317627, 0.0069225277900695804, 0.006892672061920166, 0.006867648124694824, 0.006850207805633545, 0.006969632148742676, 0.006967584133148194, 0.006868735790252685, 0.0068774399757385255, 0.006810624122619629, 0.0068884482383728025, 0.006794816017150879, 0.006853055953979492, 0.006839712142944336, 0.006777599811553955, 0.0068219838142395016, 0.006859583854675293, 0.0069539198875427245, 0.006852223873138428, 0.006807936191558838, 0.006821887969970703, 0.006784832000732422, 0.006832287788391113, 0.006844448089599609, 0.006807487964630127, 0.006815199851989746, 0.006853216171264649, 0.006837664127349853, 0.006916704177856445, 0.0068150081634521485, 0.0067918081283569335, 0.0068076481819152835, 0.006916096210479736, 0.0068895998001098635, 0.006882847785949707, 0.006744768142700195, 0.006833824157714843, 0.0068416957855224605, 0.00731715202331543, 0.006846560001373291, 0.0068782720565795895, 0.006837728023529052, 0.006795648097991943, 0.006823264122009278, 0.006848383903503418, 0.006847487926483154, 0.006787903785705566, 0.006829023838043213, 0.007001567840576172, 0.006785439968109131, 0.006850687980651855, 0.006843967914581299, 0.006827455997467041, 0.006814911842346192, 0.006868447780609131, 0.006905471801757813, 0.006761184215545654, 0.006975008010864258, 0.0068056960105895995, 0.006849343776702881, 0.0068392958641052244, 0.006971392154693603, 0.0068336639404296875, 0.006982143878936768, 0.006900896072387695, 0.006916927814483643, 0.006897056102752685, 0.007064095973968506, 0.006930528163909912, 0.006887423992156983, 0.006849696159362793, 0.006859615802764892, 0.006864575862884521, 0.006862944126129151, 0.006816287994384765, 0.006906655788421631, 0.007005119800567627, 0.007009471893310547, 0.0071339840888977055, 0.006895616054534912, 0.007094272136688232, 0.006887423992156983, 0.006798880100250244, 0.006832608222961426, 0.007212704181671143, 0.006860191822052002, 0.006811679840087891, 0.00686575984954834, 0.006864448070526123, 0.007674208164215088, 0.006821663856506348, 0.0068364481925964355, 0.0068568320274353025, 0.006817791938781738, 0.006914080142974854, 0.006872928142547608, 0.006817791938781738, 0.006801023960113526, 0.0068306241035461426, 0.006854656219482422, 0.006773888111114502, 0.00683900785446167, 0.006862336158752441, 0.006867616176605224, 0.006788127899169922, 0.0068739838600158695, 0.006828127861022949, 0.0067909760475158695, 0.006850912094116211, 0.006851583957672119, 0.0068180480003356934, 0.007024543762207031, 0.007040031909942627, 0.006911647796630859, 0.006819839954376221, 0.006903456211090088, 0.006870848178863525, 0.006865056037902832, 0.006820223808288574, 0.006858751773834228, 0.006862815856933594, 0.006748479843139648, 0.006817984104156494, 0.006859327793121338, 0.00684227180480957, 0.006831999778747559, 0.00684659194946289, 0.006838272094726563, 0.006784224033355713, 0.006830175876617431, 0.006836575984954834, 0.006777184009552002, 
0.006782976150512696, 0.006790336132049561, 0.006838496208190918, 0.006746719837188721, 0.00698476791381836, 0.00685971212387085, 0.006895616054534912, 0.00694271993637085, 0.006860064029693603, 0.0068410558700561525, 0.00680511999130249, 0.006830463886260987, 0.0068199682235717774, 0.006748032093048095, 0.006813695907592773, 0.006811488151550293, 0.006895391941070556, 0.006887807846069336, 0.00683622407913208, 0.006957056045532227, 0.006797088146209717, 0.006910272121429443, 0.006803135871887207, 0.006803679943084717, 0.0068130559921264645, 0.006806144237518311, 0.006807199954986572, 0.006777184009552002, 0.0068321280479431154, 0.006829311847686767, 0.006770559787750244, 0.006810304164886475, 0.006844287872314453, 0.006903168201446533, 0.006814655780792237, 0.006830080032348633, 0.006838272094726563, 0.006790239810943604, 0.006863296031951904, 0.006883135795593262, 0.006933152198791504, 0.0067870721817016606, 0.006802591800689697, 0.0071504321098327635, 0.0067870721817016606, 0.006852255821228027, 0.006910208225250244, 0.006879039764404297, 0.006736159801483154, 0.00681769609451294, 0.006819935798645019, 0.006762495994567871]",tokens/s,144.7765921197902,, bfloat16-flash_attention_2,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,bfloat16,True,False,,flash_attention_2,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.2,,0.34.2,,,,1.22.0,,,,0.13.0,,"Traceback (most recent call last): File ""/workspace/src/common/benchmark_runner.py"", line 118, in execute_and_log_benchmark benchmark_report = Benchmark.launch(benchmark_config) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 47, in launch report = launcher.launch(worker=cls.run, worker_args=[config]) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 66, in launch raise ChildProcessError(response[""traceback""]) ChildProcessError: Traceback (most recent call last): File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/launchers/process/launcher.py"", line 104, in target report = worker(*worker_args) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/benchmark/base.py"", line 68, in run report = scenario.run(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 96, in run self.run_text_generation_memory_tracking(backend) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/scenarios/inference/scenario.py"", line 200, in run_text_generation_memory_tracking _ = backend.prefill(self.inputs, prefill_kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/optimum_benchmark/backends/pytorch/backend.py"", line 450, in prefill return self.pretrained_model.generate(**inputs, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/utils/_contextlib.py"", line 116, in decorate_context return func(*args, **kwargs) File 
""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 2047, in generate result = self._sample( File ""/usr/local/lib/python3.10/dist-packages/transformers/generation/utils.py"", line 3007, in _sample outputs = self(**model_inputs, return_dict=True) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1316, in forward transformer_outputs = self.transformer( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 1130, in forward outputs = block( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 615, in forward attn_outputs = self.attn( File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1553, in _wrapped_call_impl return self._call_impl(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py"", line 1562, in _call_impl return forward_call(*args, **kwargs) File ""/usr/local/lib/python3.10/dist-packages/transformers/models/gpt2/modeling_gpt2.py"", line 436, in forward attn_output = _flash_attention_forward( File ""/usr/local/lib/python3.10/dist-packages/transformers/modeling_flash_attention_utils.py"", line 296, in _flash_attention_forward attn_output = flash_attn_func( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 880, in flash_attn_func return FlashAttnFunc.apply( File ""/usr/local/lib/python3.10/dist-packages/torch/autograd/function.py"", line 574, in apply return super().apply(*args, **kwargs) # type: ignore[misc] File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 546, in forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = _flash_attn_forward( File ""/usr/local/lib/python3.10/dist-packages/flash_attn/flash_attn_interface.py"", line 52, in _flash_attn_forward out, q, k, v, out_padded, softmax_lse, S_dmask, rng_state = flash_attn_cuda.fwd( RuntimeError: FlashAttention only supports Ampere GPUs or newer. 
",,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,, float32-eager,pytorch,2.4.1+cu124,optimum_benchmark.backends.pytorch.backend.PyTorchBackend,text-generation,transformers,gpt2,openai-community/gpt2,openai-community/gpt2,cuda,0,42,,,True,True,,float32,True,False,,eager,,False,,False,forward,,False,,inference,optimum_benchmark.scenarios.inference.scenario.InferenceScenario,10,10,10,1,2,256,,True,True,True,64,64,process,optimum_benchmark.launchers.process.launcher.ProcessLauncher,True,kill,False,spawn, Intel(R) Xeon(R) Platinum 8259CL CPU @ 2.50GHz,8,33163.759616,Linux,x86_64,Linux-5.10.225-213.878.amzn2.x86_64-x86_64-with-glibc2.35,x86_64,3.10.12,['Tesla T4'],1,16106127360,0.5.0,,4.45.1,,0.34.2,,,,1.22.0,,,,0.13.0,,,MB,812.040192,1128.136704,0.0,725.614592,666.338304,s,1,8.694095703125,8.694095703125,0.0,8.694095703125,8.694095703125,8.694095703125,8.694095703125,[8.694095703125],,kWh,3.118389166666683e-06,3.3646691366362836e-07,8.994451640000154e-07,4.354301244330327e-06,,MB,1176.735744,1144.91392,0.0,731.906048,652.62848,s,18,0.30580239868164055,0.016989022148980033,0.00032913152976232876,0.01691343975067139,0.016985987091064456,0.017229247951507567,0.018110963420867918,"[0.018331392288208008, 0.016939136505126955, 0.01685686492919922, 0.016965087890625, 0.016922975540161134, 0.017034751892089844, 0.016856544494628905, 0.016927455902099608, 0.016867679595947267, 0.016826688766479494, 0.016950815200805665, 0.016892799377441405, 0.01693382453918457, 0.01690390396118164, 0.016848127365112306, 0.01694233512878418, 0.01690284729003906, 0.01689916801452637]",tokens/s,15068.55413778888,kWh,5.953298127460958e-07,6.565417808522909e-08,3.949335587617108e-07,1.0559175495930357e-06,tokens/kWh,242443171.91116458,MB,1188.569088,1191.051264,0.0,778.043392,652.63104,s,18,9.920239685058597,0.5511244269476996,0.0035952931157251402,0.5506103515625,0.5567093322753907,0.5575028930664063,0.5577226977539063,"[0.549052734375, 0.55336865234375, 0.5526063232421875, 0.5461886596679687, 0.5494835815429687, 0.5468536987304687, 0.54844482421875, 0.5468862915039062, 0.5481532592773437, 0.553040283203125, 0.5496785278320313, 0.5515421752929688, 0.5541467895507812, 0.5563900146484375, 0.5574544067382813, 0.551823974609375, 0.5473478393554687, 0.5577776489257813]",tokens/s,114.31175415126093,kWh,1.5588616344052154e-05,1.719166594819628e-06,7.08168060965934e-06,2.438946354853112e-05,tokens/kWh,2583082.644464078,,s,1134,9.914355662345889,0.008742818044396725,0.0001934488465357941,0.008692895889282227,0.008911549091339111,0.009027417325973511,0.009382829504013063,"[0.0086015043258667, 0.00863532829284668, 0.008640512466430664, 0.008681471824645997, 0.008635904312133789, 0.008634976387023926, 0.00865385627746582, 0.008672127723693848, 0.008789536476135253, 0.008743391990661622, 0.00867859172821045, 0.008676063537597657, 0.008859647750854491, 0.00873583984375, 0.008643584251403809, 0.008738143920898438, 0.008618656158447266, 0.00858521556854248, 0.008564736366271973, 0.00861907196044922, 0.008635199546813965, 0.008646783828735351, 0.00862003231048584, 0.008695232391357421, 0.008659487724304199, 0.008656928062438965, 0.00860524845123291, 0.008679871559143066, 0.008714431762695313, 0.008689472198486328, 0.008687616348266602, 0.008658432006835937, 0.008622591972351074, 0.008703871726989746, 0.008675456047058105, 0.008722432136535644, 0.008822784423828126, 0.008775487899780273, 0.008675519943237304, 0.008711872100830078, 0.009041248321533203, 0.009075679779052735, 0.008668736457824707, 
0.008896960258483886, 0.0086627197265625, 0.00865721607208252, 0.008762720108032227, 0.00882755184173584, 0.008816351890563965, 0.008796256065368652, 0.008829119682312012, 0.008683520317077637, 0.008730624198913574, 0.008683520317077637, 0.008699040412902832, 0.00867414379119873, 0.008775679588317872, 0.008683520317077637, 0.008685248374938965, 0.008679743766784667, 0.008948991775512696, 0.008712127685546875, 0.008714367866516113, 0.008697855949401855, 0.008726495742797852, 0.008667136192321777, 0.00866921615600586, 0.008773183822631836, 0.008665535926818848, 0.008695808410644532, 0.008681471824645997, 0.008660991668701172, 0.008720383644104004, 0.008965888023376466, 0.008950016021728516, 0.008805536270141601, 0.008722720146179199, 0.008728447914123536, 0.008798144340515136, 0.008725248336791992, 0.008751104354858399, 0.008777215957641601, 0.008705887794494629, 0.008710816383361816, 0.008715264320373535, 0.008764415740966798, 0.00870195198059082, 0.008660927772521972, 0.008722496032714844, 0.008769439697265626, 0.008758784294128418, 0.008796959877014161, 0.008900544166564942, 0.00872640037536621, 0.00869375991821289, 0.009027584075927735, 0.008830975532531739, 0.008785920143127441, 0.008705375671386718, 0.008672191619873046, 0.008744671821594238, 0.0087326717376709, 0.00869375991821289, 0.008685024261474609, 0.008691264152526855, 0.008686047554016113, 0.008720255851745606, 0.008665023803710937, 0.008692735671997071, 0.00867910385131836, 0.008697855949401855, 0.008691103935241699, 0.008689663887023925, 0.008725184440612793, 0.008680512428283691, 0.008655936241149902, 0.008644607543945313, 0.008673215866088867, 0.008812224388122559, 0.008989952087402343, 0.009053088188171387, 0.009070591926574707, 0.009117695808410644, 0.009274975776672363, 0.00909887981414795, 0.009263903617858886, 0.008751711845397948, 0.008848480224609375, 0.008882240295410156, 0.00879702377319336, 0.008793631553649902, 0.00875699234008789, 0.008696224212646484, 0.008845631599426269, 0.00870809555053711, 0.008693056106567383, 0.00873574447631836, 0.008914400100708007, 0.008968192100524902, 0.008907232284545898, 0.008785663604736328, 0.008729727745056152, 0.008702752113342285, 0.008761055946350097, 0.008687359809875488, 0.008751744270324708, 0.008968128204345702, 0.009072704315185547, 0.00897862434387207, 0.009082688331604003, 0.00878927993774414, 0.008794848442077636, 0.008673248291015625, 0.008882207870483399, 0.008703392028808593, 0.00872265625, 0.00865113639831543, 0.008665056228637695, 0.00868560028076172, 0.008681471824645997, 0.008733792304992676, 0.008790431976318359, 0.008691328048706055, 0.008668031692504883, 0.008717535972595215, 0.008678112030029297, 0.008615776062011719, 0.008601823806762695, 0.008645952224731445, 0.00868012809753418, 0.008706015586853027, 0.008808192253112792, 0.008675616264343261, 0.008699392318725586, 0.008696224212646484, 0.008781920433044434, 0.008642560005187988, 0.008665087699890137, 0.008636608123779296, 0.00886303997039795, 0.008948224067687988, 0.008675328254699707, 0.008677184104919433, 0.00862611198425293, 0.008617504119873047, 0.009318623542785644, 0.009070912361145019, 0.008697695732116699, 0.008638815879821777, 0.008577024459838867, 0.008765088081359864, 0.008744352340698243, 0.008649663925170898, 0.008717887878417969, 0.008640928268432617, 0.008670432090759277, 0.008664992332458496, 0.008640480041503907, 0.00860870361328125, 0.008606752395629884, 0.008616928100585938, 0.00879315185546875, 0.009020352363586425, 0.008673088073730469, 0.008657088279724121, 0.008691712379455567, 
0.008647744178771972, 0.008819647789001465, 0.00861564826965332, 0.008647999763488769, 0.008683903694152832, 0.008614496231079101, 0.008638496398925782, 0.008681023597717285, 0.008649120330810547, 0.008622079849243165, 0.00872447967529297, 0.008648256301879882, 0.008623968124389649, 0.008675647735595702, 0.008648991584777832, 0.008673088073730469, 0.008585408210754394, 0.008597503662109375, 0.008654848098754882, 0.008622271537780762, 0.008681440353393554, 0.008766592025756836, 0.008665823936462402, 0.008658944129943847, 0.008644607543945313, 0.008687552452087402, 0.00869148826599121, 0.008617600440979005, 0.008653311729431153, 0.008646816253662109, 0.008642368316650391, 0.008719552040100097, 0.008641535758972169, 0.008660991668701172, 0.00860364818572998, 0.008630271911621093, 0.008607744216918945, 0.008666560173034668, 0.008651328086853028, 0.00862003231048584, 0.00871628761291504, 0.00862003231048584, 0.008640255928039551, 0.008615679740905761, 0.008663552284240723, 0.008583231925964356, 0.008562432289123536, 0.008622591972351074, 0.008696703910827637, 0.008676223754882813, 0.008648544311523438, 0.008648127555847167, 0.008636384010314942, 0.008635135650634765, 0.008651840209960937, 0.008672191619873046, 0.008789119720458984, 0.008672384262084961, 0.00864230442047119, 0.008650752067565918, 0.008650495529174804, 0.008631775856018067, 0.008650943756103516, 0.008652671813964844, 0.008693856239318848, 0.008745599746704101, 0.0086812162399292, 0.008704192161560058, 0.008703071594238282, 0.008680416107177734, 0.008742207527160645, 0.00886019229888916, 0.008749216079711915, 0.008686911582946778, 0.008638688087463379, 0.008618464469909668, 0.00897433567047119, 0.008751008033752441, 0.00869702434539795, 0.008686495780944823, 0.008927488327026367, 0.008750847816467285, 0.00878115177154541, 0.008788607597351073, 0.008810527801513672, 0.008950783729553222, 0.00887007999420166, 0.00887222385406494, 0.008686176300048828, 0.008699520111083985, 0.008925760269165039, 0.008821791648864746, 0.00886780834197998, 0.008714240074157715, 0.008708160400390625, 0.008927935600280762, 0.008728863716125489, 0.008761152267456055, 0.008636320114135742, 0.00861184024810791, 0.008642560005187988, 0.008640064239501952, 0.00861228847503662, 0.008652799606323243, 0.008737855911254883, 0.008707327842712402, 0.008609472274780273, 0.008652352333068847, 0.008667072296142579, 0.008710623741149903, 0.008689760208129883, 0.008675328254699707, 0.008746815681457519, 0.008722399711608887, 0.008590656280517578, 0.008623007774353028, 0.008621600151062012, 0.008636672019958496, 0.009500896453857422, 0.008707615852355957, 0.00864303970336914, 0.008645695686340332, 0.00865775966644287, 0.008629631996154786, 0.00867807960510254, 0.008619680404663085, 0.008612256050109863, 0.008646431922912598, 0.008616127967834473, 0.008587008476257324, 0.008651007652282714, 0.008667136192321777, 0.008684672355651856, 0.008602463722229003, 0.008593440055847168, 0.008593215942382812, 0.008664575576782227, 0.008665792465209961, 0.008650208473205566, 0.008757792472839355, 0.008656543731689453, 0.008661343574523925, 0.008714240074157715, 0.00860159969329834, 0.008662848472595215, 0.008612031936645509, 0.008663040161132812, 0.008621503829956054, 0.008619808197021484, 0.008637215614318848, 0.008642175674438476, 0.008706432342529297, 0.008626175880432128, 0.00858521556854248, 0.008676799774169922, 0.00868614387512207, 0.008654848098754882, 0.008728575706481934, 0.008652544021606446, 0.008712448120117188, 0.00864230442047119, 0.008640768051147461, 
0.008615936279296875, 0.008613887786865235, 0.008634367942810058, 0.008595552444458008, 0.008648032188415528, 0.00909164810180664, 0.008723615646362304, 0.0087390079498291, 0.008663711547851562, 0.008706048011779785, 0.008615455627441405, 0.00867801570892334, 0.008691712379455567, 0.008646976470947266, 0.008762816429138183, 0.008722687721252441, 0.008656895637512207, 0.008681440353393554, 0.008695615768432616, 0.008659168243408203, 0.008727999687194824, 0.008727104187011718, 0.008691360473632812, 0.008692064285278321, 0.008648256301879882, 0.008780223846435547, 0.00876255989074707, 0.008712544441223144, 0.008712448120117188, 0.008620351791381835, 0.008619039535522462, 0.008674176216125488, 0.00869753646850586, 0.008933024406433105, 0.008747679710388184, 0.008926464080810546, 0.008761599540710449, 0.008699711799621581, 0.008684224128723144, 0.008679007530212402, 0.008642016410827636, 0.008659903526306151, 0.008665216445922852, 0.008619456291198731, 0.008625951766967774, 0.008751711845397948, 0.008626239776611328, 0.008639840126037598, 0.008646431922912598, 0.008690560340881347, 0.008638496398925782, 0.008709471702575683, 0.008632767677307128, 0.008628576278686524, 0.008615424156188965, 0.008687808036804199, 0.008579232215881348, 0.008654656410217286, 0.009021632194519043, 0.0088985595703125, 0.008866016387939452, 0.00869545555114746, 0.008640992164611816, 0.008672927856445313, 0.00868489646911621, 0.008681504249572754, 0.008704000473022461, 0.008718015670776368, 0.00871110439300537, 0.008785663604736328, 0.008675007820129395, 0.008706624031066895, 0.00867676830291748, 0.008621312141418458, 0.00866921615600586, 0.008739423751831055, 0.008705951690673829, 0.008659040451049805, 0.008656384468078614, 0.008695679664611817, 0.008704768180847169, 0.008738816261291504, 0.008656895637512207, 0.00868556785583496, 0.008704000473022461, 0.008695808410644532, 0.008677375793457032, 0.008695679664611817, 0.00866316795349121, 0.008720191955566406, 0.008640447616577148, 0.008750816345214844, 0.008612223625183105, 0.008671392440795898, 0.008820735931396484, 0.00869375991821289, 0.008643936157226563, 0.008665375709533692, 0.008693728446960449, 0.00866335964202881, 0.008623807907104493, 0.008652607917785644, 0.008673888206481933, 0.008656895637512207, 0.008626175880432128, 0.008711615562438964, 0.008620608329772949, 0.008613759994506837, 0.008630399703979491, 0.008596927642822265, 0.008659680366516114, 0.008728416442871093, 0.008664575576782227, 0.008724575996398925, 0.008626591682434083, 0.008638175964355468, 0.008644895553588867, 0.008650336265563965, 0.008671648025512695, 0.008650655746459962, 0.008742752075195313, 0.008653056144714356, 0.008691167831420899, 0.008751104354858399, 0.008702495574951171, 0.008638400077819824, 0.008632384300231933, 0.0086048002243042, 0.008652799606323243, 0.008695679664611817, 0.00869478416442871, 0.008773887634277344, 0.008666015625, 0.008649344444274903, 0.00862230396270752, 0.008773632049560547, 0.008936736106872558, 0.008710880279541016, 0.008722432136535644, 0.008654720306396484, 0.008659071922302246, 0.008662976264953614, 0.008634431838989258, 0.008847328186035156, 0.008658304214477539, 0.00870627212524414, 0.008655296325683594, 0.008699904441833496, 0.008695808410644532, 0.008634367942810058, 0.008673055648803711, 0.00889468765258789, 0.008622079849243165, 0.008629887580871583, 0.00865932846069336, 0.008624128341674805, 0.008662976264953614, 0.008664544105529786, 0.00870361614227295, 0.008689760208129883, 0.008647551536560058, 0.008648703575134278, 0.00865891170501709, 
0.008629792213439942, 0.008643072128295898, 0.00866921615600586, 0.008648672103881837, 0.008930432319641112, 0.008762240409851074, 0.008799903869628907, 0.008634719848632812, 0.008668416023254395, 0.008624896049499512, 0.00869375991821289, 0.008699904441833496, 0.008640512466430664, 0.008667296409606933, 0.008755040168762208, 0.008873984336853028, 0.00882211208343506, 0.008644960403442382, 0.008659008026123047, 0.008824928283691406, 0.008904576301574707, 0.008728416442871093, 0.008637920379638672, 0.008659647941589356, 0.008665375709533692, 0.008635904312133789, 0.00861235237121582, 0.008687711715698243, 0.008656800270080567, 0.008615424156188965, 0.008622591972351074, 0.008589311599731446, 0.00871628761291504, 0.008843263626098634, 0.008695808410644532, 0.00863974380493164, 0.00858726406097412, 0.008634367942810058, 0.008667136192321777, 0.008683520317077637, 0.00867311954498291, 0.008710304260253906, 0.008626175880432128, 0.00935103988647461, 0.009721983909606933, 0.009891167640686035, 0.008882847785949707, 0.008793984413146973, 0.008722559928894043, 0.00871014404296875, 0.008715264320373535, 0.008692319869995118, 0.00877184009552002, 0.008684800148010255, 0.008718655586242676, 0.008634976387023926, 0.00864038372039795, 0.008659071922302246, 0.008670207977294921, 0.008651776313781738, 0.008617631912231445, 0.008636768341064454, 0.008816287994384766, 0.00869331169128418, 0.00861417579650879, 0.008631072044372558, 0.008590239524841309, 0.008620287895202637, 0.008682047843933106, 0.008671232223510742, 0.008646656036376953, 0.008617888450622559, 0.00866540813446045, 0.008676575660705567, 0.008693535804748536, 0.008648639678955078, 0.008664928436279296, 0.008691807746887208, 0.008688544273376465, 0.008674559593200683, 0.00867199993133545, 0.008727647781372071, 0.008740960121154785, 0.008763551712036133, 0.008677536010742188, 0.008709823608398438, 0.008772480010986327, 0.008988384246826173, 0.008773856163024903, 0.008689663887023925, 0.008742912292480469, 0.008836607933044433, 0.008813055992126465, 0.00895792007446289, 0.008962080001831055, 0.008992768287658692, 0.009058303833007812, 0.009115551948547363, 0.008925279617309571, 0.00870195198059082, 0.008761343955993652, 0.008746527671813964, 0.008661151885986328, 0.008735136032104492, 0.008701663970947265, 0.008655039787292481, 0.008683103561401367, 0.008714367866516113, 0.008716511726379394, 0.008694047927856446, 0.008681247711181641, 0.008664192199707032, 0.008684512138366698, 0.008691616058349609, 0.008777536392211915, 0.00869497585296631, 0.008946687698364257, 0.00888003158569336, 0.008671327590942383, 0.008691712379455567, 0.008690848350524902, 0.008831647872924805, 0.00872208023071289, 0.00866483211517334, 0.00865766429901123, 0.008665120124816895, 0.008648703575134278, 0.008639840126037598, 0.008663711547851562, 0.008642784118652344, 0.008619808197021484, 0.008689279556274415, 0.008650815963745118, 0.008614208221435547, 0.008613375663757325, 0.008615519523620606, 0.008667840003967284, 0.008769120216369629, 0.008753791809082031, 0.008615936279296875, 0.008652095794677734, 0.008663488388061523, 0.008927680015563965, 0.008789823532104493, 0.008705856323242188, 0.00867347240447998, 0.008719488143920899, 0.00869660758972168, 0.008755295753479005, 0.008851455688476563, 0.008754176139831543, 0.008696831703186036, 0.008740415573120117, 0.008706080436706542, 0.008800671577453614, 0.008761119842529298, 0.00872265625, 0.008658944129943847, 0.008909119606018066, 0.00886451244354248, 0.008756159782409668, 0.008940608024597167, 0.008665087699890137, 
0.009052096366882324, 0.009269311904907226, 0.0089551362991333, 0.008833791732788086, 0.008734720230102539, 0.008654848098754882, 0.008660639762878418, 0.008661343574523925, 0.008662464141845704, 0.008720671653747559, 0.008675616264343261, 0.008652159690856934, 0.00866982364654541, 0.009129983901977539, 0.008681471824645997, 0.008681471824645997, 0.008648639678955078, 0.008800000190734862, 0.008732992172241211, 0.008619423866271972, 0.008736543655395508, 0.008665663719177246, 0.008667391777038574, 0.008664447784423828, 0.008665727615356446, 0.008715776443481446, 0.008950271606445313, 0.008826016426086426, 0.008763520240783692, 0.008674015998840332, 0.008613887786865235, 0.008724255561828613, 0.008630496025085449, 0.008599552154541015, 0.00882688045501709, 0.008720607757568359, 0.008724127769470215, 0.00874508762359619, 0.008685088157653809, 0.00861580753326416, 0.008661600112915039, 0.00864367961883545, 0.008651328086853028, 0.008720735549926757, 0.008812447547912598, 0.008730719566345215, 0.008828927993774414, 0.008748543739318848, 0.009867775917053222, 0.008855551719665527, 0.008828927993774414, 0.008704000473022461, 0.008697855949401855, 0.008635968208312987, 0.008654879570007325, 0.008626591682434083, 0.008689663887023925, 0.008657119750976562, 0.008666912078857421, 0.008668352127075195, 0.008702816009521485, 0.008822751998901366, 0.008669504165649414, 0.008677087783813476, 0.008807168006896973, 0.008644800186157226, 0.008750847816467285, 0.008749119758605957, 0.008644607543945313, 0.00923852825164795, 0.0086364164352417, 0.008660767555236816, 0.008698080062866211, 0.008802207946777343, 0.008635807991027832, 0.0086692476272583, 0.008641152381896973, 0.008748671531677247, 0.00874505615234375, 0.008660448074340821, 0.0086778564453125, 0.00869820785522461, 0.008631808280944824, 0.008681280136108398, 0.008796992301940918, 0.008862815856933593, 0.008817440032958984, 0.008779552459716797, 0.0087390079498291, 0.009099295616149902, 0.008867839813232421, 0.008906496047973634, 0.00906060791015625, 0.008996864318847657, 0.009043968200683594, 0.00908902359008789, 0.008989727973937989, 0.00904905605316162, 0.008880127906799316, 0.00882198429107666, 0.008667936325073242, 0.008755200386047364, 0.008715680122375488, 0.008667200088500977, 0.008688032150268555, 0.008674976348876953, 0.00875158405303955, 0.008988672256469727, 0.008861984252929687, 0.008818400382995606, 0.008813887596130372, 0.008778431892395019, 0.008902303695678711, 0.008907103538513183, 0.008929120063781739, 0.00904531192779541, 0.008886816024780274, 0.008722208023071288, 0.008650591850280762, 0.008689632415771484, 0.008671775817871093, 0.0087390079498291, 0.00870304012298584, 0.008632736206054687, 0.008639007568359376, 0.008576895713806152, 0.008899935722351074, 0.008993599891662598, 0.00885696029663086, 0.008877920150756837, 0.0089136323928833, 0.009025856018066407, 0.00896713638305664, 0.009054207801818847, 0.009058783531188965, 0.009000896453857422, 0.009090527534484864, 0.009159744262695313, 0.009131648063659669, 0.008971936225891113, 0.008911392211914063, 0.008812159538269042, 0.008816991806030273, 0.008865823745727539, 0.008910367965698242, 0.008931808471679688, 0.009037247657775879, 0.008984288215637208, 0.00910217571258545, 0.008967424392700195, 0.008819456100463868, 0.008793120384216308, 0.00874185562133789, 0.00871833610534668, 0.00872771167755127, 0.008724991798400878, 0.008671168327331544, 0.008714143753051757, 0.008724864006042481, 0.00869593620300293, 0.008667136192321777, 0.008671232223510742, 0.00881049633026123, 
0.008733695983886718, 0.008716608047485351, 0.008630463600158692, 0.008780415534973145, 0.008773504257202148, 0.00877302360534668, 0.008682080268859863, 0.00880134391784668, 0.008728704452514648, 0.008771903991699219, 0.008724448204040527, 0.008704704284667969, 0.008749024391174316, 0.008974207878112792, 0.008711872100830078, 0.008786175727844238, 0.008641759872436523, 0.008770112037658692, 0.008677248001098634, 0.008785951614379882, 0.008687999725341797, 0.00867734432220459, 0.008678720474243165, 0.008880864143371582, 0.008820768356323241, 0.008773632049560547, 0.008775679588317872, 0.008864992141723633, 0.008749855995178222, 0.008779104232788087, 0.008790016174316406, 0.008691712379455567, 0.008706720352172851, 0.008642560005187988, 0.008707072257995606, 0.008772607803344726, 0.008693120002746583, 0.008663392066955567, 0.008694304466247558, 0.008654591560363769, 0.008662976264953614, 0.008672415733337402, 0.008665023803710937, 0.00879100799560547, 0.012177760124206544, 0.010573472023010254, 0.009073696136474609, 0.00879100799560547, 0.008967424392700195, 0.009388383865356445, 0.008741279602050782, 0.008722432136535644, 0.008704000473022461, 0.008674336433410644, 0.00869046401977539, 0.008656319618225097, 0.008706815719604492, 0.008678496360778809, 0.008638848304748535, 0.00865334415435791, 0.008785920143127441, 0.00871833610534668, 0.008763392448425293, 0.008746944427490234, 0.008703743934631348, 0.008745535850524903, 0.008727840423583984, 0.008712672233581542, 0.008695520401000977, 0.008672639846801758, 0.008743840217590332, 0.008765439987182617, 0.008959808349609376, 0.008962240219116211, 0.008902560234069825, 0.008953856468200684, 0.008994912147521973, 0.008833024024963379, 0.008654848098754882, 0.008717535972595215, 0.008753952026367187, 0.008684608459472656, 0.008690815925598144, 0.008771391868591308, 0.008793120384216308, 0.008729056358337402, 0.008729087829589843, 0.008662112236022949, 0.008644607543945313, 0.008779423713684082, 0.009003680229187011, 0.008789440155029297, 0.008719840049743653, 0.008708895683288575, 0.008697855949401855, 0.008683520317077637, 0.00868070411682129, 0.008702207565307617, 0.008716959953308106, 0.008602560043334961, 0.00874505615234375, 0.008843168258666993, 0.008809375762939452, 0.00873862361907959, 0.009042112350463868, 0.008943615913391113, 0.008730624198913574, 0.008837120056152344, 0.010280960083007813, 0.008828927993774414, 0.008748671531677247, 0.008749440193176269, 0.008734720230102539, 0.008783871650695801, 0.008744959831237792, 0.008689087867736816, 0.008915519714355469, 0.0087326717376709, 0.00868556785583496, 0.008634367942810058, 0.00872652816772461, 0.008769087791442871, 0.00867311954498291, 0.008688223838806153, 0.008721599578857422, 0.008710975646972657, 0.008673536300659179, 0.008680831909179688, 0.00858726406097412, 0.008655008316040038, 0.00875648021697998, 0.008684224128723144, 0.008630784034729003, 0.008640031814575195, 0.00869820785522461, 0.008705663681030273, 0.008730912208557129, 0.008654656410217286, 0.008733887672424317, 0.00870297622680664, 0.00868329620361328, 0.008601823806762695, 0.008835071563720704, 0.008749055862426757, 0.008695808410644532, 0.00867910385131836, 0.008624128341674805, 0.008707807540893555, 0.00877014446258545, 0.008713664054870606, 0.008702655792236329, 0.008625056266784668, 0.008738271713256836, 0.008613696098327637, 0.008677151679992676, 0.008666048049926758, 0.008691712379455567, 0.008667136192321777, 0.008613216400146484, 0.008665760040283204, 0.00864412784576416, 0.008708160400390625, 
0.008558208465576173, 0.008719136238098144, 0.008697855949401855, 0.008642560005187988, 0.008617119789123535, 0.008631135940551758, 0.008644224166870117, 0.008687744140625, 0.008683775901794434, 0.00860364818572998, 0.008672800064086914, 0.008665568351745605, 0.008777728080749512, 0.008719584465026856, 0.008636639595031739, 0.008681280136108398, 0.00865561580657959, 0.008691712379455567, 0.008665216445922852, 0.008707967758178711, 0.008693087577819824, 0.008700575828552246, 0.008630047798156739, 0.008593631744384765, 0.008670592308044433, 0.008690624237060547, 0.008734399795532227, 0.008640000343322754, 0.008655360221862793, 0.008675168037414551, 0.008683839797973632, 0.008682527542114259, 0.008631104469299317, 0.00866710376739502, 0.008805472373962403, 0.008696767807006836, 0.008689215660095215, 0.008696224212646484, 0.00877340793609619, 0.008737024307250976, 0.00872985553741455, 0.008911616325378418, 0.008824831962585449, 0.008683520317077637, 0.008773632049560547, 0.008675328254699707, 0.008671168327331544, 0.008654335975646972, 0.00881436824798584, 0.00870479965209961, 0.0086179838180542, 0.00860364818572998, 0.008595808029174805, 0.008611776351928712, 0.008869952201843261, 0.009049471855163574, 0.009630335807800292, 0.008885408401489258, 0.0091278076171875, 0.008999903678894043, 0.008771583557128907, 0.008729951858520508, 0.00881116771697998, 0.008913087844848632, 0.008666751861572265, 0.008713919639587403, 0.008661375999450684, 0.00861347198486328, 0.00859945583343506, 0.008610431671142577, 0.008673536300659179, 0.009271360397338868, 0.00871555233001709, 0.008680895805358886, 0.008787167549133302, 0.008668928146362305, 0.00881174373626709, 0.00864691162109375, 0.008655391693115234, 0.008623904228210449, 0.008596896171569824, 0.008673983573913574, 0.008654975891113281, 0.008644607543945313, 0.008640640258789063, 0.009251744270324706, 0.00984995174407959, 0.008998175621032715, 0.008608351707458496, 0.008801600456237794, 0.008696512222290039, 0.008799327850341796, 0.00939510440826416, 0.00859340763092041, 0.008617247581481934, 0.008624832153320312, 0.008673312187194825, 0.008687456130981446, 0.008636320114135742, 0.008667200088500977, 0.008624320030212402, 0.008646944046020507, 0.00874176025390625, 0.008633184432983398, 0.008645888328552245, 0.008743200302124023, 0.008714624404907226, 0.008763327598571777, 0.009027327537536621, 0.009759072303771973, 0.009269536018371582, 0.009297696113586426, 0.00929964828491211, 0.009193792343139648, 0.009371552467346191]",tokens/s,114.37959647815161,,